hexsha stringlengths 40 40 | size int64 1 1.03M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 239 | max_stars_repo_name stringlengths 5 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 239 | max_issues_repo_name stringlengths 5 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 239 | max_forks_repo_name stringlengths 5 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.03M | avg_line_length float64 1 958k | max_line_length int64 1 1.03M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
acf1e6932edaa4822137b56af879e2fd0244836e | 2,820 | py | Python | st2reactor/st2reactor/cmd/timersengine.py | shusugmt/st2 | 31da26badfb4ca3fb3e8cae07cfeec4791191afd | ["Apache-2.0"] | 1 | 2020-11-09T21:05:33.000Z | 2020-11-09T21:05:33.000Z | st2reactor/st2reactor/cmd/timersengine.py | ellerbrock/st2 | b3a0d9f82053c1fd5adb616dc8331bad427cd11f | ["Apache-2.0"] | 3 | 2021-03-25T23:57:10.000Z | 2021-03-26T00:01:05.000Z | st2reactor/st2reactor/cmd/timersengine.py | ellerbrock/st2 | b3a0d9f82053c1fd5adb616dc8331bad427cd11f | ["Apache-2.0"] | null | null | null |
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import os
import sys
import eventlet
from oslo_config import cfg
from st2common import log as logging
from st2common.constants.timer import TIMER_ENABLED_LOG_LINE, TIMER_DISABLED_LOG_LINE
from st2common.logging.misc import get_logger_name_for_module
from st2common.service_setup import setup as common_setup
from st2common.service_setup import teardown as common_teardown
from st2common.util.monkey_patch import monkey_patch
from st2reactor.timer import config
from st2reactor.timer.base import St2Timer
monkey_patch()
LOGGER_NAME = get_logger_name_for_module(sys.modules[__name__])
LOG = logging.getLogger(LOGGER_NAME)
def _setup():
common_setup(service='timer_engine', config=config, setup_db=True, register_mq_exchanges=True,
register_signal_handlers=True)
def _teardown():
common_teardown()
def _kickoff_timer(timer):
timer.start()
def _run_worker():
LOG.info('(PID=%s) TimerEngine started.', os.getpid())
timer = None
try:
timer_thread = None
if cfg.CONF.timer.enable or cfg.CONF.timersengine.enable:
local_tz = cfg.CONF.timer.local_timezone or cfg.CONF.timersengine.local_timezone
timer = St2Timer(local_timezone=local_tz)
timer_thread = eventlet.spawn(_kickoff_timer, timer)
LOG.info(TIMER_ENABLED_LOG_LINE)
return timer_thread.wait()
else:
LOG.info(TIMER_DISABLED_LOG_LINE)
except (KeyboardInterrupt, SystemExit):
LOG.info('(PID=%s) TimerEngine stopped.', os.getpid())
except:
LOG.exception('(PID:%s) TimerEngine quit due to exception.', os.getpid())
return 1
finally:
if timer:
timer.cleanup()
return 0
def main():
try:
_setup()
return _run_worker()
except SystemExit as exit_code:
sys.exit(exit_code)
except:
LOG.exception('(PID=%s) TimerEngine quit due to exception.', os.getpid())
return 1
finally:
_teardown()
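# Illustrative entry-point sketch (not part of the original module): in StackStorm
# this service is normally launched through a console-script wrapper generated at
# install time, so a guard like the one below is shown only for completeness.
#
#   if __name__ == '__main__':
#       sys.exit(main())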
| 31.685393 | 98 | 0.723404 |
acf1e755b0cc754b559a493c07420cd721977ce9 | 76 | py | Python | models/__init__.py | lxasqjc/Foveation-Segmentation | 557ad1c88343c05d7dfc765b5fa4cbe6fb237d74 | ["MIT"] | 39 | 2020-07-31T16:57:42.000Z | 2022-03-28T15:38:57.000Z | models/__init__.py | lxasqjc/Foveation-Segmentation | 557ad1c88343c05d7dfc765b5fa4cbe6fb237d74 | ["MIT"] | 2 | 2020-11-03T03:43:13.000Z | 2021-05-05T18:05:05.000Z | models/__init__.py | lxasqjc/Foveation-Segmentation | 557ad1c88343c05d7dfc765b5fa4cbe6fb237d74 | ["MIT"] | 6 | 2020-11-16T07:01:28.000Z | 2022-03-28T15:39:35.000Z |
from .models import ModelBuilder, SegmentationModule, FovSegmentationModule
| 38 | 75 | 0.881579 |
acf1e795e44bbd91e4f08a5ebd6255fa7a408958 | 1,025 | py | Python | examples/song.py | kcfkwok2003/m5_star_navigator | 3c2fcc8edfecc417965ce08e159745426d614a8d | ["MIT"] | null | null | null | examples/song.py | kcfkwok2003/m5_star_navigator | 3c2fcc8edfecc417965ce08e159745426d614a8d | ["MIT"] | null | null | null | examples/song.py | kcfkwok2003/m5_star_navigator | 3c2fcc8edfecc417965ce08e159745426d614a8d | ["MIT"] | null | null | null |
#ref : https://gist.github.com/nicksort/4736535
from machine import Pin,PWM
from array import array
import time
c = 261
d = 294
e = 329
f = 349
g = 391
gS = 415
a = 440
aS = 466
b = 494
cH = 523
cSH = 554
dH = 587
dSH = 622
eH = 659
fH = 698
fSH = 740
gH = 784
gSH = 830
aH = 880
DUTY_ON=99
DUTY_OFF=0
class SONG:
def __init__(self,pinx):
global Pin,PWM
self.speaker=PWM(Pin(pinx,Pin.OUT))
self.speaker.duty(0)
def set_notes(self, notes, durations):
self.notes=notes
self.durations=durations
def play(self):
global DUTY_ON,DUTY_OFF
for i in range(len(self.notes)):
self.speaker.freq(self.notes[i])
self.speaker.duty(DUTY_ON)
time.sleep_ms(self.durations[i])
self.speaker.duty(DUTY_OFF)
time.sleep(0.05)
if __name__=='__main__':
NOTES1=[a,a,a,f,cH,a,f,cH,a]
DURATIONS1=[500,500,500,350,150,500,350,150,650]
song=SONG(25)
song.set_notes(NOTES1,DURATIONS1)
song.play()
| 18.981481 | 51 | 0.614634 |
acf1e79b005fbc6768c9665242c00e166229022e | 2,924 | py | Python | tests/test_spice.py | diagonalDave/skidl | a68207c72ecef919ca66ca8c3b00394f5809ac3b | [
"MIT"
] | 2 | 2022-02-27T14:31:52.000Z | 2022-02-27T14:31:56.000Z | tests/test_spice.py | diagonalDave/skidl | a68207c72ecef919ca66ca8c3b00394f5809ac3b | [
"MIT"
] | null | null | null | tests/test_spice.py | diagonalDave/skidl | a68207c72ecef919ca66ca8c3b00394f5809ac3b | [
"MIT"
] | null | null | null | import pytest
from skidl.common import *
from .setup_teardown import *
# Skip this test module if PySpice is missing.
pexpect = pytest.importorskip("PySpice")
from skidl.pyspice import * # isort:skip
def test_lib_import_1():
lib_search_paths[SPICE].append(r"./SpiceLib/lib")
lib = SchLib("lt1083", tool=SPICE)
assert len(lib) > 0
for p in lib.get_parts():
print(p)
def test_lib_import_2():
with pytest.raises(FileNotFoundError):
lib = SchLib("lt1074", tool=SPICE)
def test_lib_export_1():
# lib_search_paths[SPICE].append(r"C:\Program Files (x86)\LTC\LTspiceIV\lib")
set_default_tool(SPICE)
lib = SchLib("lt1083", tool=SPICE)
lib.export("my_lt1083", tool=SKIDL)
# Doesn't work because of "pyspice={...}" placed in exported library.
# my_lib = SchLib('my_lt1083', tool=SKIDL)
# assert len(lib) == len(my_lib)
def test_xspice_1():
set_default_tool(SPICE)
# Component declarations showing various XSPICE styles.
vin = sinev(offset=1.65 @ u_V, amplitude=1.65 @ u_V, frequency=100e6)
adc = Part(
"pyspice",
"A",
io="anlg_in[],dig_out[]",
model=XspiceModel(
"adc",
"adc_bridge",
in_low=0.05 @ u_V,
in_high=0.1 @ u_V,
rise_delay=1e-9 @ u_s,
fall_delay=1e-9 @ u_s,
),
tool=SKIDL,
)
buf_tmp = A(
io=["buf_in, buf_out"],
model=XspiceModel(
"buf",
"d_buffer",
rise_delay=1e-9 @ u_s,
fall_delay=1e-9 @ u_s,
input_load=1e-12 @ u_s,
),
dest=TEMPLATE,
)
buf = buf_tmp()
dac = A(
io=["dig_in[]", "anlg_out[]"],
model=XspiceModel("dac", "dac_bridge", out_low=1.0 @ u_V, out_high=3.3 @ u_V),
)
r = R(value=1 @ u_kOhm)
# Create a part with no connections to test NULL SPICE connections.
# buf2 = buf_tmp()
# buf2["buf_in"] += NC
# Connections: sine wave -> ADC -> buffer -> DAC.
vin["p, n"] += (
adc["anlg_in"][0],
gnd,
) # Attach to first pin in ADC anlg_in vector of pins.
adc["dig_out"][0] += buf[
"buf_in"
] # Attach first pin of ADC dig_out vector to buffer.
buf["buf_out"] += dac["dig_in"][
0
] # Attach buffer output to first pin of DAC dig_in vector of pins.
r["p,n"] += (
dac["anlg_out"][0],
gnd,
) # Attach first pin of DAC anlg_out vector to load resistor.
circ = generate_netlist()
print(circ)
sim = circ.simulator()
waveforms = sim.transient(step_time=0.1 @ u_ns, end_time=50 @ u_ns)
time = waveforms.time
vin = waveforms[node(vin["p"])]
vout = waveforms[node(r["p"])]
print("{:^7s}{:^7s}".format("vin", "vout"))
print("=" * 15)
for v1, v2 in zip(vin.as_ndarray(), vout.as_ndarray()):
print("{:6.2f} {:6.2f}".format(v1, v2))
| 27.074074 | 86 | 0.572503 |
acf1e7bfb38b0271bcdb24eb0ff7864f985889bb | 7,346 | py | Python | modules/mysql-connector-python/mysql/connector/__init__.py | mediabrasiltv/plugin.video.netflix | 48c7ccc0492d877bd21076140fcd5b1f8a1b31b2 | [
"MIT"
] | 1 | 2019-12-23T05:21:34.000Z | 2019-12-23T05:21:34.000Z | activate/Lib/site-packages/mysql/connector/__init__.py | Tanushree28/Cyberbullying-Detection | 3a69ade5cf068b640a0d6d1f176ff0d0e2040501 | [
"MIT"
] | 5 | 2021-03-30T14:07:38.000Z | 2021-09-22T19:31:48.000Z | activate/Lib/site-packages/mysql/connector/__init__.py | Tanushree28/Cyberbullying-Detection | 3a69ade5cf068b640a0d6d1f176ff0d0e2040501 | [
"MIT"
] | 1 | 2019-12-23T05:21:36.000Z | 2019-12-23T05:21:36.000Z | # Copyright (c) 2009, 2019, Oracle and/or its affiliates. All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License, version 2.0, as
# published by the Free Software Foundation.
#
# This program is also distributed with certain software (including
# but not limited to OpenSSL) that is licensed under separate terms,
# as designated in a particular file or component or in included license
# documentation. The authors of MySQL hereby grant you an
# additional permission to link the program and your derivative works
# with the separately licensed software that they have included with
# MySQL.
#
# Without limiting anything contained in the foregoing, this file,
# which is part of MySQL Connector/Python, is also subject to the
# Universal FOSS Exception, version 1.0, a copy of which can be found at
# http://oss.oracle.com/licenses/universal-foss-exception.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License, version 2.0, for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
MySQL Connector/Python - MySQL driver written in Python
"""
try:
import _mysql_connector # pylint: disable=F0401
from .connection_cext import CMySQLConnection
except ImportError:
HAVE_CEXT = False
else:
HAVE_CEXT = True
from . import version
from .connection import MySQLConnection
from .errors import ( # pylint: disable=W0622
Error, Warning, InterfaceError, DatabaseError,
NotSupportedError, DataError, IntegrityError, ProgrammingError,
OperationalError, InternalError, custom_error_exception, PoolError)
from .constants import FieldFlag, FieldType, CharacterSet, \
RefreshOption, ClientFlag
from .dbapi import (
Date, Time, Timestamp, Binary, DateFromTicks,
TimestampFromTicks, TimeFromTicks,
STRING, BINARY, NUMBER, DATETIME, ROWID,
apilevel, threadsafety, paramstyle)
from .optionfiles import read_option_files
_CONNECTION_POOLS = {}
def _get_pooled_connection(**kwargs):
"""Return a pooled MySQL connection"""
# If no pool name specified, generate one
from .pooling import (
MySQLConnectionPool, generate_pool_name,
CONNECTION_POOL_LOCK)
try:
pool_name = kwargs['pool_name']
except KeyError:
pool_name = generate_pool_name(**kwargs)
# Setup the pool, ensuring only 1 thread can update at a time
with CONNECTION_POOL_LOCK:
if pool_name not in _CONNECTION_POOLS:
_CONNECTION_POOLS[pool_name] = MySQLConnectionPool(**kwargs)
elif isinstance(_CONNECTION_POOLS[pool_name], MySQLConnectionPool):
# pool_size must be the same
check_size = _CONNECTION_POOLS[pool_name].pool_size
if ('pool_size' in kwargs
and kwargs['pool_size'] != check_size):
raise PoolError("Size can not be changed "
"for active pools.")
# Return pooled connection
try:
return _CONNECTION_POOLS[pool_name].get_connection()
except AttributeError:
raise InterfaceError(
"Failed getting connection from pool '{0}'".format(pool_name))
def _get_failover_connection(**kwargs):
"""Return a MySQL connection and try to failover if needed
An InterfaceError is raised when no MySQL server is available. ValueError is
raised when the failover server configuration contains an illegal
connection argument. Supported arguments are user, password, host, port,
unix_socket and database. ValueError is also raised when the failover
argument was not provided.
Returns MySQLConnection instance.
"""
config = kwargs.copy()
try:
failover = config['failover']
except KeyError:
raise ValueError('failover argument not provided')
del config['failover']
support_cnx_args = set(
['user', 'password', 'host', 'port', 'unix_socket',
'database', 'pool_name', 'pool_size'])
# First check if we can add all use the configuration
for server in failover:
diff = set(server.keys()) - support_cnx_args
if diff:
raise ValueError(
"Unsupported connection argument {0} in failover: {1}".format(
's' if len(diff) > 1 else '',
', '.join(diff)))
for server in failover:
new_config = config.copy()
new_config.update(server)
try:
return connect(**new_config)
except Error:
# If we failed to connect, we try the next server
pass
raise InterfaceError("Could not failover: no MySQL server available")
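# Illustrative failover call (hostnames and credentials below are placeholders,
# not values shipped with this package). Each dictionary in the 'failover'
# sequence may only use the connection arguments listed in the docstring above:
#
#   cnx = connect(user='app', password='secret', database='appdb',
#                 failover=[{'host': 'db1.example.com', 'port': 3306},
#                           {'host': 'db2.example.com', 'port': 3306}])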
def connect(*args, **kwargs):
"""Create or get a MySQL connection object
In its simplest form, Connect() will open a connection to a
MySQL server and return a MySQLConnection object.
When any connection pooling arguments are given, for example pool_name
or pool_size, a pool is created or a previously created one is used to return
a PooledMySQLConnection.
Returns MySQLConnection or PooledMySQLConnection.
"""
# Option files
if 'read_default_file' in kwargs:
kwargs['option_files'] = kwargs['read_default_file']
kwargs.pop('read_default_file')
if 'option_files' in kwargs:
new_config = read_option_files(**kwargs)
return connect(**new_config)
# Failover
if 'failover' in kwargs:
return _get_failover_connection(**kwargs)
# Pooled connections
try:
from .constants import CNX_POOL_ARGS
if any([key in kwargs for key in CNX_POOL_ARGS]):
return _get_pooled_connection(**kwargs)
except NameError:
# No pooling
pass
# Use C Extension by default
use_pure = kwargs.get('use_pure', False)
if 'use_pure' in kwargs:
del kwargs['use_pure'] # Remove 'use_pure' from kwargs
if not use_pure and not HAVE_CEXT:
raise ImportError("MySQL Connector/Python C Extension not "
"available")
if HAVE_CEXT and not use_pure:
return CMySQLConnection(*args, **kwargs)
return MySQLConnection(*args, **kwargs)
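# Illustrative usage sketch (credentials are placeholders). Passing any pooling
# argument such as pool_name or pool_size makes connect() hand back a pooled
# connection, whose close() returns it to the pool instead of dropping the link:
#
#   cnx = connect(user='scott', password='secret', host='127.0.0.1',
#                 database='employees', pool_name='mypool', pool_size=3)
#   cur = cnx.cursor()
#   cur.execute("SELECT 1")
#   cur.fetchall()
#   cnx.close()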
Connect = connect # pylint: disable=C0103
__version_info__ = version.VERSION
__version__ = version.VERSION_TEXT
__all__ = [
'MySQLConnection', 'Connect', 'custom_error_exception',
# Some useful constants
'FieldType', 'FieldFlag', 'ClientFlag', 'CharacterSet', 'RefreshOption',
'HAVE_CEXT',
# Error handling
'Error', 'Warning',
'InterfaceError', 'DatabaseError',
'NotSupportedError', 'DataError', 'IntegrityError', 'ProgrammingError',
'OperationalError', 'InternalError',
# DBAPI PEP 249 required exports
'connect', 'apilevel', 'threadsafety', 'paramstyle',
'Date', 'Time', 'Timestamp', 'Binary',
'DateFromTicks', 'DateFromTicks', 'TimestampFromTicks', 'TimeFromTicks',
'STRING', 'BINARY', 'NUMBER',
'DATETIME', 'ROWID',
# C Extension
'CMySQLConnection',
]
| 35.660194 | 79 | 0.689082 |
acf1e832ed1cbf3220165c196362bb7674228ec7 | 20,549 | py | Python | flux_mito/model_361.py | LoLab-VU/Bayesian_Inference_of_Network_Dynamics | 54a5ef7e868be34289836bbbb024a2963c0c9c86 | [
"MIT"
] | null | null | null | flux_mito/model_361.py | LoLab-VU/Bayesian_Inference_of_Network_Dynamics | 54a5ef7e868be34289836bbbb024a2963c0c9c86 | [
"MIT"
] | null | null | null | flux_mito/model_361.py | LoLab-VU/Bayesian_Inference_of_Network_Dynamics | 54a5ef7e868be34289836bbbb024a2963c0c9c86 | [
"MIT"
] | null | null | null | # exported from PySB model 'model'
from pysb import Model, Monomer, Parameter, Expression, Compartment, Rule, Observable, Initial, MatchOnce, Annotation, ANY, WILD
Model()
Monomer('Ligand', ['Receptor'])
Monomer('ParpU', ['C3A'])
Monomer('C8A', ['BidU', 'C3pro'])
Monomer('SmacM', ['BaxA'])
Monomer('BaxM', ['BidM', 'BaxA'])
Monomer('Apop', ['C3pro', 'Xiap'])
Monomer('Fadd', ['Receptor', 'C8pro'])
Monomer('SmacC', ['Xiap'])
Monomer('ParpC')
Monomer('Xiap', ['SmacC', 'Apop', 'C3A'])
Monomer('C9')
Monomer('C3ub')
Monomer('C8pro', ['Fadd', 'C6A'])
Monomer('Bcl2', ['BidM', 'BaxA'])
Monomer('C3pro', ['Apop', 'C8A'])
Monomer('CytoCM', ['BaxA'])
Monomer('CytoCC')
Monomer('BaxA', ['BaxM', 'Bcl2', 'BaxA_1', 'BaxA_2', 'SmacM', 'CytoCM'])
Monomer('ApafI')
Monomer('BidU', ['C8A'])
Monomer('BidT')
Monomer('C3A', ['Xiap', 'ParpU', 'C6pro'])
Monomer('ApafA')
Monomer('BidM', ['BaxM', 'Bcl2'])
Monomer('Receptor', ['Ligand', 'Fadd'])
Monomer('C6A', ['C8pro'])
Monomer('C6pro', ['C3A'])
Parameter('bind_0_Ligand_binder_Receptor_binder_target_2kf', 1.0)
Parameter('bind_0_Ligand_binder_Receptor_binder_target_1kr', 1.0)
Parameter('bind_0_Receptor_binder_Fadd_binder_target_2kf', 1.0)
Parameter('bind_0_Receptor_binder_Fadd_binder_target_1kr', 1.0)
Parameter('substrate_binding_0_Fadd_catalyzer_C8pro_substrate_2kf', 1.0)
Parameter('substrate_binding_0_Fadd_catalyzer_C8pro_substrate_1kr', 1.0)
Parameter('catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product_1kc', 1.0)
Parameter('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_2kf', 1.0)
Parameter('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_1kr', 1.0)
Parameter('catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product_1kc', 1.0)
Parameter('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_2kf', 1.0)
Parameter('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_1kr', 1.0)
Parameter('inhibition_0_SmacC_inhibitor_Xiap_inh_target_2kf', 1.0)
Parameter('inhibition_0_SmacC_inhibitor_Xiap_inh_target_1kr', 1.0)
Parameter('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_2kf', 1.0)
Parameter('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_1kr', 1.0)
Parameter('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_2kf', 1.0)
Parameter('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_1kr', 1.0)
Parameter('catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product_1kc', 1.0)
Parameter('inhibition_0_Xiap_inhibitor_Apop_inh_target_2kf', 1.0)
Parameter('inhibition_0_Xiap_inhibitor_Apop_inh_target_1kr', 1.0)
Parameter('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_2kf', 1.0)
Parameter('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_1kr', 1.0)
Parameter('catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product_1kc', 1.0)
Parameter('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_2kf', 1.0)
Parameter('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_1kr', 1.0)
Parameter('catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product_1kc', 1.0)
Parameter('equilibration_0_BidT_equil_a_BidM_equil_b_1kf', 1.0)
Parameter('equilibration_0_BidT_equil_a_BidM_equil_b_1kr', 1.0)
Parameter('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_2kf', 1.0)
Parameter('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_1kr', 1.0)
Parameter('catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product_1kc', 1.0)
Parameter('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_2kf', 1.0)
Parameter('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_1kr', 1.0)
Parameter('self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate_1kc', 1.0)
Parameter('inhibition_0_Bcl2_inhibitor_BidM_inh_target_2kf', 1.0)
Parameter('inhibition_0_Bcl2_inhibitor_BidM_inh_target_1kr', 1.0)
Parameter('inhibition_0_Bcl2_inhibitor_BaxA_inh_target_2kf', 1.0)
Parameter('inhibition_0_Bcl2_inhibitor_BaxA_inh_target_1kr', 1.0)
Parameter('pore_formation_0_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_0_BaxA_pore_1kr', 1.0)
Parameter('pore_formation_1_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_1_BaxA_pore_1kr', 1.0)
Parameter('pore_formation_2_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_2_BaxA_pore_1kr', 1.0)
Parameter('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_2kf', 1.0)
Parameter('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kr', 1.0)
Parameter('transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kc', 1.0)
Parameter('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_2kf', 1.0)
Parameter('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kr', 1.0)
Parameter('transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kc', 1.0)
Parameter('catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_2kf', 1.0)
Parameter('catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_1kr', 1.0)
Parameter('catalysis_1_C8A_catalyzer_C3pro_substrate_C3A_product_1kc', 1.0)
Parameter('catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_2kf', 1.0)
Parameter('catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_1kr', 1.0)
Parameter('catalysis_1_C3A_catalyzer_C6pro_substrate_C6A_product_1kc', 1.0)
Parameter('catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_2kf', 1.0)
Parameter('catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_1kr', 1.0)
Parameter('catalysis_1_C6A_catalyzer_C8pro_substrate_C8A_product_1kc', 1.0)
Parameter('Ligand_0', 1000.0)
Parameter('ParpU_0', 1000000.0)
Parameter('C8A_0', 0.0)
Parameter('SmacM_0', 100000.0)
Parameter('BaxM_0', 40000.0)
Parameter('Apop_0', 0.0)
Parameter('Fadd_0', 130000.0)
Parameter('SmacC_0', 0.0)
Parameter('ParpC_0', 0.0)
Parameter('Xiap_0', 42500.0)
Parameter('C9_0', 100000.0)
Parameter('C3ub_0', 0.0)
Parameter('C8pro_0', 130000.0)
Parameter('Bcl2_0', 40000.0)
Parameter('C3pro_0', 21000.0)
Parameter('CytoCM_0', 500000.0)
Parameter('CytoCC_0', 0.0)
Parameter('BaxA_0', 0.0)
Parameter('ApafI_0', 100000.0)
Parameter('BidU_0', 171000.0)
Parameter('BidT_0', 0.0)
Parameter('C3A_0', 0.0)
Parameter('ApafA_0', 0.0)
Parameter('BidM_0', 0.0)
Parameter('Receptor_0', 100.0)
Parameter('C6A_0', 0.0)
Parameter('C6pro_0', 100.0)
Observable('Ligand_obs', Ligand())
Observable('ParpU_obs', ParpU())
Observable('C8A_obs', C8A())
Observable('SmacM_obs', SmacM())
Observable('BaxM_obs', BaxM())
Observable('Apop_obs', Apop())
Observable('Fadd_obs', Fadd())
Observable('SmacC_obs', SmacC())
Observable('ParpC_obs', ParpC())
Observable('Xiap_obs', Xiap())
Observable('C9_obs', C9())
Observable('C3ub_obs', C3ub())
Observable('C8pro_obs', C8pro())
Observable('Bcl2_obs', Bcl2())
Observable('C3pro_obs', C3pro())
Observable('CytoCM_obs', CytoCM())
Observable('CytoCC_obs', CytoCC())
Observable('BaxA_obs', BaxA())
Observable('ApafI_obs', ApafI())
Observable('BidU_obs', BidU())
Observable('BidT_obs', BidT())
Observable('C3A_obs', C3A())
Observable('ApafA_obs', ApafA())
Observable('BidM_obs', BidM())
Observable('Receptor_obs', Receptor())
Observable('C6A_obs', C6A())
Observable('C6pro_obs', C6pro())
Rule('bind_0_Ligand_binder_Receptor_binder_target', Ligand(Receptor=None) + Receptor(Ligand=None, Fadd=None) | Ligand(Receptor=1) % Receptor(Ligand=1, Fadd=None), bind_0_Ligand_binder_Receptor_binder_target_2kf, bind_0_Ligand_binder_Receptor_binder_target_1kr)
Rule('bind_0_Receptor_binder_Fadd_binder_target', Receptor(Ligand=ANY, Fadd=None) + Fadd(Receptor=None, C8pro=None) | Receptor(Ligand=ANY, Fadd=1) % Fadd(Receptor=1, C8pro=None), bind_0_Receptor_binder_Fadd_binder_target_2kf, bind_0_Receptor_binder_Fadd_binder_target_1kr)
Rule('substrate_binding_0_Fadd_catalyzer_C8pro_substrate', Fadd(Receptor=ANY, C8pro=None) + C8pro(Fadd=None, C6A=None) | Fadd(Receptor=ANY, C8pro=1) % C8pro(Fadd=1, C6A=None), substrate_binding_0_Fadd_catalyzer_C8pro_substrate_2kf, substrate_binding_0_Fadd_catalyzer_C8pro_substrate_1kr)
Rule('catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product', Fadd(Receptor=ANY, C8pro=1) % C8pro(Fadd=1, C6A=None) >> Fadd(Receptor=ANY, C8pro=None) + C8A(BidU=None, C3pro=None), catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product_1kc)
Rule('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product', C8A(BidU=None, C3pro=None) + BidU(C8A=None) | C8A(BidU=1, C3pro=None) % BidU(C8A=1), catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_2kf, catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_1kr)
Rule('catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product', C8A(BidU=1, C3pro=None) % BidU(C8A=1) >> C8A(BidU=None, C3pro=None) + BidT(), catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product_1kc)
Rule('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex', ApafI() + CytoCC() | ApafA(), conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_2kf, conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_1kr)
Rule('inhibition_0_SmacC_inhibitor_Xiap_inh_target', SmacC(Xiap=None) + Xiap(SmacC=None, Apop=None, C3A=None) | SmacC(Xiap=1) % Xiap(SmacC=1, Apop=None, C3A=None), inhibition_0_SmacC_inhibitor_Xiap_inh_target_2kf, inhibition_0_SmacC_inhibitor_Xiap_inh_target_1kr)
Rule('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex', ApafA() + C9() | Apop(C3pro=None, Xiap=None), conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_2kf, conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_1kr)
Rule('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product', Apop(C3pro=None, Xiap=None) + C3pro(Apop=None, C8A=None) | Apop(C3pro=1, Xiap=None) % C3pro(Apop=1, C8A=None), catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_2kf, catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_1kr)
Rule('catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product', Apop(C3pro=1, Xiap=None) % C3pro(Apop=1, C8A=None) >> Apop(C3pro=None, Xiap=None) + C3A(Xiap=None, ParpU=None, C6pro=None), catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product_1kc)
Rule('inhibition_0_Xiap_inhibitor_Apop_inh_target', Xiap(SmacC=None, Apop=None, C3A=None) + Apop(C3pro=None, Xiap=None) | Xiap(SmacC=None, Apop=1, C3A=None) % Apop(C3pro=None, Xiap=1), inhibition_0_Xiap_inhibitor_Apop_inh_target_2kf, inhibition_0_Xiap_inhibitor_Apop_inh_target_1kr)
Rule('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product', Xiap(SmacC=None, Apop=None, C3A=None) + C3A(Xiap=None, ParpU=None, C6pro=None) | Xiap(SmacC=None, Apop=None, C3A=1) % C3A(Xiap=1, ParpU=None, C6pro=None), catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_2kf, catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_1kr)
Rule('catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product', Xiap(SmacC=None, Apop=None, C3A=1) % C3A(Xiap=1, ParpU=None, C6pro=None) >> Xiap(SmacC=None, Apop=None, C3A=None) + C3ub(), catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product_1kc)
Rule('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product', C3A(Xiap=None, ParpU=None, C6pro=None) + ParpU(C3A=None) | C3A(Xiap=None, ParpU=1, C6pro=None) % ParpU(C3A=1), catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_2kf, catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_1kr)
Rule('catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product', C3A(Xiap=None, ParpU=1, C6pro=None) % ParpU(C3A=1) >> C3A(Xiap=None, ParpU=None, C6pro=None) + ParpC(), catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product_1kc)
Rule('equilibration_0_BidT_equil_a_BidM_equil_b', BidT() | BidM(BaxM=None, Bcl2=None), equilibration_0_BidT_equil_a_BidM_equil_b_1kf, equilibration_0_BidT_equil_a_BidM_equil_b_1kr)
Rule('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product', BidM(BaxM=None, Bcl2=None) + BaxM(BidM=None, BaxA=None) | BidM(BaxM=1, Bcl2=None) % BaxM(BidM=1, BaxA=None), catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_2kf, catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_1kr)
Rule('catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product', BidM(BaxM=1, Bcl2=None) % BaxM(BidM=1, BaxA=None) >> BidM(BaxM=None, Bcl2=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product_1kc)
Rule('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate', BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxM(BidM=None, BaxA=None) | BaxA(BaxM=1, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) % BaxM(BidM=None, BaxA=1), self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_2kf, self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_1kr)
Rule('self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate', BaxA(BaxM=1, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) % BaxM(BidM=None, BaxA=1) >> BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate_1kc)
Rule('inhibition_0_Bcl2_inhibitor_BidM_inh_target', Bcl2(BidM=None, BaxA=None) + BidM(BaxM=None, Bcl2=None) | Bcl2(BidM=1, BaxA=None) % BidM(BaxM=None, Bcl2=1), inhibition_0_Bcl2_inhibitor_BidM_inh_target_2kf, inhibition_0_Bcl2_inhibitor_BidM_inh_target_1kr)
Rule('inhibition_0_Bcl2_inhibitor_BaxA_inh_target', Bcl2(BidM=None, BaxA=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) | Bcl2(BidM=None, BaxA=1) % BaxA(BaxM=None, Bcl2=1, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), inhibition_0_Bcl2_inhibitor_BaxA_inh_target_2kf, inhibition_0_Bcl2_inhibitor_BaxA_inh_target_1kr)
Rule('pore_formation_0_BaxA_pore', BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) | BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=None, SmacM=None, CytoCM=None), pore_formation_0_BaxA_pore_2kf, pore_formation_0_BaxA_pore_1kr)
Rule('pore_formation_1_BaxA_pore', BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=None, SmacM=None, CytoCM=None) | BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None), pore_formation_1_BaxA_pore_2kf, pore_formation_1_BaxA_pore_1kr)
Rule('pore_formation_2_BaxA_pore', BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) | BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None), pore_formation_2_BaxA_pore_2kf, pore_formation_2_BaxA_pore_1kr)
Rule('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C', BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + SmacM(BaxA=None) | BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=5, CytoCM=None) % SmacM(BaxA=5), transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_2kf, transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kr)
Rule('transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C', BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=5, CytoCM=None) % SmacM(BaxA=5) >> BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + SmacC(Xiap=None), transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kc)
Rule('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C', BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + CytoCM(BaxA=None) | BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=5) % CytoCM(BaxA=5), transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_2kf, transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kr)
Rule('transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C', BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=5) % CytoCM(BaxA=5) >> BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + CytoCC(), transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kc)
Rule('catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product', C8A(BidU=None, C3pro=None) + C3pro(Apop=None, C8A=None) | C8A(BidU=None, C3pro=1) % C3pro(Apop=None, C8A=1), catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_2kf, catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_1kr)
Rule('catalysis_1_C8A_catalyzer_C3pro_substrate_C3A_product', C8A(BidU=None, C3pro=1) % C3pro(Apop=None, C8A=1) >> C8A(BidU=None, C3pro=None) + C3A(Xiap=None, ParpU=None, C6pro=None), catalysis_1_C8A_catalyzer_C3pro_substrate_C3A_product_1kc)
Rule('catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product', C3A(Xiap=None, ParpU=None, C6pro=None) + C6pro(C3A=None) | C3A(Xiap=None, ParpU=None, C6pro=1) % C6pro(C3A=1), catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_2kf, catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_1kr)
Rule('catalysis_1_C3A_catalyzer_C6pro_substrate_C6A_product', C3A(Xiap=None, ParpU=None, C6pro=1) % C6pro(C3A=1) >> C3A(Xiap=None, ParpU=None, C6pro=None) + C6A(C8pro=None), catalysis_1_C3A_catalyzer_C6pro_substrate_C6A_product_1kc)
Rule('catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product', C6A(C8pro=None) + C8pro(Fadd=None, C6A=None) | C6A(C8pro=1) % C8pro(Fadd=None, C6A=1), catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_2kf, catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_1kr)
Rule('catalysis_1_C6A_catalyzer_C8pro_substrate_C8A_product', C6A(C8pro=1) % C8pro(Fadd=None, C6A=1) >> C6A(C8pro=None) + C8A(BidU=None, C3pro=None), catalysis_1_C6A_catalyzer_C8pro_substrate_C8A_product_1kc)
Initial(Ligand(Receptor=None), Ligand_0)
Initial(ParpU(C3A=None), ParpU_0)
Initial(C8A(BidU=None, C3pro=None), C8A_0)
Initial(SmacM(BaxA=None), SmacM_0)
Initial(BaxM(BidM=None, BaxA=None), BaxM_0)
Initial(Apop(C3pro=None, Xiap=None), Apop_0)
Initial(Fadd(Receptor=None, C8pro=None), Fadd_0)
Initial(SmacC(Xiap=None), SmacC_0)
Initial(ParpC(), ParpC_0)
Initial(Xiap(SmacC=None, Apop=None, C3A=None), Xiap_0)
Initial(C9(), C9_0)
Initial(C3ub(), C3ub_0)
Initial(C8pro(Fadd=None, C6A=None), C8pro_0)
Initial(Bcl2(BidM=None, BaxA=None), Bcl2_0)
Initial(C3pro(Apop=None, C8A=None), C3pro_0)
Initial(CytoCM(BaxA=None), CytoCM_0)
Initial(CytoCC(), CytoCC_0)
Initial(BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), BaxA_0)
Initial(ApafI(), ApafI_0)
Initial(BidU(C8A=None), BidU_0)
Initial(BidT(), BidT_0)
Initial(C3A(Xiap=None, ParpU=None, C6pro=None), C3A_0)
Initial(ApafA(), ApafA_0)
Initial(BidM(BaxM=None, Bcl2=None), BidM_0)
Initial(Receptor(Ligand=None, Fadd=None), Receptor_0)
Initial(C6A(C8pro=None), C6A_0)
Initial(C6pro(C3A=None), C6pro_0)
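# Minimal simulation sketch (not part of the exported model): integrate the ODEs
# over an assumed time window and report the final amount of cleaved PARP. The
# time span and the choice of observable are illustrative only; this assumes
# PySB's ScipyOdeSimulator backend is available.
if __name__ == '__main__':
    import numpy as np
    from pysb.simulator import ScipyOdeSimulator

    tspan = np.linspace(0, 20000, 101)  # illustrative time points
    result = ScipyOdeSimulator(model, tspan=tspan).run()
    print('final ParpC count:', result.observables['ParpC_obs'][-1])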
| 95.134259 | 798 | 0.804127 |
acf1e849b0fd4542beeb3a9542d01fd7fa8202ff | 29,772 | py | Python | theano/tensor/extra_ops.py | shaibagon/Theano | b4244cfaa1c99007015bb01e859699eec3518053 | ["BSD-3-Clause"] | 1 | 2018-01-31T12:29:10.000Z | 2018-01-31T12:29:10.000Z | theano/tensor/extra_ops.py | AtousaTorabi/Theano_old | ba2d2f74406243112e813df31429721c791a889a | ["BSD-3-Clause"] | null | null | null | theano/tensor/extra_ops.py | AtousaTorabi/Theano_old | ba2d2f74406243112e813df31429721c791a889a | ["BSD-3-Clause"] | null | null | null |
import numpy as np
import numpy
import theano
from theano.tensor import basic
from theano.tensor import nlinalg
from theano import gof, scalar
from theano.gradient import DisconnectedType
tensor = basic
class CumsumOp(theano.Op):
# See function cumsum for docstring
def __init__(self, axis=None):
self.axis = axis
def __eq__(self, other):
return (type(self) == type(other) and
self.axis == other.axis)
def __hash__(self):
return hash(type(self)) ^ hash(self.axis)
def make_node(self, x):
x = basic.as_tensor_variable(x)
out_type = x.type()
if self.axis is None:
out_type = theano.tensor.vector(dtype=x.dtype) # Flatten
return theano.Apply(self, [x], [out_type])
def perform(self, node, inputs, output_storage):
x = inputs[0]
z = output_storage[0]
z[0] = np.cumsum(x, axis=self.axis)
def grad(self, inputs, output_gradients):
[gi] = output_gradients
if self.axis is None:
return [cumsum(gi[::-1])[::-1].reshape(inputs[0].shape)]
# We need to reverse the gradients along ``self.axis``,
# compute cumsum, then reverse again
reverse_slicing = [slice(None, None, None)] * gi.ndim
reverse_slicing[self.axis] = slice(None, None, -1)
reverse_slicing = tuple(reverse_slicing)
return [cumsum(gi[reverse_slicing], self.axis)[reverse_slicing]]
def infer_shape(self, node, shapes):
if self.axis is None:
return [(tensor.prod(shapes[0]),)] # Flatten
return shapes
def c_code(self, node, name, inames, onames, sub):
x, = inames
z, = onames
axis = self.axis
fail = sub['fail']
if self.axis is None or (self.axis == 0 and node.inputs[0].ndim == 1):
code = """
npy_intp shape[1] = { PyArray_SIZE(%(x)s) };
if(!(%(z)s && PyArray_DIMS(%(z)s)[0] == shape[0]))
{
Py_XDECREF(%(z)s);
%(z)s = (PyArrayObject*) PyArray_SimpleNew(1, shape, PyArray_TYPE((PyArrayObject*) py_%(x)s));
}
if (!%(z)s)
%(fail)s;
{
PyObject * t = PyArray_CumSum(
%(x)s, NPY_MAXDIMS,
PyArray_TYPE((PyArrayObject*) py_%(x)s), %(z)s);
if (!t){
%(fail)s;
}
// Because PyArray_CumSum returns a newly created reference on t.
Py_XDECREF(t);
}
""" % locals()
else:
code = """
if(!(%(z)s && PyArray_CompareLists(PyArray_DIMS(%(z)s), PyArray_DIMS(%(x)s), PyArray_NDIM(%(x)s))))
{
Py_XDECREF(%(z)s);
%(z)s = (PyArrayObject*) PyArray_SimpleNew(PyArray_NDIM(%(x)s), PyArray_DIMS(%(x)s), PyArray_TYPE((PyArrayObject*) py_%(x)s));
}
if (!%(z)s)
%(fail)s;
{
PyObject * t = PyArray_CumSum(
%(x)s, %(axis)s,
PyArray_TYPE((PyArrayObject*) py_%(x)s), %(z)s);
if (!t){
%(fail)s;
}
// Because PyArray_CumSum returns a newly created reference on t.
Py_XDECREF(t);
}
""" % locals()
return code
def c_code_cache_version(self):
return (6,)
def __str__(self):
return "%s{%s}" % (self.__class__.__name__, self.axis)
def cumsum(x, axis=None):
"""Return the cumulative sum of the elements along a given axis.
Wrapping of numpy.cumsum.
:param x: Input tensor variable.
:param axis: The axis along which the cumulative sum is computed.
The default (None) is to compute the cumsum over the flattened array.
.. versionadded:: 0.6.1
"""
return CumsumOp(axis=axis)(x)
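# Usage sketch (illustrative only, not part of the original module):
#
#   x = theano.tensor.dmatrix('x')
#   f = theano.function([x], cumsum(x, axis=0))
#   f([[1., 2.], [3., 4.]])   # -> [[1., 2.], [4., 6.]]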
class CumprodOp(theano.Op):
# See function cumprod for docstring
def __init__(self, axis=None):
self.axis = axis
def __eq__(self, other):
return (type(self) == type(other) and
self.axis == other.axis)
def __hash__(self):
return hash(type(self)) ^ hash(self.axis)
def make_node(self, x):
x = basic.as_tensor_variable(x)
out_type = x.type()
if self.axis is None:
out_type = theano.tensor.vector(dtype=x.dtype) # Flatten
return theano.Apply(self, [x], [out_type])
def perform(self, node, inputs, output_storage):
x = inputs[0]
z = output_storage[0]
z[0] = np.cumprod(x, axis=self.axis)
def grad(self, inputs, output_gradients):
x, = inputs
gi, = output_gradients
fx = cumprod(x, axis=self.axis)
if self.axis is None:
return [cumsum((fx * gi)[::-1])[::-1].reshape(inputs[0].shape) / x]
# We need to reverse the gradients along ``self.axis``,
# compute cumsum, then reverse again
reverse_slicing = [slice(None,None,None)] * gi.ndim
reverse_slicing[self.axis] = slice(None,None,-1)
reverse_slicing = tuple(reverse_slicing)
return [cumsum((fx * gi)[reverse_slicing], self.axis)[reverse_slicing] / x]
def infer_shape(self, node, shapes):
if self.axis is None:
return [(tensor.prod(shapes[0]),)] # Flatten
return shapes
def c_code(self, node, name, inames, onames, sub):
x, = inames
z, = onames
axis = self.axis
fail = sub['fail']
if self.axis is None or (self.axis == 0 and node.inputs[0].ndim == 1):
code = """
npy_intp shape[1] = { PyArray_SIZE(%(x)s) };
if(!(%(z)s && PyArray_DIMS(%(z)s)[0] == shape[0]))
{
Py_XDECREF(%(z)s);
%(z)s = (PyArrayObject*) PyArray_SimpleNew(1, shape, PyArray_TYPE((PyArrayObject*) py_%(x)s));
}
if (!%(z)s)
%(fail)s;
{
PyObject * t = PyArray_CumProd(
%(x)s, NPY_MAXDIMS,
PyArray_TYPE((PyArrayObject*) py_%(x)s), %(z)s);
if (!t){
%(fail)s;
}
// Because PyArray_CumSum returns a newly created reference on t.
Py_XDECREF(t);
}
""" % locals()
else:
code = """
if(!(%(z)s && PyArray_CompareLists(PyArray_DIMS(%(z)s), PyArray_DIMS(%(x)s), PyArray_NDIM(%(x)s)) ))
{
Py_XDECREF(%(z)s);
%(z)s = (PyArrayObject*) PyArray_SimpleNew(PyArray_NDIM(%(x)s), PyArray_DIMS(%(x)s), PyArray_TYPE((PyArrayObject*) py_%(x)s));
}
if (!%(z)s)
%(fail)s;
{
PyObject * t = PyArray_CumProd(
%(x)s, %(axis)s,
PyArray_TYPE((PyArrayObject*) py_%(x)s), %(z)s);
if (!t){
%(fail)s;
}
// Because PyArray_CumSum returns a newly created reference on t.
Py_XDECREF(t);
}
""" % locals()
return code
def c_code_cache_version(self):
return (4,)
def __str__(self):
return "%s{%s}" % (self.__class__.__name__, self.axis)
def cumprod(x, axis=None):
"""Return the cumulative product of the elements along a given axis.
Wrapping of numpy.cumprod.
:param x: Input tensor variable.
:param axis: The axis along which the cumulative product is computed.
The default (None) is to compute the cumprod over the flattened array.
.. versionadded:: 0.6.1
"""
return CumprodOp(axis=axis)(x)
class DiffOp(theano.Op):
# See function diff for docstring
def __init__(self, n=1, axis=-1):
self.n = n
self.axis = axis
# numpy return a view in that case.
# TODO, make an optimization that remove this op in this case.
if n == 0:
self.view_map = {0: [0]}
def __eq__(self, other):
return (type(self) == type(other) and
self.n == other.n and
self.axis == other.axis)
def __hash__(self):
return hash(type(self)) ^ hash(self.n) ^ hash(self.axis)
def make_node(self, x):
x = basic.as_tensor_variable(x)
return theano.Apply(self, [x], [x.type()])
def perform(self, node, inputs, output_storage):
x = inputs[0]
z = output_storage[0]
z[0] = np.diff(x, n=self.n, axis=self.axis)
def grad(self, inputs, outputs_gradients):
inputs = inputs[0]
if inputs.ndim != 1:
raise NotImplementedError("Grad is not implemented for inputs with"
"number of dimension other than 1.")
z = outputs_gradients[0]
def _grad_helper(z):
pre = basic.concatenate([[0.], z])
app = basic.concatenate([z, [0.]])
return pre - app
for k in range(self.n):
z = _grad_helper(z)
return [z]
def infer_shape(self, node, ins_shapes):
i0_shapes = ins_shapes[0]
out_shape = list(i0_shapes)
out_shape[self.axis] = out_shape[self.axis] - self.n
return [out_shape]
def __str__(self):
return self.__class__.__name__
def diff(x, n=1, axis=-1):
"""Calculate the n-th order discrete difference along given axis.
The first order difference is given by out[i] = a[i + 1] - a[i]
along the given axis, higher order differences are calculated by
using diff recursively. Wrapping of numpy.diff.
:param x: Input tensor variable.
:param n: The number of times values are differenced, default is 1.
:param axis: The axis along which the difference is taken,
default is the last axis.
.. versionadded:: 0.6
"""
return DiffOp(n=n, axis=axis)(x)
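# Usage sketch (illustrative only, not part of the original module):
#
#   x = theano.tensor.dvector('x')
#   f = theano.function([x], diff(x))
#   f([1., 3., 6., 10.])   # -> [2., 3., 4.]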
class BinCountOp(theano.Op):
# See function bincount for docstring
compatible_type = ('int8', 'int16', 'int32', 'int64',
'uint8', 'uint16', 'uint32', 'uint64')
"""Tuple of all compatible dtype for the parameter of this op."""
def __init__(self, minlength=None):
self.minlength = minlength
if minlength is not None:
numpy_ver = [int(n) for n in numpy.__version__.split('.')[:2]]
if not bool(numpy_ver >= [1, 6]):
raise NotImplementedError(
"BinCountOp with minlength attribute"
" requires NumPy 1.6 or higher.")
def __eq__(self, other):
return (type(self) == type(other) and
self.minlength == other.minlength)
def __hash__(self):
return hash(type(self)) ^ hash(self.minlength)
def make_node(self, x, weights):
x = basic.as_tensor_variable(x)
if x.dtype not in BinCountOp.compatible_type:
raise TypeError("Inputs dtype must be an integer.")
# Some dtypes are not supported by numpy's implementation of bincount.
# Until another one is available, we should fail at graph construction
# time, not wait for execution.
int_bitwidth = theano.gof.python_int_bitwidth()
if int_bitwidth == 64:
numpy_unsupported_dtypes = ('uint64',)
if int_bitwidth == 32:
numpy_unsupported_dtypes = ('uint32', 'int64', 'uint64')
intp_bitwidth = theano.gof.local_bitwidth()
if intp_bitwidth == 32:
out_type = basic.ivector()
elif intp_bitwidth == 64:
out_type = basic.lvector()
if x.dtype in numpy_unsupported_dtypes:
raise TypeError(
("Input dtypes %s are not supported by numpy.bincount, "
% numpy_unsupported_dtypes), x.dtype)
if x.ndim != 1:
raise TypeError("Inputs must be of dimension 1.")
if weights is None:
weights = theano.gof.Constant(theano.gof.Generic(), None)
else:
weights = basic.as_tensor_variable(weights)
out_type = basic.dvector()
if weights.ndim != 1:
raise TypeError("Weights cannot have a number of"
"dimension different of 1.")
return theano.Apply(self, [x, weights], [out_type])
def perform(self, node, inputs, output_storage):
x = inputs[0]
weights = inputs[1]
z = output_storage[0]
if weights is not None and weights.shape != x.shape:
raise TypeError("All inputs must have the same shape.")
# Needed for numpy 1.4.1 compatibility
if self.minlength:
out = np.bincount(x, weights=weights, minlength=self.minlength)
else:
out = np.bincount(x, weights=weights)
z[0] = theano._asarray(out, dtype=node.outputs[0].dtype)
def grad(self, inputs, outputs_gradients):
output = self(*inputs)
if output.dtype.find('int') != -1:
return [inp.zeros_like().astype(theano.config.floatX)
for inp in inputs]
raise NotImplementedError()
def infer_shape(self, node, ins_shapes):
x = node.inputs[0]
m = basic.max(x) + 1
if self.minlength is not None:
m = basic.maximum(m, self.minlength)
return [[m]]
def __str__(self):
return self.__class__.__name__
def bincount(x, weights=None, minlength=None):
"""Count number of occurrences of each value in array of non-negative ints.
The number of bins (of size 1) is one larger than the largest
value in x. If minlength is specified, there will be at least
this number of bins in the output array (though it will be longer
if necessary, depending on the contents of x). Each bin gives the
number of occurrences of its index value in x. If weights is
specified the input array is weighted by it, i.e. if a value n
is found at position i, out[n] += weight[i] instead of out[n] += 1.
Wrapping of numpy.bincount
:param x: 1 dimension, nonnegative ints
:param weights: array of the same shape as x with corresponding weights.
Optional.
:param minlength: A minimum number of bins for the output array.
Optional.
.. versionadded:: 0.6
"""
return BinCountOp(minlength=minlength)(x, weights)
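# Usage sketch (illustrative only, not part of the original module):
#
#   x = theano.tensor.ivector('x')
#   f = theano.function([x], bincount(x))
#   f([0, 1, 1, 3])   # -> [1, 2, 0, 1]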
def squeeze(x):
"""Remove broadcastable dimensions from
the shape of an array.
It returns the input array, but with the
broadcastable dimensions removed. This is
always `x` itself or a view into `x`.
:param x: Input data, tensor variable.
:return: `x` without its broadcastable dimensions.
.. versionadded:: 0.6
"""
view = x.dimshuffle([i for i in range(x.ndim)
if not x.broadcastable[i]])
return view
class RepeatOp(theano.Op):
# See the repeat function for docstring
def __init__(self, axis=None):
self.axis = axis
def __eq__(self, other):
return (type(self) == type(other) and
self.axis == other.axis)
def __hash__(self):
return hash(type(self)) ^ hash(self.axis)
def make_node(self, x, repeats):
x = basic.as_tensor_variable(x)
repeats = basic.as_tensor_variable(repeats)
if repeats.dtype not in tensor.discrete_dtypes:
raise TypeError("repeats.dtype must be an integer.")
# Some dtypes are not supported by numpy's implementation of repeat.
# Until another one is available, we should fail at graph construction
# time, not wait for execution.
ptr_bitwidth = theano.gof.local_bitwidth()
if ptr_bitwidth == 64:
numpy_unsupported_dtypes = ('uint64',)
if ptr_bitwidth == 32:
numpy_unsupported_dtypes = ('uint32', 'int64', 'uint64')
if repeats.dtype in numpy_unsupported_dtypes:
raise TypeError(
("dtypes %s are not supported by numpy.repeat "
"for the 'repeats' parameter, "
% str(numpy_unsupported_dtypes)), repeats.dtype)
if self.axis is None:
broadcastable = [False]
else:
try:
const_reps = basic.get_scalar_constant_value(repeats)
except basic.NotScalarConstantError:
const_reps = None
if const_reps == 1:
broadcastable = x.broadcastable
else:
broadcastable = list(x.broadcastable)
broadcastable[self.axis] = False
out_type = theano.tensor.TensorType(x.dtype, broadcastable)
return theano.Apply(self, [x, repeats], [out_type()])
def perform(self, node, inputs, output_storage):
x = inputs[0]
repeats = inputs[1]
z = output_storage[0]
z[0] = np.repeat(x, repeats=repeats, axis=self.axis)
def connection_pattern(self, node):
return [[True], [False]]
def grad(self, (x, repeats), (gz, )):
if repeats.ndim == 0:
if self.axis is None:
axis = x.ndim
else:
if self.axis >= 0:
axis = self.axis + 1
else:
axis = self.axis + x.ndim + 1
shape = [x.shape[k] for k in range(x.ndim)]
shape.insert(axis, repeats)
return [gz.reshape(shape, x.ndim + 1).sum(axis=axis),
DisconnectedType()()]
elif repeats.ndim == 1:
# For this implementation, we would need to specify the length
# of repeats in order to split gz in the right way to sum
# the good part.
raise NotImplementedError()
else:
raise ValueError()
def infer_shape(self, node, ins_shapes):
i0_shapes = ins_shapes[0]
repeats = node.inputs[1]
out_shape = list(i0_shapes)
# uint64 shape are not supported.
dtype = None
if repeats.dtype in ['uint8', 'uint16', 'uint32']:
dtype = 'int64'
if self.axis is None:
if repeats.ndim == 0:
if len(i0_shapes) == 0:
out_shape = [repeats]
else:
res = 1
for d in i0_shapes:
res = res * d
out_shape = (res * repeats, )
else:
out_shape = [theano.tensor.sum(repeats, dtype=dtype)]
else:
if repeats.ndim == 0:
out_shape[self.axis] = out_shape[self.axis] * repeats
else:
out_shape[self.axis] = theano.tensor.sum(repeats, dtype=dtype)
return [out_shape]
def __str__(self):
return self.__class__.__name__
def repeat(x, repeats, axis=None):
"""Repeat elements of an array.
It returns an array which has the same shape as `x`, except
along the given axis. The axis is used to specify along which
axis to repeat values. By default, use the flattened input
array, and return a flat output array.
The number of repetitions for each element is `repeats`.
`repeats` is broadcasted to fit the length of the given `axis`.
:param x: Input data, tensor variable.
:param repeats: int, scalar or tensor variable.
:param axis: int, optional.
:see: :func:`tensor.tile <tensor.tile>`
.. versionadded:: 0.6
"""
return RepeatOp(axis=axis)(x, repeats)
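# Usage sketch (illustrative only, not part of the original module):
#
#   x = theano.tensor.dvector('x')
#   f = theano.function([x], repeat(x, 2, axis=0))
#   f([1., 2., 3.])   # -> [1., 1., 2., 2., 3., 3.]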
class Bartlett(gof.Op):
# See function bartlett for docstring
def __eq__(self, other):
return type(self) == type(other)
def __hash__(self):
return hash(type(self))
def __str__(self):
return self.__class__.__name__
def make_node(self, M):
M = tensor.as_tensor_variable(M)
if M.ndim != 0:
raise TypeError('%s only works on scalar input'
% self.__class__.__name__)
elif (not M.dtype.startswith('int') and
not M.dtype.startswith('uint')):
# dtype is a theano attribute here
raise TypeError('%s only works on integer input'
% self.__class__.__name__)
return gof.Apply(self, [M], [tensor.dvector()])
def perform(self, node, inputs, out_):
M = inputs[0]
out, = out_
out[0] = numpy.bartlett(M)
def infer_shape(self, node, in_shapes):
temp = node.inputs[0]
M = tensor.switch(tensor.lt(temp, 0),
tensor.cast(0, temp.dtype),
temp)
return [[M]]
def grad(self, inputs, output_grads):
return [None for i in inputs]
bartlett_ = Bartlett()
# I create a function only to have the doc show well.
def bartlett(M):
"""An instance of this class returns the Bartlett spectral window in the
time-domain. The Bartlett window is very similar to a triangular window,
except that the end points are at zero. It is often used in signal
processing for tapering a signal, without generating too much ripple in
the frequency domain.
:param M: (integer scalar) Number of points in the output
window. If zero or less, an empty vector is returned.
:return: (vector of doubles) The triangular window, with the
maximum value normalized to one (the value one appears only if
the number of samples is odd), with the first and last samples
equal to zero.
.. versionadded:: 0.6
"""
return bartlett_(M)
class FillDiagonal(gof.Op):
# See function fill_diagonal for docstring
def __eq__(self, other):
return type(self) == type(other)
def __hash__(self):
return hash(type(self))
def __str__(self):
return self.__class__.__name__
def infer_shape(self, node, in_shapes):
return [in_shapes[0]]
def make_node(self, a, val):
a = tensor.as_tensor_variable(a)
val = tensor.as_tensor_variable(val)
if a.ndim < 2:
raise TypeError('%s: first parameter must have at least'
' two dimensions' % self.__class__.__name__)
elif val.ndim != 0:
raise TypeError('%s: second parameter must be a scalar'
% self.__class__.__name__)
val = tensor.cast(val, dtype=scalar.upcast(a.dtype, val.dtype))
if val.dtype != a.dtype:
raise TypeError('%s: type of second parameter must be the same as'
' the first\'s' % self.__class__.__name__)
return gof.Apply(self, [a, val], [a.type()])
def perform(self, node, inputs, output_storage):
a = inputs[0].copy()
val = inputs[1]
if a.ndim == 2:
# numpy.fill_diagonal up to date(including 1.6.2) have a
# bug for tall matrix.
# For 2-d arrays, we accept rectangular ones.
step = a.shape[1] + 1
end = a.shape[1] * a.shape[1]
# Write the value out into the diagonal.
a.flat[:end:step] = val
else:
numpy.fill_diagonal(a, val)
output_storage[0][0] = a
def grad(self, inp, cost_grad):
"""
Note: The gradient is currently implemented for matrices
only.
"""
a, val = inp
grad = cost_grad[0]
if (a.dtype.startswith('complex')):
return [None, None]
elif a.ndim > 2:
raise NotImplementedError('%s: gradient is currently implemented'
' for matrices only' %
self.__class__.__name__)
wr_a = fill_diagonal(grad, 0) # valid for any number of dimensions
# diag is only valid for matrices
wr_val = theano.tensor.nlinalg.diag(grad).sum()
return [wr_a, wr_val]
fill_diagonal_ = FillDiagonal()
# I create a function only to have the doc show well.
def fill_diagonal(a, val):
""" Returns a copy of an array with all
elements of the main diagonal set to a specified scalar value.
:param a: Rectangular array of at least two dimensions.
:param val: Scalar value to fill the diagonal whose type must be
compatible with that of array 'a' (i.e. 'val' cannot be viewed
as an upcast of 'a').
:return: An array identical to 'a' except that its main diagonal
is filled with scalar 'val'. (For an array 'a' with a.ndim >=
2, the main diagonal is the list of locations a[i, i, ..., i]
(i.e. with indices all identical).)
    Supports rectangular matrices, and tensors with more than 2 dimensions
    provided all of the latter's dimensions are equal.
.. versionadded:: 0.6
"""
return fill_diagonal_(a, val)
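
# A minimal usage sketch for fill_diagonal (added for illustration; not part
# of the original module). It assumes `theano` and `numpy` are importable
# here, as the surrounding code already uses both.
def _fill_diagonal_usage_example():
    import numpy
    import theano
    a = tensor.dmatrix('a')
    val = tensor.dscalar('val')
    f = theano.function([a, val], fill_diagonal(a, val))
    # Every element on the main diagonal of a 3x3 zero matrix becomes 5.
    return f(numpy.zeros((3, 3)), 5.)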
class FillDiagonalOffset(gof.Op):
# See function fill_diagonal_offset for docstring
def __eq__(self, other):
return type(self) == type(other)
def __hash__(self):
return hash(type(self))
def __str__(self):
return self.__class__.__name__
def infer_shape(self, node, in_shapes):
return [in_shapes[0]]
def make_node(self, a, val, offset):
a = tensor.as_tensor_variable(a)
val = tensor.as_tensor_variable(val)
offset = tensor.as_tensor_variable(offset)
if a.ndim != 2:
raise TypeError('%s: first parameter must have exactly'
' two dimensions' % self.__class__.__name__)
elif val.ndim != 0:
raise TypeError('%s: second parameter must be a scalar'
% self.__class__.__name__)
elif offset.ndim != 0:
raise TypeError('%s: third parameter must be a scalar'
% self.__class__.__name__)
val = tensor.cast(val, dtype=scalar.upcast(a.dtype, val.dtype))
if val.dtype != a.dtype:
raise TypeError('%s: type of second parameter must be the same'
' as the first\'s' % self.__class__.__name__)
elif offset.dtype[:3] != 'int':
            raise TypeError('%s: type of third parameter must be an integer;'
                            ' use theano.tensor.cast(input, \'int32/int64\')'
% self.__class__.__name__)
return gof.Apply(self, [a, val, offset], [a.type()])
def perform(self, node, inputs, output_storage):
a = inputs[0].copy()
val = inputs[1]
offset = inputs[2]
height, width = a.shape
"""
Note: The fill_diagonal only support rectangular matrix. The output
of tall matrix is "wrapped", which is an option in numpy 1.9.0
but was regarded as a bug in numpy 1.6.2. Here I implement the
fill_diagonal_offset with unwrapped output, so fill_diagonal_offset
supports tall matrix.(This make a little difference between the output
of fill_diagonal and fill_diagonal_offset only in the case of tall
matrix)
"""
if offset >= 0:
start = offset
num_of_step = min(min(width, height), width - offset)
else:
start = - offset * a.shape[1]
num_of_step = min(min(width, height), height + offset)
step = a.shape[1] + 1
end = start + step * num_of_step
# Write the value out into the diagonal.
a.flat[start:end:step] = val
output_storage[0][0] = a
def grad(self, inp, cost_grad):
"""
Note: The gradient is currently implemented for matrices
only.
"""
a, val, offset = inp
grad = cost_grad[0]
height, width = grad.shape
        if (a.dtype.startswith('complex')):
            # one gradient term is required per input (a, val, offset)
            return [None, None, None]
# only valid for matrices
wr_a = fill_diagonal_offset(grad, 0, offset)
offset_abs = basic.abs_(offset)
pos_offset_flag = basic.ge(offset, 0)
neg_offset_flag = basic.lt(offset, 0)
min_wh = basic.minimum(width, height)
start = offset * pos_offset_flag + offset_abs * width \
* neg_offset_flag
        num_of_step = basic.minimum(min_wh, width * pos_offset_flag
                                    + height * neg_offset_flag - offset_abs)
step = a.shape[1] + 1
end = start + step * num_of_step
        # the slice bounds must be integers
        start = basic.cast(start, 'int32')
        step = basic.cast(step, 'int32')
        end = basic.cast(end, 'int32')
wr_val = grad.flatten()[start:end:step].sum()
wr_offset = theano.gradient.grad_undefined(
self, 2, offset,
"offset is not defined for non-integer offset so"
" fill_diagonal_offset(a,val,offset+eps) is undefined")
        return [wr_a, wr_val, wr_offset]
fill_diagonal_offset = FillDiagonalOffset()
""" Returns a copy of an array with all
elements of the main diagonal set to a specified scalar value.
:param a: Rectangular array of two dimensions.
:param val: Scalar value to fill the diagonal whose type must be
compatible with that of array 'a' (i.e. 'val' cannot be viewed
as an upcast of 'a').
:param offset: Integer scalar. Offset of the diagonal from the main
diagonal. Can be a positive or negative integer.
:return: An array identical to 'a' except that its offset diagonal
is filled with scalar 'val'. The output is unwrapped.
"""
| 33.793417 | 146 | 0.566203 |
acf1e8b22b524c95c9953f779991c8f0020056b5 | 5,076 | py | Python | 07_train/archive/src/xgboost_reviews.py | ichen20/oreilly_book | 8098d8096d9decca6aa5afbb267b9f05ce0570f2 | [
"Apache-2.0"
] | 2,327 | 2020-03-01T09:47:34.000Z | 2021-11-25T12:38:42.000Z | 07_train/archive/src/xgboost_reviews.py | ichen20/oreilly_book | 8098d8096d9decca6aa5afbb267b9f05ce0570f2 | [
"Apache-2.0"
] | 209 | 2020-03-01T17:14:12.000Z | 2021-11-08T20:35:42.000Z | 07_train/archive/src/xgboost_reviews.py | ichen20/oreilly_book | 8098d8096d9decca6aa5afbb267b9f05ce0570f2 | [
"Apache-2.0"
] | 686 | 2020-03-03T17:24:51.000Z | 2021-11-25T23:39:12.000Z | import os
import argparse
import pickle as pkl
import pandas as pd
from sklearn.metrics import accuracy_score, precision_score, classification_report, confusion_matrix
from sklearn import metrics
from sklearn.base import BaseEstimator, TransformerMixin
import nltk
import re
import xgboost as xgb
from xgboost import XGBClassifier
import glob
def load_dataset(path, sep, header):
data = pd.concat([pd.read_csv(f, sep=sep, header=header) for f in glob.glob('{}/*.csv'.format(path))], ignore_index = True)
labels = data.iloc[:,0]
features = data.drop(data.columns[0], axis=1)
if header==None:
# Adjust the column names after dropped the 0th column above
# New column names are 0 (inclusive) to len(features.columns) (exclusive)
new_column_names = list(range(0, len(features.columns)))
features.columns = new_column_names
return features, labels
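
# Illustrative note (not part of the original script): load_dataset() above
# concatenates every *.csv under `path`, treats column 0 as the label and the
# remaining columns as features. A typical call, assuming a SageMaker-style
# channel directory (the path below is an assumption, not taken from this
# script), would be:
#
#   X_train, y_train = load_dataset('/opt/ml/input/data/train', ',', header=None)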
def model_fn(model_dir):
    """
    :param model_dir: The directory where model files are stored.
    :return: a model
    """
    # Loading the directory itself would raise
    # IsADirectoryError: [Errno 21] Is a directory: '/opt/ml/model'
    # so list its contents and load the pickled model file written by training.
    import os
    list_dirs = os.listdir(model_dir)
    for file in list_dirs:
        print(file)
    model_path = os.path.join(model_dir, 'xgboost-model')
    model = pkl.load(open(model_path, 'rb'))
    print(type(model))
    return model
def input_fn(request_body, request_content_type):
"""
Deserialize the Invoke request body into an object we can perform prediction on
"""
"""An input_fn that loads a pickled object"""
if request_content_type == "application/json":
pass
else:
# Handle other content-types here or raise an Exception
# if the content type is not supported.
pass
print(request_body)
return [1]
def predict_fn(input_object, model):
"""
Perform prediction on the deserialized object, with the loaded model
"""
return [1]
def output_fn(output, output_content_type):
"""
Serialize the prediction result into the desired response content type
"""
#return json.dumps({'output':output.reshape(-1).tolist()}), output_content_type
print(output)
return [1]
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--objective', type=str, default='binary:logistic')
parser.add_argument('--max-depth', type=int, default=5)
parser.add_argument('--num-round', type=int, default=1)
parser.add_argument('--train-data', type=str, default=os.environ['SM_CHANNEL_TRAIN'])
parser.add_argument('--validation-data', type=str, default=os.environ['SM_CHANNEL_VALIDATION'])
parser.add_argument('--model-dir', type=str, default=os.environ['SM_MODEL_DIR'])
args, _ = parser.parse_known_args()
objective = args.objective
max_depth = args.max_depth
num_round = args.num_round
train_data = args.train_data
validation_data = args.validation_data
model_dir = args.model_dir
# Load transformed features (is_positive_sentiment, f0, f1, ...)
X_train, y_train = load_dataset(train_data, ',', header=None)
X_validation, y_validation = load_dataset(validation_data, ',', header=None)
xgb_estimator = XGBClassifier(objective=objective,
num_round=num_round,
max_depth=max_depth)
xgb_estimator.fit(X_train, y_train)
# TODO: use the model_dir that is passed in through args
# (currently SM_MODEL_DIR)
os.makedirs(model_dir, exist_ok=True)
model_path = os.path.join(model_dir, 'xgboost-model')
pkl.dump(xgb_estimator, open(model_path, 'wb'))
print('Wrote model to {}'.format(model_path))
xgb_estimator_restored = pkl.load(open(model_path, 'rb'))
type(xgb_estimator_restored)
preds_validation = xgb_estimator_restored.predict(X_validation)
print('Validation Accuracy: ', accuracy_score(y_validation, preds_validation))
print('Validation Precision: ', precision_score(y_validation, preds_validation, average=None))
print(classification_report(y_validation, preds_validation))
# TODO: Convert to preds_validation_0_or_1
##############
# Note: roc_auc is causing the following:
# ValueError: multiclass format is not supported
# Traceback (most recent call last):
# File "/miniconda3/lib/python3.6/runpy.py", line 193, in _run_module_as_main
# "__main__", mod_spec)
# File "/miniconda3/lib/python3.6/runpy.py", line 85, in _run_code
# exec(code, run_globals)
# File "/opt/ml/code/xgboost_reviews.py", line 75, in <module>
# auc = round(metrics.roc_auc_score(y_validation, preds_validation), 4)
# File "/miniconda3/lib/python3.6/site-packages/sklearn/metrics/ranking.py", line 356, in roc_auc_score
# sample_weight=sample_weight)
# File "/miniconda3/lib/python3.6/site-packages/sklearn/metrics/base.py", line 74, in _average_binary_score
# raise ValueError("{0} format is not supported".format(y_type))
# auc = round(metrics.roc_auc_score(y_validation, preds_validation), 4)
# print('AUC is ' + repr(auc))
| 35.496503 | 127 | 0.692868 |
acf1e9532b7ac771f3df57c8b67c24d73d5c6aee | 1,721 | py | Python | pylib/rtstats_util.py | mustbei/rtstats | f1466c2af7b0ffa1c1433ede8276f915008d0170 | [
"Apache-2.0"
] | 2 | 2017-02-09T18:59:50.000Z | 2017-02-09T19:07:36.000Z | pylib/rtstats_util.py | mustbei/rtstats | f1466c2af7b0ffa1c1433ede8276f915008d0170 | [
"Apache-2.0"
] | 8 | 2016-10-04T14:01:01.000Z | 2017-04-20T12:39:40.000Z | pylib/rtstats_util.py | mustbei/rtstats | f1466c2af7b0ffa1c1433ede8276f915008d0170 | [
"Apache-2.0"
] | 2 | 2019-04-01T19:01:52.000Z | 2019-11-22T20:26:31.000Z | """Collection of Utilities for rtstats"""
import json
import os
os.environ["MPLCONFIGDIR"] = "/tmp" # hack
import psycopg2
import matplotlib
from matplotlib.ticker import FuncFormatter
import matplotlib.dates as mdates
matplotlib.use("agg")
import matplotlib.pyplot as plt
from pandas.plotting import register_matplotlib_converters
register_matplotlib_converters()
def get_config():
"""Return a dict() of our runtime configuration"""
fn = "%s/settings.json" % (
os.path.join(os.path.dirname(__file__), "../config"),
)
return json.load(open(fn))
def get_dbconn(rw=False):
"""return a database connection"""
config = get_config()
dbopts = config["databaserw" if rw is True else "databasero"]
return psycopg2.connect(
dbname=dbopts["name"],
host=dbopts["host"],
user=dbopts["user"],
password=dbopts["password"],
)
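
# Illustrative only: the shape of the settings.json file loaded by get_config()
# and consumed by get_dbconn() above. The key names come from the lookups in
# this module; the values are placeholders, not real credentials.
#
# {
#   "databasero": {"name": "rtstats", "host": "dbhost", "user": "ro_user",
#                  "password": "..."},
#   "databaserw": {"name": "rtstats", "host": "dbhost", "user": "rw_user",
#                  "password": "..."}
# }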
def fancy_labels(ax):
"""Make matplotlib date axis labels great again"""
xlim = ax.get_xlim()
days = xlim[1] - xlim[0]
daily = True
if days < 4:
daily = False
ax.xaxis.set_major_locator(mdates.HourLocator(range(0, 24, 4)))
elif days < 31:
ax.xaxis.set_major_locator(mdates.DayLocator([1, 8, 15, 22, 29]))
elif days < 63:
ax.xaxis.set_major_locator(mdates.DayLocator([1, 15]))
else:
ax.xaxis.set_major_locator(mdates.DayLocator([1]))
def my_formatter(x, pos=None):
x = mdates.num2date(x)
if daily:
fmt = "%-d %b"
elif pos == 0 or x.hour == 0:
fmt = "%-Hz\n%-d %b"
else:
fmt = "%-H"
return x.strftime(fmt)
ax.xaxis.set_major_formatter(FuncFormatter(my_formatter))
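
# A minimal usage sketch (added for illustration; not part of the original
# module). fancy_labels() expects an axes whose x-axis already holds dates,
# e.g. after plotting against datetime values.
def _fancy_labels_usage_example(times, values):
    # `times` is assumed to be a sequence of datetime objects and `values`
    # a matching sequence of numbers; both are hypothetical inputs.
    fig, ax = plt.subplots()
    ax.plot(times, values)
    fancy_labels(ax)
    return fig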
| 25.308824 | 73 | 0.629866 |
acf1e99ee8ad9d9be3388dd75151ca472d6eb644 | 2,464 | py | Python | data/p4VQE/R4/benchmark/startQiskit422.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | data/p4VQE/R4/benchmark/startQiskit422.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | data/p4VQE/R4/benchmark/startQiskit422.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | # qubit number=3
# total number=11
import numpy as np
from qiskit import QuantumCircuit, execute, Aer, QuantumRegister, ClassicalRegister, transpile, BasicAer, IBMQ
import networkx as nx
from qiskit.visualization import plot_histogram
from typing import *
from pprint import pprint
from math import log2
from collections import Counter
from qiskit.test.mock import FakeVigo, FakeYorktown
kernel = 'circuit/bernstein'
def make_circuit(n:int) -> QuantumCircuit:
# circuit begin
input_qubit = QuantumRegister(n,"qc")
prog = QuantumCircuit(input_qubit)
prog.h(input_qubit[0]) # number=1
prog.h(input_qubit[1]) # number=2
prog.h(input_qubit[2]) # number=8
prog.h(input_qubit[2]) # number=3
prog.x(input_qubit[2]) # number=7
prog.h(input_qubit[3]) # number=4
for edge in E:
k = edge[0]
l = edge[1]
prog.cp(-2 * gamma, input_qubit[k-1], input_qubit[l-1])
prog.p(gamma, k)
prog.p(gamma, l)
prog.rx(2 * beta, range(len(V)))
prog.swap(input_qubit[1],input_qubit[0]) # number=6
prog.cx(input_qubit[1],input_qubit[0]) # number=9
prog.cx(input_qubit[1],input_qubit[0]) # number=10
# circuit end
return prog
if __name__ == '__main__':
n = 4
V = np.arange(0, n, 1)
E = [(0, 1, 1.0), (0, 2, 1.0), (1, 2, 1.0), (3, 2, 1.0), (3, 1, 1.0)]
G = nx.Graph()
G.add_nodes_from(V)
G.add_weighted_edges_from(E)
step_size = 0.1
a_gamma = np.arange(0, np.pi, step_size)
a_beta = np.arange(0, np.pi, step_size)
a_gamma, a_beta = np.meshgrid(a_gamma, a_beta)
F1 = 3 - (np.sin(2 * a_beta) ** 2 * np.sin(2 * a_gamma) ** 2 - 0.5 * np.sin(4 * a_beta) * np.sin(4 * a_gamma)) * (
1 + np.cos(4 * a_gamma) ** 2)
result = np.where(F1 == np.amax(F1))
a = list(zip(result[0], result[1]))[0]
gamma = a[0] * step_size
beta = a[1] * step_size
prog = make_circuit(4)
sample_shot =3962
writefile = open("../data/startQiskit422.csv", "w")
# prog.draw('mpl', filename=(kernel + '.png'))
backend = BasicAer.get_backend('qasm_simulator')
circuit1 = transpile(prog, FakeYorktown())
circuit1.measure_all()
prog = circuit1
info = execute(prog,backend=backend, shots=sample_shot).result().get_counts()
print(info, file=writefile)
print("results end", file=writefile)
print(circuit1.depth(), file=writefile)
print(circuit1, file=writefile)
writefile.close()
| 27.377778 | 118 | 0.634334 |
acf1e9db1432a3fd3354375927c0b3ce07da98e7 | 257 | py | Python | parsec/commands/cmd_datatypes.py | simonbray/parsec | c0e123cbf7cb1289ec722357a6262f716575e4d9 | [
"Apache-2.0"
] | 8 | 2015-03-27T17:09:15.000Z | 2021-07-13T15:33:02.000Z | parsec/commands/cmd_datatypes.py | simonbray/parsec | c0e123cbf7cb1289ec722357a6262f716575e4d9 | [
"Apache-2.0"
] | 30 | 2015-02-27T21:21:47.000Z | 2021-08-31T14:19:55.000Z | parsec/commands/cmd_datatypes.py | simonbray/parsec | c0e123cbf7cb1289ec722357a6262f716575e4d9 | [
"Apache-2.0"
] | 12 | 2017-06-01T03:49:23.000Z | 2021-07-13T15:33:06.000Z | import click
from parsec.commands.datatypes.get_datatypes import cli as get_datatypes
from parsec.commands.datatypes.get_sniffers import cli as get_sniffers
@click.group()
def cli():
pass
cli.add_command(get_datatypes)
cli.add_command(get_sniffers)
| 19.769231 | 72 | 0.817121 |
acf1e9e8c383681c6fb998eed235855302ac87f9 | 16,692 | py | Python | ansible/venv/lib/python2.7/site-packages/ansible/modules/cloud/amazon/aws_batch_compute_environment.py | gvashchenkolineate/gvashchenkolineate_infra_trytravis | 0fb18850afe0d8609693ba4b23f29c7cda17d97f | [
"MIT"
] | 17 | 2017-06-07T23:15:01.000Z | 2021-08-30T14:32:36.000Z | ansible/venv/lib/python2.7/site-packages/ansible/modules/cloud/amazon/aws_batch_compute_environment.py | gvashchenkolineate/gvashchenkolineate_infra_trytravis | 0fb18850afe0d8609693ba4b23f29c7cda17d97f | [
"MIT"
] | 9 | 2017-06-25T03:31:52.000Z | 2021-05-17T23:43:12.000Z | ansible/venv/lib/python2.7/site-packages/ansible/modules/cloud/amazon/aws_batch_compute_environment.py | gvashchenkolineate/gvashchenkolineate_infra_trytravis | 0fb18850afe0d8609693ba4b23f29c7cda17d97f | [
"MIT"
] | 3 | 2018-05-26T21:31:22.000Z | 2019-09-28T17:00:45.000Z | #!/usr/bin/python
# Copyright (c) 2017 Jon Meran <jonathan.meran@sonos.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: aws_batch_compute_environment
short_description: Manage AWS Batch Compute Environments
description:
- This module allows the management of AWS Batch Compute Environments.
It is idempotent and supports "Check" mode. Use module M(aws_batch_compute_environment) to manage the compute
environment, M(aws_batch_job_queue) to manage job queues, M(aws_batch_job_definition) to manage job definitions.
version_added: "2.5"
author: Jon Meran (@jonmer85)
options:
compute_environment_name:
description:
- The name for your compute environment. Up to 128 letters (uppercase and lowercase), numbers, and underscores
are allowed.
required: true
type:
description:
- The type of the compute environment.
required: true
choices: ["MANAGED", "UNMANAGED"]
state:
description:
- Describes the desired state.
default: "present"
choices: ["present", "absent"]
compute_environment_state:
description:
- The state of the compute environment. If the state is ENABLED, then the compute environment accepts jobs
from a queue and can scale out automatically based on queues.
default: "ENABLED"
choices: ["ENABLED", "DISABLED"]
service_role:
description:
- The full Amazon Resource Name (ARN) of the IAM role that allows AWS Batch to make calls to other AWS
services on your behalf.
required: true
compute_resource_type:
description:
- The type of compute resource.
required: true
choices: ["EC2", "SPOT"]
minv_cpus:
description:
- The minimum number of EC2 vCPUs that an environment should maintain.
required: true
maxv_cpus:
description:
- The maximum number of EC2 vCPUs that an environment can reach.
required: true
desiredv_cpus:
description:
- The desired number of EC2 vCPUS in the compute environment.
instance_types:
description:
- The instance types that may be launched.
required: true
image_id:
description:
- The Amazon Machine Image (AMI) ID used for instances launched in the compute environment.
subnets:
description:
- The VPC subnets into which the compute resources are launched.
required: true
security_group_ids:
description:
- The EC2 security groups that are associated with instances launched in the compute environment.
required: true
ec2_key_pair:
description:
- The EC2 key pair that is used for instances launched in the compute environment.
instance_role:
description:
- The Amazon ECS instance role applied to Amazon EC2 instances in a compute environment.
required: true
tags:
description:
- Key-value pair tags to be applied to resources that are launched in the compute environment.
bid_percentage:
description:
- The minimum percentage that a Spot Instance price must be when compared with the On-Demand price for that
instance type before instances are launched. For example, if your bid percentage is 20%, then the Spot price
must be below 20% of the current On-Demand price for that EC2 instance.
spot_iam_fleet_role:
description:
- The Amazon Resource Name (ARN) of the Amazon EC2 Spot Fleet IAM role applied to a SPOT compute environment.
requirements:
- boto3
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
---
- hosts: localhost
gather_facts: no
vars:
state: present
tasks:
- name: My Batch Compute Environment
aws_batch_compute_environment:
compute_environment_name: computeEnvironmentName
state: present
region: us-east-1
compute_environment_state: ENABLED
type: MANAGED
compute_resource_type: EC2
minv_cpus: 0
maxv_cpus: 2
desiredv_cpus: 1
instance_types:
- optimal
subnets:
- my-subnet1
- my-subnet2
security_group_ids:
- my-sg1
- my-sg2
instance_role: arn:aws:iam::<account>:instance-profile/<role>
tags:
tag1: value1
tag2: value2
service_role: arn:aws:iam::<account>:role/service-role/<role>
register: aws_batch_compute_environment_action
- name: show results
debug:
var: aws_batch_compute_environment_action
'''
RETURN = '''
---
output:
description: "returns what action was taken, whether something was changed, invocation and response"
returned: always
sample:
batch_compute_environment_action: none
changed: false
invocation:
module_args:
aws_access_key: ~
aws_secret_key: ~
bid_percentage: ~
compute_environment_name: <name>
compute_environment_state: ENABLED
compute_resource_type: EC2
desiredv_cpus: 0
ec2_key_pair: ~
ec2_url: ~
image_id: ~
instance_role: "arn:aws:iam::..."
instance_types:
- optimal
maxv_cpus: 8
minv_cpus: 0
profile: ~
region: us-east-1
security_group_ids:
- "*******"
security_token: ~
service_role: "arn:aws:iam::...."
spot_iam_fleet_role: ~
state: present
subnets:
- "******"
tags:
Environment: <name>
Name: <name>
type: MANAGED
validate_certs: true
response:
computeEnvironmentArn: "arn:aws:batch:...."
computeEnvironmentName: <name>
computeResources:
desiredvCpus: 0
instanceRole: "arn:aws:iam::..."
instanceTypes:
- optimal
maxvCpus: 8
minvCpus: 0
securityGroupIds:
- "******"
subnets:
- "*******"
tags:
Environment: <name>
Name: <name>
type: EC2
ecsClusterArn: "arn:aws:ecs:....."
serviceRole: "arn:aws:iam::..."
state: ENABLED
status: VALID
statusReason: "ComputeEnvironment Healthy"
type: MANAGED
type: dict
'''
from ansible.module_utils._text import to_native
from ansible.module_utils.aws.batch import AWSConnection
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import ec2_argument_spec, HAS_BOTO3
from ansible.module_utils.ec2 import snake_dict_to_camel_dict, camel_dict_to_snake_dict
import re
import traceback
try:
from botocore.exceptions import ClientError, ParamValidationError, MissingParametersError
except ImportError:
pass # Handled by HAS_BOTO3
# ---------------------------------------------------------------------------------------------------
#
# Helper Functions & classes
#
# ---------------------------------------------------------------------------------------------------
def set_api_params(module, module_params):
"""
Sets module parameters to those expected by the boto3 API.
:param module:
:param module_params:
:return:
"""
api_params = dict((k, v) for k, v in dict(module.params).items() if k in module_params and v is not None)
return snake_dict_to_camel_dict(api_params)
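
# Illustrative only: with module params such as
#   {'compute_environment_name': 'my_env', 'type': 'MANAGED', 'service_role': None}
# and module_params = ('compute_environment_name', 'type', 'service_role'),
# set_api_params() drops the None entry and camel-cases the keys, yielding
#   {'computeEnvironmentName': 'my_env', 'type': 'MANAGED'}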
def validate_params(module, aws):
"""
Performs basic parameter validation.
:param module:
:param aws:
:return:
"""
compute_environment_name = module.params['compute_environment_name']
# validate compute environment name
if not re.search(r'^[\w\_:]+$', compute_environment_name):
module.fail_json(
msg="Function compute_environment_name {0} is invalid. Names must contain only alphanumeric characters "
"and underscores.".format(compute_environment_name)
)
if not compute_environment_name.startswith('arn:aws:batch:'):
if len(compute_environment_name) > 128:
module.fail_json(msg='compute_environment_name "{0}" exceeds 128 character limit'
.format(compute_environment_name))
return
# ---------------------------------------------------------------------------------------------------
#
# Batch Compute Environment functions
#
# ---------------------------------------------------------------------------------------------------
def get_current_compute_environment(module, connection):
try:
environments = connection.client().describe_compute_environments(
computeEnvironments=[module.params['compute_environment_name']]
)
if len(environments['computeEnvironments']) > 0:
return environments['computeEnvironments'][0]
else:
return None
except ClientError:
return None
def create_compute_environment(module, aws):
"""
Adds a Batch compute environment
:param module:
:param aws:
:return:
"""
client = aws.client('batch')
changed = False
# set API parameters
params = (
'compute_environment_name', 'type', 'service_role')
api_params = set_api_params(module, params)
if module.params['compute_environment_state'] is not None:
api_params['state'] = module.params['compute_environment_state']
compute_resources_param_list = ('minv_cpus', 'maxv_cpus', 'desiredv_cpus', 'instance_types', 'image_id', 'subnets',
'security_group_ids', 'ec2_key_pair', 'instance_role', 'tags', 'bid_percentage',
'spot_iam_fleet_role')
compute_resources_params = set_api_params(module, compute_resources_param_list)
if module.params['compute_resource_type'] is not None:
compute_resources_params['type'] = module.params['compute_resource_type']
# if module.params['minv_cpus'] is not None:
# compute_resources_params['minvCpus'] = module.params['minv_cpus']
api_params['computeResources'] = compute_resources_params
try:
if not module.check_mode:
client.create_compute_environment(**api_params)
changed = True
except (ClientError, ParamValidationError, MissingParametersError) as e:
module.fail_json(msg='Error creating compute environment: {0}'.format(to_native(e)),
exception=traceback.format_exc())
return changed
def remove_compute_environment(module, aws):
"""
Remove a Batch compute environment
:param module:
:param aws:
:return:
"""
client = aws.client('batch')
changed = False
# set API parameters
api_params = {'computeEnvironment': module.params['compute_environment_name']}
try:
if not module.check_mode:
client.delete_compute_environment(**api_params)
changed = True
except (ClientError, ParamValidationError, MissingParametersError) as e:
module.fail_json(msg='Error removing compute environment: {0}'.format(to_native(e)),
exception=traceback.format_exc())
return changed
def manage_state(module, aws):
changed = False
current_state = 'absent'
state = module.params['state']
compute_environment_state = module.params['compute_environment_state']
compute_environment_name = module.params['compute_environment_name']
service_role = module.params['service_role']
minv_cpus = module.params['minv_cpus']
maxv_cpus = module.params['maxv_cpus']
desiredv_cpus = module.params['desiredv_cpus']
action_taken = 'none'
update_env_response = ''
check_mode = module.check_mode
# check if the compute environment exists
current_compute_environment = get_current_compute_environment(module, aws)
response = current_compute_environment
if current_compute_environment:
current_state = 'present'
if state == 'present':
if current_state == 'present':
updates = False
# Update Batch Compute Environment configuration
compute_kwargs = {'computeEnvironment': compute_environment_name}
# Update configuration if needed
compute_resources = {}
if compute_environment_state and current_compute_environment['state'] != compute_environment_state:
compute_kwargs.update({'state': compute_environment_state})
updates = True
if service_role and current_compute_environment['serviceRole'] != service_role:
compute_kwargs.update({'serviceRole': service_role})
updates = True
if minv_cpus is not None and current_compute_environment['computeResources']['minvCpus'] != minv_cpus:
compute_resources['minvCpus'] = minv_cpus
if maxv_cpus is not None and current_compute_environment['computeResources']['maxvCpus'] != maxv_cpus:
compute_resources['maxvCpus'] = maxv_cpus
if desiredv_cpus is not None and current_compute_environment['computeResources']['desiredvCpus'] != desiredv_cpus:
compute_resources['desiredvCpus'] = desiredv_cpus
if len(compute_resources) > 0:
compute_kwargs['computeResources'] = compute_resources
updates = True
if updates:
try:
if not check_mode:
update_env_response = aws.client().update_compute_environment(**compute_kwargs)
if not update_env_response:
module.fail_json(msg='Unable to get compute environment information after creating')
changed = True
action_taken = "updated"
except (ParamValidationError, ClientError) as e:
module.fail_json(msg="Unable to update environment: {0}".format(to_native(e)),
exception=traceback.format_exc())
else:
# Create Batch Compute Environment
changed = create_compute_environment(module, aws)
# Describe compute environment
action_taken = 'added'
response = get_current_compute_environment(module, aws)
if not response:
module.fail_json(msg='Unable to get compute environment information after creating')
else:
if current_state == 'present':
# remove the compute environment
changed = remove_compute_environment(module, aws)
action_taken = 'deleted'
return dict(changed=changed, batch_compute_environment_action=action_taken, response=response)
# ---------------------------------------------------------------------------------------------------
#
# MAIN
#
# ---------------------------------------------------------------------------------------------------
def main():
"""
Main entry point.
:return dict: changed, batch_compute_environment_action, response
"""
argument_spec = ec2_argument_spec()
argument_spec.update(
dict(
state=dict(default='present', choices=['present', 'absent']),
compute_environment_name=dict(required=True),
type=dict(required=True, choices=['MANAGED', 'UNMANAGED']),
compute_environment_state=dict(required=False, default='ENABLED', choices=['ENABLED', 'DISABLED']),
service_role=dict(required=True),
compute_resource_type=dict(required=True, choices=['EC2', 'SPOT']),
minv_cpus=dict(type='int', required=True),
maxv_cpus=dict(type='int', required=True),
desiredv_cpus=dict(type='int'),
instance_types=dict(type='list', required=True),
image_id=dict(),
subnets=dict(type='list', required=True),
security_group_ids=dict(type='list', required=True),
ec2_key_pair=dict(),
instance_role=dict(required=True),
tags=dict(type='dict'),
bid_percentage=dict(type='int'),
spot_iam_fleet_role=dict(),
region=dict(aliases=['aws_region', 'ec2_region'])
)
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True
)
# validate dependencies
if not HAS_BOTO3:
module.fail_json(msg='boto3 is required for this module.')
aws = AWSConnection(module, ['batch'])
validate_params(module, aws)
results = manage_state(module, aws)
module.exit_json(**camel_dict_to_snake_dict(results, ignore_list=['Tags']))
if __name__ == '__main__':
main()
| 33.184891 | 126 | 0.63126 |
acf1ea2547c6d60a46a742467f1b5ad040a33873 | 6,966 | py | Python | resonate-carla/leaderboard/team_code/plotter.py | scope-lab-vu/Resonate-Dynamic-Risk | 46972bdb0a2b6b08cc188a9f1f6567971c9d263d | [
"MIT"
] | 3 | 2021-08-15T05:02:17.000Z | 2022-03-16T11:25:45.000Z | resonate-carla/leaderboard/team_code/plotter.py | scope-lab-vu/Resonate-Dynamic-Risk | 46972bdb0a2b6b08cc188a9f1f6567971c9d263d | [
"MIT"
] | null | null | null | resonate-carla/leaderboard/team_code/plotter.py | scope-lab-vu/Resonate-Dynamic-Risk | 46972bdb0a2b6b08cc188a9f1f6567971c9d263d | [
"MIT"
] | 2 | 2021-03-21T02:35:17.000Z | 2021-06-02T22:40:07.000Z | import cv2
import glob
import numpy as np
import scipy as sp
from matplotlib import *
from pylab import *
import time
import os
from sklearn.utils import shuffle
from keras.models import model_from_json
from keras.losses import mse
import csv
import pandas as pd
import re
from itertools import cycle
def extract_run_path(path):
runs_path = []
for run_data in glob.glob(path +"/run*.csv"):
runs_path.append(run_data)
#runs_path.sort(reverse=True)
# for i in range(1,runs+1):
# runs_path.append(path + 'run%d.csv'%i)
#print(runs_path)
return runs_path
def extract_collision_data(path):
collision_path = []
for collision_data in glob.glob(path +"/data*.txt"):
collision_path.append(collision_data)
collision_path.sort(reverse=False)
print(collision_path)
final_colisions = []
for j in range(len(collision_path)):
file1 = open(collision_path[j], 'r')
Lines = file1.readlines()
count = 0
data = []
collisions = []
col = []
for line in Lines:
data.append(line.strip())
for i in range(len(data)):
number = []
if(data[i]!= "" and data[i][0].isdigit()):
for k in range(len(data[i])):
if(data[i][k].isdigit()):
number.append(data[i][k])
elif(data[i][k] == " "):
break
collisions.append(number)
for x in range(len(collisions)):
col.append("".join(collisions[x]))
final_colisions.append(col)
#print(final_colisions)
return final_colisions
def extract_fault_data(fault_data_path):
fault_data = []
with open(fault_data_path, 'r') as readFile:
reader = csv.reader(readFile)
next(reader)
for row in reader:
data = []
data.append(row[0])
if(int(row[0])==1):
data1 = row[1].strip().split(',')
data1[0] = data1[0][1:]
data1[len(data1)-1]=data1[len(data1)-1][:-1]
data2 = row[2].strip().split(',')
data2[0] = data2[0][1:]
data2[len(data2)-1]=data2[len(data2)-1][:-1]
data3 = row[3].strip().split(',')
data3[0] = data3[0][1:]
data3[len(data3)-1]=data3[len(data3)-1][:-1]
data.append(float(data1[0])/20)
data.append(float(data1[0])/20 + float(data2[0])/20)
data.append(data3[0])
fault_data.append(data)
if(int(row[0])> 1):
data1 = row[1].strip().split(',')
data1[0] = data1[0][1:]
data1[len(data1)-1]=data1[len(data1)-1][:-1]
data2 = row[2].strip().split(',')
data2[0] = data2[0][1:]
data2[len(data2)-1]=data2[len(data2)-1][:-1]
data3 = row[3].strip().split(',')
data3[0] = data3[0][1:]
data3[len(data3)-1]=data3[len(data3)-1][:-1]
data.append(float(data1[0])/20)
data.append(float(data1[len(data1)-1])/20)
data.append(float(data1[0])/20 + float(data2[0])/20)
data.append(float(data1[len(data2)-1])/20 + float(data2[len(data2)-1])/20)
data.append(data3[0])
data.append(data3[len(data3)-1])
fault_data.append(data)
#print(fault_data)
return fault_data
def extract_weather_data(weather_path):
weather_data = []
with open(weather_path, 'r') as readFile:
reader = csv.reader(readFile)
for row in reader:
weather_data.append(row)
#print(weather_data)
return weather_data
def plot(runs_path,weather_data,collision_times,fault_data,l,path):
risk = []
mval = []
steps = []
time = []
with open(runs_path, 'r') as readFile:
reader = csv.reader(readFile)
next(reader)
for row in reader:
steps.append(float(row[0]))
time.append(float(row[0])/20.0)
risk.append(float(row[2]))
mval.append(float(row[1]))
fig, ax1 = plt.subplots()
color = 'tab:red'
ax1.set_xlabel('Time (s)')
ax1.set_ylabel('Risk Score', color=color)
ax1.set_ylim([0, 1])
x=0
cycol = cycle('bgrcmk')
if(len(collision_times)!=0):
for xc in collision_times:
ax1.axvline(x=float(xc),linewidth = 2, linestyle ="--", color ='green', label="collision" if x == 0 else "")
x+=1
if(fault_data[0]=="1"):
ax1.axvspan(fault_data[1],fault_data[2], alpha=0.2, color = 'yellow', label = "fault %s"%fault_data[3])
if(fault_data[0] > "1"):
ax1.axvspan(fault_data[1],fault_data[3], alpha=0.2, color = next(cycol), label = "fault %s"%fault_data[5])
ax1.axvspan(fault_data[2],fault_data[4], alpha=0.2, color = next(cycol), label = "fault %s"%fault_data[6])
ax1.plot(time, risk, color=color, label= 'risk')
ax1.tick_params(axis='y', labelcolor=color)
#ax2 = ax1.twinx() # instantiate a second axes that shares the same x-axis
#color = 'tab:blue'
#ax2.set_ylabel('Monitor_result', color=color)
#ax2.plot(time, mval, color=color, label = 'monitor results')
#ax2.set_ylim([-5, 40])
#ax2.tick_params(axis='y', labelcolor=color)
#ax2.set_title("Scene with Cloud:%s, Precip:%s, Precip-deposit:%s"%(weather_data[4],weather_data[2],weather_data[3]))
fig.legend(loc=8, bbox_to_anchor=(0.5, -0.02),fancybox=True, shadow=True, ncol=4)
#fig.legend(loc='upper center', bbox_to_anchor=(0.5, -0.05),fancybox=True, shadow=True, ncol=2)
fig.subplots_adjust(bottom=0.5)
fig.tight_layout() # otherwise the right y-label is slightly clipped
fig.savefig(path+'/run%d.png'%(l), bbox_inches='tight')
plt.cla()
#plt.show()
#if __name__ == '__main__':
# #collision_times = [[20,46],[37],[17,20,42,45,46]]
# runs = int(input("Enter the simulation run to be plotted:"))
# path = "/home/scope/Carla/ICCPS_CARLA_challenge/leaderboard/data/my_data/simulation%d/"%runs
# weather_path = path + "simulation_data.csv" #"/home/scope/Carla/ICCPS_CARLA_challenge/leaderboard/data/my_data/simulation%d/simulation_data.csv"%runs
# fault_data_path = path + "fault_data.csv" #"/home/scope/Carla/ICCPS_CARLA_challenge/leaderboard/data/my_data/simulation%d/fault_data.csv"%runs
# collision_times = extract_collision_data(path)
# fault_data = extract_fault_data(fault_data_path)
# runs_path = extract_run_path(path)
# weather_data = extract_weather_data(weather_path)
# for i in range(len(runs_path)):
# plot(runs_path[i],weather_data[i+1],collision_times[i],fault_data[i])
| 39.805714 | 156 | 0.570772 |
acf1ea69e7ae29aac00fe565f76146fe1c62c86b | 696 | py | Python | raidcal/maincal/sampledata.py | katajakasa/Raidcal | 61d17d8ca0c58c09debb31e868afdd377a87f38d | [
"MIT"
] | null | null | null | raidcal/maincal/sampledata.py | katajakasa/Raidcal | 61d17d8ca0c58c09debb31e868afdd377a87f38d | [
"MIT"
] | null | null | null | raidcal/maincal/sampledata.py | katajakasa/Raidcal | 61d17d8ca0c58c09debb31e868afdd377a87f38d | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from raidcal.maincal.models import Topic, Message
from django.contrib.auth.models import User
def generate_sampledata(options):
threads = int(options.get('thread_count', 10))
posts = int(options.get('post_count', 10))
user = User.objects.get(pk=1)
for m in xrange(0, threads):
topic = Topic()
topic.title = u'Lorem ipsum dolor sit amet'
topic.user = user
topic.save()
for k in xrange(0, posts):
message = Message()
message.topic = topic
message.user = user
message.content = u'Lorem Ipsum dolor sit amet <a href="asdasd">aasdasdasd</a>'
message.save()
| 30.26087 | 91 | 0.606322 |
acf1eab4f9e097717769a0dfc91c66f338c1b7aa | 2,298 | py | Python | test/functional/wallet_bumpfee_totalfee_deprecation.py | rebitcoin/rebitcoin | 87a39edad2362c3f196a98d4703f6f23cde9c13e | [
"MIT"
] | 1 | 2019-12-05T08:16:41.000Z | 2019-12-05T08:16:41.000Z | test/functional/wallet_bumpfee_totalfee_deprecation.py | rebitcoin/rebitcoin | 87a39edad2362c3f196a98d4703f6f23cde9c13e | [
"MIT"
] | null | null | null | test/functional/wallet_bumpfee_totalfee_deprecation.py | rebitcoin/rebitcoin | 87a39edad2362c3f196a98d4703f6f23cde9c13e | [
"MIT"
] | 1 | 2020-11-04T06:59:41.000Z | 2020-11-04T06:59:41.000Z | #!/usr/bin/env python3
# Copyright (c) 2019 The ReBitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test deprecation of passing `totalFee` to the bumpfee RPC."""
from decimal import Decimal
from test_framework.messages import BIP125_SEQUENCE_NUMBER
from test_framework.test_framework import ReBitcoinTestFramework
from test_framework.util import assert_raises_rpc_error
class BumpFeeWithTotalFeeArgumentDeprecationTest(ReBitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
self.extra_args = [[
"-walletrbf={}".format(i),
"-mintxfee=0.00002",
] for i in range(self.num_nodes)]
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
peer_node, rbf_node = self.nodes
peer_node.generate(110)
self.sync_all()
peer_node.sendtoaddress(rbf_node.getnewaddress(), 0.001)
self.sync_all()
peer_node.generate(1)
self.sync_all()
rbfid = spend_one_input(rbf_node, peer_node.getnewaddress())
self.log.info("Testing bumpfee with totalFee argument raises RPC error with deprecation message")
assert_raises_rpc_error(
-8,
"totalFee argument has been deprecated and will be removed in 0.20. " +
"Please use -deprecatedrpc=totalFee to continue using this argument until removal.",
rbf_node.bumpfee, rbfid, {"totalFee": 2000})
self.log.info("Testing bumpfee without totalFee argument does not raise")
rbf_node.bumpfee(rbfid)
def spend_one_input(node, dest_address, change_size=Decimal("0.00049000")):
tx_input = dict(sequence=BIP125_SEQUENCE_NUMBER,
**next(u for u in node.listunspent() if u["amount"] == Decimal("0.00100000")))
destinations = {dest_address: Decimal("0.00050000")}
destinations[node.getrawchangeaddress()] = change_size
rawtx = node.createrawtransaction([tx_input], destinations)
signedtx = node.signrawtransactionwithwallet(rawtx)
txid = node.sendrawtransaction(signedtx["hex"])
return txid
if __name__ == "__main__":
BumpFeeWithTotalFeeArgumentDeprecationTest().main()
| 41.781818 | 105 | 0.707572 |
acf1ead186cbf6f677f16682cbc74e84365c9134 | 5,982 | py | Python | homeassistant/components/sensor/sma.py | dannyqwertz/home-assistant | 688bdc6532e514afbdc8efd1f574a7b5c9e8d280 | [
"Apache-2.0"
] | 4 | 2019-01-10T14:47:54.000Z | 2021-04-22T02:06:27.000Z | homeassistant/components/sensor/sma.py | au190/home-assistant | e87ecbd5007acad7468d7118d02b21f6d783c8bc | [
"Apache-2.0"
] | 5 | 2021-02-08T20:50:07.000Z | 2022-03-12T00:39:31.000Z | homeassistant/components/sensor/sma.py | au190/home-assistant | e87ecbd5007acad7468d7118d02b21f6d783c8bc | [
"Apache-2.0"
] | 3 | 2018-08-29T19:26:20.000Z | 2020-01-19T11:58:22.000Z | """
SMA Solar Webconnect interface.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.sma/
"""
import asyncio
from datetime import timedelta
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_HOST, CONF_PASSWORD, CONF_SCAN_INTERVAL, CONF_SSL, CONF_VERIFY_SSL,
EVENT_HOMEASSISTANT_STOP)
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import async_track_time_interval
REQUIREMENTS = ['pysma==0.2.2']
_LOGGER = logging.getLogger(__name__)
CONF_CUSTOM = 'custom'
CONF_FACTOR = 'factor'
CONF_GROUP = 'group'
CONF_KEY = 'key'
CONF_SENSORS = 'sensors'
CONF_UNIT = 'unit'
GROUPS = ['user', 'installer']
def _check_sensor_schema(conf):
"""Check sensors and attributes are valid."""
try:
import pysma
valid = [s.name for s in pysma.SENSORS]
except (ImportError, AttributeError):
return conf
valid.extend(conf[CONF_CUSTOM].keys())
for sname, attrs in conf[CONF_SENSORS].items():
if sname not in valid:
raise vol.Invalid("{} does not exist".format(sname))
for attr in attrs:
if attr in valid:
continue
raise vol.Invalid("{} does not exist [{}]".format(attr, sname))
return conf
CUSTOM_SCHEMA = vol.Any({
vol.Required(CONF_KEY):
vol.All(cv.string, vol.Length(min=13, max=15)),
vol.Required(CONF_UNIT): cv.string,
vol.Optional(CONF_FACTOR, default=1): vol.Coerce(float),
})
PLATFORM_SCHEMA = vol.All(PLATFORM_SCHEMA.extend({
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_SSL, default=False): cv.boolean,
vol.Optional(CONF_VERIFY_SSL, default=True): cv.boolean,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_GROUP, default=GROUPS[0]): vol.In(GROUPS),
vol.Required(CONF_SENSORS): vol.Schema({cv.slug: cv.ensure_list}),
vol.Optional(CONF_CUSTOM, default={}):
vol.Schema({cv.slug: CUSTOM_SCHEMA}),
}, extra=vol.PREVENT_EXTRA), _check_sensor_schema)
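
# Illustrative only: a configuration.yaml snippet matching PLATFORM_SCHEMA
# above. Host/password are placeholders, and the sensor names must either
# exist in pysma.SENSORS or be declared under `custom` (the names and the
# custom key below are hypothetical examples, not verified pysma values).
#
# sensor:
#   - platform: sma
#     host: 192.168.1.100
#     password: !secret sma_password
#     sensors:
#       pv_power: []
#       total_yield: [daily_yield]
#     custom:
#       my_custom_sensor:
#         key: 6100_0046C200
#         unit: W
#         factor: 1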
async def async_setup_platform(
hass, config, async_add_entities, discovery_info=None):
"""Set up SMA WebConnect sensor."""
import pysma
# Check config again during load - dependency available
config = _check_sensor_schema(config)
# Sensor_defs from the custom config
for name, prop in config[CONF_CUSTOM].items():
n_s = pysma.Sensor(name, prop['key'], prop['unit'], prop['factor'])
pysma.add_sensor(n_s)
# Prepare all HASS sensor entities
hass_sensors = []
used_sensors = []
for name, attr in config[CONF_SENSORS].items():
sub_sensors = [pysma.get_sensor(s) for s in attr]
hass_sensors.append(SMAsensor(pysma.get_sensor(name), sub_sensors))
used_sensors.append(name)
used_sensors.extend(attr)
async_add_entities(hass_sensors)
used_sensors = [pysma.get_sensor(s) for s in set(used_sensors)]
# Init the SMA interface
session = async_get_clientsession(hass, verify_ssl=config[CONF_VERIFY_SSL])
grp = config[CONF_GROUP]
url = "http{}://{}".format(
"s" if config[CONF_SSL] else "", config[CONF_HOST])
sma = pysma.SMA(session, url, config[CONF_PASSWORD], group=grp)
# Ensure we logout on shutdown
async def async_close_session(event):
"""Close the session."""
await sma.close_session()
hass.bus.async_listen(EVENT_HOMEASSISTANT_STOP, async_close_session)
backoff = 0
backoff_step = 0
async def async_sma(event):
"""Update all the SMA sensors."""
nonlocal backoff, backoff_step
if backoff > 1:
backoff -= 1
return
values = await sma.read(used_sensors)
if not values:
try:
backoff = [1, 1, 1, 6, 30][backoff_step]
backoff_step += 1
except IndexError:
backoff = 60
return
backoff_step = 0
tasks = []
for sensor in hass_sensors:
task = sensor.async_update_values()
if task:
tasks.append(task)
if tasks:
await asyncio.wait(tasks, loop=hass.loop)
interval = config.get(CONF_SCAN_INTERVAL) or timedelta(seconds=5)
async_track_time_interval(hass, async_sma, interval)
class SMAsensor(Entity):
"""Representation of a SMA sensor."""
def __init__(self, pysma_sensor, sub_sensors):
"""Initialize the sensor."""
self._sensor = pysma_sensor
self._sub_sensors = sub_sensors
self._attr = {s.name: "" for s in sub_sensors}
self._state = self._sensor.value
@property
def name(self):
"""Return the name of the sensor."""
return self._sensor.name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._sensor.unit
@property
def device_state_attributes(self):
"""Return the state attributes of the sensor."""
return self._attr
@property
def poll(self):
"""SMA sensors are updated & don't poll."""
return False
def async_update_values(self):
"""Update this sensor."""
update = False
for sens in self._sub_sensors:
newval = '{} {}'.format(sens.value, sens.unit)
if self._attr[sens.name] != newval:
update = True
self._attr[sens.name] = newval
if self._sensor.value != self._state:
update = True
self._state = self._sensor.value
return self.async_update_ha_state() if update else None
| 30.212121 | 79 | 0.652457 |
acf1eb88505a51015cae7934e94a7c37ad1038b2 | 31,708 | py | Python | salt/modules/schedule.py | alexjennings/salt | 921cfe1fe40f37471ebb58fa6577d72b0d6b77d1 | [
"Apache-2.0"
] | 1 | 2018-09-19T22:42:54.000Z | 2018-09-19T22:42:54.000Z | salt/modules/schedule.py | alexjennings/salt | 921cfe1fe40f37471ebb58fa6577d72b0d6b77d1 | [
"Apache-2.0"
] | null | null | null | salt/modules/schedule.py | alexjennings/salt | 921cfe1fe40f37471ebb58fa6577d72b0d6b77d1 | [
"Apache-2.0"
] | 1 | 2019-07-23T13:42:23.000Z | 2019-07-23T13:42:23.000Z | # -*- coding: utf-8 -*-
'''
Module for managing the Salt schedule on a minion
.. versionadded:: 2014.7.0
'''
# Import Python libs
from __future__ import absolute_import
import copy as pycopy
import difflib
import os
import yaml
# Import salt libs
import salt.utils
import salt.utils.odict
# Import 3rd-party libs
import salt.ext.six as six
__proxyenabled__ = ['*']
import logging
log = logging.getLogger(__name__)
__func_alias__ = {
'list_': 'list',
'reload_': 'reload'
}
SCHEDULE_CONF = [
'name',
'maxrunning',
'function',
'splay',
'range',
'when',
'once',
'once_fmt',
'returner',
'jid_include',
'args',
'kwargs',
'_seconds',
'seconds',
'minutes',
'hours',
'days',
'enabled',
'return_job',
'metadata',
'cron',
'until',
'after',
'return_config',
'return_kwargs'
]
def list_(show_all=False,
show_disabled=True,
where=None,
return_yaml=True):
'''
List the jobs currently scheduled on the minion
CLI Example:
.. code-block:: bash
salt '*' schedule.list
# Show all jobs including hidden internal jobs
salt '*' schedule.list show_all=True
# Hide disabled jobs from list of jobs
salt '*' schedule.list show_disabled=False
'''
schedule = {}
try:
eventer = salt.utils.event.get_event('minion', opts=__opts__)
res = __salt__['event.fire']({'func': 'list',
'where': where}, 'manage_schedule')
if res:
event_ret = eventer.get_event(tag='/salt/minion/minion_schedule_list_complete', wait=30)
if event_ret and event_ret['complete']:
schedule = event_ret['schedule']
except KeyError:
# Effectively a no-op, since we can't really return without an event system
ret = {}
ret['comment'] = 'Event module not available. Schedule list failed.'
ret['result'] = True
log.debug('Event module not available. Schedule list failed.')
return ret
for job in list(schedule.keys()): # iterate over a copy since we will mutate it
if job == 'enabled':
continue
# Default jobs added by salt begin with __
# by default hide them unless show_all is True.
if job.startswith('__') and not show_all:
del schedule[job]
continue
# if enabled is not included in the job,
# assume job is enabled.
if 'enabled' not in schedule[job]:
schedule[job]['enabled'] = True
for item in pycopy.copy(schedule[job]):
if item not in SCHEDULE_CONF:
del schedule[job][item]
continue
if schedule[job][item] == 'true':
schedule[job][item] = True
if schedule[job][item] == 'false':
schedule[job][item] = False
# if the job is disabled and show_disabled is False, skip job
if not show_disabled and not schedule[job]['enabled']:
del schedule[job]
continue
if '_seconds' in schedule[job]:
# if _seconds is greater than zero
# then include the original back in seconds.
# otherwise remove seconds from the listing as the
# original item didn't include it.
if schedule[job]['_seconds'] > 0:
schedule[job]['seconds'] = schedule[job]['_seconds']
elif 'seconds' in schedule[job]:
del schedule[job]['seconds']
# remove _seconds from the listing
del schedule[job]['_seconds']
if schedule:
if return_yaml:
tmp = {'schedule': schedule}
yaml_out = yaml.safe_dump(tmp, default_flow_style=False)
return yaml_out
else:
return schedule
else:
return {'schedule': {}}
def is_enabled(name):
'''
List a Job only if its enabled
.. versionadded:: 2015.5.3
CLI Example:
.. code-block:: bash
salt '*' schedule.is_enabled name=job_name
'''
current_schedule = __salt__['schedule.list'](show_all=False, return_yaml=False)
if name in current_schedule:
return current_schedule[name]
else:
return {}
def purge(**kwargs):
'''
Purge all the jobs currently scheduled on the minion
CLI Example:
.. code-block:: bash
salt '*' schedule.purge
'''
ret = {'comment': [],
'result': True}
for name in list_(show_all=True, return_yaml=False):
if name == 'enabled':
continue
if name.startswith('__'):
continue
if 'test' in kwargs and kwargs['test']:
ret['result'] = True
ret['comment'].append('Job: {0} would be deleted from schedule.'.format(name))
else:
persist = True
if 'persist' in kwargs:
persist = kwargs['persist']
try:
eventer = salt.utils.event.get_event('minion', opts=__opts__)
res = __salt__['event.fire']({'name': name,
'func': 'delete',
'persist': persist}, 'manage_schedule')
if res:
event_ret = eventer.get_event(tag='/salt/minion/minion_schedule_delete_complete', wait=30)
if event_ret and event_ret['complete']:
_schedule_ret = event_ret['schedule']
if name not in _schedule_ret:
ret['result'] = True
ret['comment'].append('Deleted job: {0} from schedule.'.format(name))
else:
ret['comment'].append('Failed to delete job {0} from schedule.'.format(name))
ret['result'] = True
return ret
except KeyError:
# Effectively a no-op, since we can't really return without an event system
                ret['comment'] = 'Event module not available. Schedule purge failed.'
ret['result'] = True
return ret
def delete(name, **kwargs):
'''
Delete a job from the minion's schedule
CLI Example:
.. code-block:: bash
salt '*' schedule.delete job1
'''
ret = {'comment': 'Failed to delete job {0} from schedule.'.format(name),
'result': False}
if not name:
ret['comment'] = 'Job name is required.'
if 'test' in kwargs and kwargs['test']:
ret['comment'] = 'Job: {0} would be deleted from schedule.'.format(name)
ret['result'] = True
else:
persist = True
if 'persist' in kwargs:
persist = kwargs['persist']
if name in list_(show_all=True, where='opts', return_yaml=False):
event_data = {'name': name, 'func': 'delete', 'persist': persist}
elif name in list_(show_all=True, where='pillar', return_yaml=False):
event_data = {'name': name, 'where': 'pillar', 'func': 'delete', 'persist': False}
else:
ret['comment'] = 'Job {0} does not exist.'.format(name)
return ret
try:
eventer = salt.utils.event.get_event('minion', opts=__opts__)
res = __salt__['event.fire'](event_data, 'manage_schedule')
if res:
event_ret = eventer.get_event(tag='/salt/minion/minion_schedule_delete_complete', wait=30)
if event_ret and event_ret['complete']:
schedule = event_ret['schedule']
if name not in schedule:
ret['result'] = True
ret['comment'] = 'Deleted Job {0} from schedule.'.format(name)
else:
ret['comment'] = 'Failed to delete job {0} from schedule.'.format(name)
return ret
except KeyError:
# Effectively a no-op, since we can't really return without an event system
            ret['comment'] = 'Event module not available. Schedule delete failed.'
return ret
def build_schedule_item(name, **kwargs):
'''
Build a schedule job
CLI Example:
.. code-block:: bash
salt '*' schedule.build_schedule_item job1 function='test.ping' seconds=3600
'''
ret = {'comment': [],
'result': True}
if not name:
ret['comment'] = 'Job name is required.'
ret['result'] = False
return ret
schedule = {}
schedule[name] = salt.utils.odict.OrderedDict()
schedule[name]['function'] = kwargs['function']
time_conflict = False
for item in ['seconds', 'minutes', 'hours', 'days']:
if item in kwargs and 'when' in kwargs:
time_conflict = True
if item in kwargs and 'cron' in kwargs:
time_conflict = True
if time_conflict:
ret['result'] = False
ret['comment'] = 'Unable to use "seconds", "minutes", "hours", or "days" with "when" or "cron" options.'
return ret
if 'when' in kwargs and 'cron' in kwargs:
ret['result'] = False
ret['comment'] = 'Unable to use "when" and "cron" options together. Ignoring.'
return ret
for item in ['seconds', 'minutes', 'hours', 'days']:
if item in kwargs:
schedule[name][item] = kwargs[item]
if 'return_job' in kwargs:
schedule[name]['return_job'] = kwargs['return_job']
if 'metadata' in kwargs:
schedule[name]['metadata'] = kwargs['metadata']
if 'job_args' in kwargs:
schedule[name]['args'] = kwargs['job_args']
if 'job_kwargs' in kwargs:
schedule[name]['kwargs'] = kwargs['job_kwargs']
if 'maxrunning' in kwargs:
schedule[name]['maxrunning'] = kwargs['maxrunning']
else:
schedule[name]['maxrunning'] = 1
if 'name' in kwargs:
schedule[name]['name'] = kwargs['name']
else:
schedule[name]['name'] = name
if 'enabled' in kwargs:
schedule[name]['enabled'] = kwargs['enabled']
else:
schedule[name]['enabled'] = True
if 'jid_include' not in kwargs or kwargs['jid_include']:
schedule[name]['jid_include'] = True
if 'splay' in kwargs:
if isinstance(kwargs['splay'], dict):
# Ensure ordering of start and end arguments
schedule[name]['splay'] = salt.utils.odict.OrderedDict()
schedule[name]['splay']['start'] = kwargs['splay']['start']
schedule[name]['splay']['end'] = kwargs['splay']['end']
else:
schedule[name]['splay'] = kwargs['splay']
for item in ['range', 'when', 'once', 'once_fmt', 'cron', 'returner',
'return_config', 'return_kwargs', 'until', 'enabled']:
if item in kwargs:
schedule[name][item] = kwargs[item]
# if enabled is not included in the job,
# assume job is enabled.
if 'enabled' not in kwargs:
schedule[name]['enabled'] = True
return schedule[name]
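
# Illustrative only: for the build_schedule_item CLI example in the docstring
# above,
#   salt '*' schedule.build_schedule_item job1 function='test.ping' seconds=3600
# the returned item is expected to look like
#   {'function': 'test.ping', 'seconds': 3600, 'maxrunning': 1,
#    'name': 'job1', 'enabled': True, 'jid_include': True}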
def add(name, **kwargs):
'''
Add a job to the schedule
CLI Example:
.. code-block:: bash
salt '*' schedule.add job1 function='test.ping' seconds=3600
# If function have some arguments, use job_args
salt '*' schedule.add job2 function='cmd.run' job_args="['date >> /tmp/date.log']" seconds=60
'''
ret = {'comment': 'Failed to add job {0} to schedule.'.format(name),
'result': False}
if name in list_(show_all=True, return_yaml=False):
ret['comment'] = 'Job {0} already exists in schedule.'.format(name)
ret['result'] = False
return ret
if not name:
ret['comment'] = 'Job name is required.'
ret['result'] = False
time_conflict = False
for item in ['seconds', 'minutes', 'hours', 'days']:
if item in kwargs and 'when' in kwargs:
time_conflict = True
if item in kwargs and 'cron' in kwargs:
time_conflict = True
if time_conflict:
ret['comment'] = 'Error: Unable to use "seconds", "minutes", "hours", or "days" with "when" or "cron" options.'
return ret
if 'when' in kwargs and 'cron' in kwargs:
ret['comment'] = 'Unable to use "when" and "cron" options together. Ignoring.'
return ret
persist = True
if 'persist' in kwargs:
persist = kwargs['persist']
_new = build_schedule_item(name, **kwargs)
schedule_data = {}
schedule_data[name] = _new
if 'test' in kwargs and kwargs['test']:
ret['comment'] = 'Job: {0} would be added to schedule.'.format(name)
ret['result'] = True
else:
try:
eventer = salt.utils.event.get_event('minion', opts=__opts__)
res = __salt__['event.fire']({'name': name,
'schedule': schedule_data,
'func': 'add',
'persist': persist}, 'manage_schedule')
if res:
event_ret = eventer.get_event(tag='/salt/minion/minion_schedule_add_complete', wait=30)
if event_ret and event_ret['complete']:
schedule = event_ret['schedule']
if name in schedule:
ret['result'] = True
ret['comment'] = 'Added job: {0} to schedule.'.format(name)
return ret
except KeyError:
# Effectively a no-op, since we can't really return without an event system
ret['comment'] = 'Event module not available. Schedule add failed.'
return ret
def modify(name, **kwargs):
'''
Modify an existing job in the schedule
CLI Example:
.. code-block:: bash
salt '*' schedule.modify job1 function='test.ping' seconds=3600
'''
ret = {'comment': '',
'changes': {},
'result': True}
time_conflict = False
for item in ['seconds', 'minutes', 'hours', 'days']:
if item in kwargs and 'when' in kwargs:
time_conflict = True
if item in kwargs and 'cron' in kwargs:
time_conflict = True
if time_conflict:
ret['result'] = False
ret['comment'] = 'Error: Unable to use "seconds", "minutes", "hours", or "days" with "when" or "cron" options.'
return ret
if 'when' in kwargs and 'cron' in kwargs:
ret['result'] = False
ret['comment'] = 'Unable to use "when" and "cron" options together. Ignoring.'
return ret
current_schedule = list_(show_all=True, return_yaml=False)
if name not in current_schedule:
ret['comment'] = 'Job {0} does not exist in schedule.'.format(name)
ret['result'] = False
return ret
_current = current_schedule[name]
if '_seconds' in _current:
_current['seconds'] = _current['_seconds']
del _current['_seconds']
_new = build_schedule_item(name, **kwargs)
if _new == _current:
ret['comment'] = 'Job {0} in correct state'.format(name)
return ret
_current_lines = ['{0}:{1}\n'.format(key, value)
for (key, value) in sorted(_current.items())]
_new_lines = ['{0}:{1}\n'.format(key, value)
for (key, value) in sorted(_new.items())]
_diff = difflib.unified_diff(_current_lines, _new_lines)
ret['changes']['diff'] = ''.join(_diff)
if 'test' in kwargs and kwargs['test']:
ret['comment'] = 'Job: {0} would be modified in schedule.'.format(name)
else:
persist = True
if 'persist' in kwargs:
persist = kwargs['persist']
if name in list_(show_all=True, where='opts', return_yaml=False):
event_data = {'name': name,
'schedule': _new,
'func': 'modify',
'persist': persist}
elif name in list_(show_all=True, where='pillar', return_yaml=False):
event_data = {'name': name,
'schedule': _new,
'where': 'pillar',
'func': 'modify',
'persist': False}
out = __salt__['event.fire'](event_data, 'manage_schedule')
if out:
ret['comment'] = 'Modified job: {0} in schedule.'.format(name)
else:
ret['comment'] = 'Failed to modify job {0} in schedule.'.format(name)
ret['result'] = False
return ret
def run_job(name, force=False):
'''
Run a scheduled job on the minion immediately
CLI Example:
.. code-block:: bash
salt '*' schedule.run_job job1
salt '*' schedule.run_job job1 force=True
Force the job to run even if it is disabled.
'''
ret = {'comment': [],
'result': True}
if not name:
ret['comment'] = 'Job name is required.'
ret['result'] = False
schedule = list_(show_all=True, return_yaml=False)
if name in schedule:
data = schedule[name]
if 'enabled' in data and not data['enabled'] and not force:
ret['comment'] = 'Job {0} is disabled.'.format(name)
else:
out = __salt__['event.fire']({'name': name, 'func': 'run_job'}, 'manage_schedule')
if out:
ret['comment'] = 'Scheduling Job {0} on minion.'.format(name)
else:
ret['comment'] = 'Failed to run job {0} on minion.'.format(name)
ret['result'] = False
else:
ret['comment'] = 'Job {0} does not exist.'.format(name)
ret['result'] = False
return ret
def enable_job(name, **kwargs):
'''
Enable a job in the minion's schedule
CLI Example:
.. code-block:: bash
salt '*' schedule.enable_job job1
'''
ret = {'comment': [],
'result': True}
if not name:
ret['comment'] = 'Job name is required.'
ret['result'] = False
if 'test' in __opts__ and __opts__['test']:
ret['comment'] = 'Job: {0} would be enabled in schedule.'.format(name)
else:
persist = True
if 'persist' in kwargs:
persist = kwargs['persist']
if name in list_(show_all=True, where='opts', return_yaml=False):
event_data = {'name': name, 'func': 'enable_job', 'persist': persist}
elif name in list_(show_all=True, where='pillar', return_yaml=False):
event_data = {'name': name, 'where': 'pillar', 'func': 'enable_job', 'persist': False}
else:
ret['comment'] = 'Job {0} does not exist.'.format(name)
ret['result'] = False
return ret
try:
eventer = salt.utils.event.get_event('minion', opts=__opts__)
res = __salt__['event.fire'](event_data, 'manage_schedule')
if res:
event_ret = eventer.get_event(tag='/salt/minion/minion_schedule_enabled_job_complete', wait=30)
if event_ret and event_ret['complete']:
schedule = event_ret['schedule']
# check item exists in schedule and is enabled
if name in schedule and schedule[name]['enabled']:
ret['result'] = True
ret['comment'] = 'Enabled Job {0} in schedule.'.format(name)
else:
ret['result'] = False
ret['comment'] = 'Failed to enable job {0} in schedule.'.format(name)
return ret
except KeyError:
# Effectively a no-op, since we can't really return without an event system
ret['comment'] = 'Event module not available. Schedule enable job failed.'
return ret
def disable_job(name, **kwargs):
'''
Disable a job in the minion's schedule
CLI Example:
.. code-block:: bash
salt '*' schedule.disable_job job1
'''
ret = {'comment': [],
'result': True}
if not name:
ret['comment'] = 'Job name is required.'
ret['result'] = False
if 'test' in kwargs and kwargs['test']:
ret['comment'] = 'Job: {0} would be disabled in schedule.'.format(name)
else:
persist = True
if 'persist' in kwargs:
persist = kwargs['persist']
if name in list_(show_all=True, where='opts', return_yaml=False):
event_data = {'name': name, 'func': 'disable_job', 'persist': persist}
elif name in list_(show_all=True, where='pillar'):
event_data = {'name': name, 'where': 'pillar', 'func': 'disable_job', 'persist': False}
else:
ret['comment'] = 'Job {0} does not exist.'.format(name)
ret['result'] = False
return ret
try:
eventer = salt.utils.event.get_event('minion', opts=__opts__)
res = __salt__['event.fire'](event_data, 'manage_schedule')
if res:
event_ret = eventer.get_event(tag='/salt/minion/minion_schedule_disabled_job_complete', wait=30)
if event_ret and event_ret['complete']:
schedule = event_ret['schedule']
# check item exists in schedule and is disabled
if name in schedule and not schedule[name]['enabled']:
ret['result'] = True
ret['comment'] = 'Disabled Job {0} in schedule.'.format(name)
else:
ret['result'] = False
ret['comment'] = 'Failed to disable job {0} in schedule.'.format(name)
return ret
except KeyError:
# Effectively a no-op, since we can't really return without an event system
ret['comment'] = 'Event module not available. Schedule disable job failed.'
return ret
def save(**kwargs):
'''
Save all scheduled jobs on the minion
CLI Example:
.. code-block:: bash
salt '*' schedule.save
'''
ret = {'comment': [],
'result': True}
if 'test' in kwargs and kwargs['test']:
ret['comment'] = 'Schedule would be saved.'
else:
try:
eventer = salt.utils.event.get_event('minion', opts=__opts__)
res = __salt__['event.fire']({'func': 'save_schedule'}, 'manage_schedule')
if res:
event_ret = eventer.get_event(tag='/salt/minion/minion_schedule_saved', wait=30)
if event_ret and event_ret['complete']:
ret['result'] = True
ret['comment'] = 'Schedule (non-pillar items) saved.'
else:
ret['result'] = False
ret['comment'] = 'Failed to save schedule.'
except KeyError:
# Effectively a no-op, since we can't really return without an event system
ret['comment'] = 'Event module not available. Schedule save failed.'
return ret
def enable(**kwargs):
'''
Enable all scheduled jobs on the minion
CLI Example:
.. code-block:: bash
salt '*' schedule.enable
'''
ret = {'comment': [],
'result': True}
if 'test' in kwargs and kwargs['test']:
ret['comment'] = 'Schedule would be enabled.'
else:
try:
eventer = salt.utils.event.get_event('minion', opts=__opts__)
res = __salt__['event.fire']({'func': 'enable'}, 'manage_schedule')
if res:
event_ret = eventer.get_event(tag='/salt/minion/minion_schedule_enabled_complete', wait=30)
if event_ret and event_ret['complete']:
schedule = event_ret['schedule']
if 'enabled' in schedule and schedule['enabled']:
ret['result'] = True
ret['comment'] = 'Enabled schedule on minion.'
else:
ret['result'] = False
ret['comment'] = 'Failed to enable schedule on minion.'
return ret
except KeyError:
# Effectively a no-op, since we can't really return without an event system
ret['comment'] = 'Event module not available. Schedule enable failed.'
return ret
def disable(**kwargs):
'''
Disable all scheduled jobs on the minion
CLI Example:
.. code-block:: bash
salt '*' schedule.disable
'''
ret = {'comment': [],
'result': True}
if 'test' in kwargs and kwargs['test']:
ret['comment'] = 'Schedule would be disabled.'
else:
try:
eventer = salt.utils.event.get_event('minion', opts=__opts__)
res = __salt__['event.fire']({'func': 'disable'}, 'manage_schedule')
if res:
event_ret = eventer.get_event(tag='/salt/minion/minion_schedule_disabled_complete', wait=30)
if event_ret and event_ret['complete']:
schedule = event_ret['schedule']
if 'enabled' in schedule and not schedule['enabled']:
ret['result'] = True
ret['comment'] = 'Disabled schedule on minion.'
else:
ret['result'] = False
ret['comment'] = 'Failed to disable schedule on minion.'
return ret
except KeyError:
# Effectively a no-op, since we can't really return without an event system
ret['comment'] = 'Event module not available. Schedule disable failed.'
return ret
def reload_():
'''
Reload saved scheduled jobs on the minion
CLI Example:
.. code-block:: bash
salt '*' schedule.reload
'''
ret = {'comment': [],
'result': True}
# If there is a schedule defined in pillar, refresh it.
if 'schedule' in __pillar__:
out = __salt__['event.fire']({}, 'pillar_refresh')
if out:
ret['comment'].append('Reloaded schedule from pillar on minion.')
else:
ret['comment'].append('Failed to reload schedule from pillar on minion.')
ret['result'] = False
# move this file into a configurable opt
sfn = '{0}/{1}/schedule.conf'.format(__opts__['config_dir'], os.path.dirname(__opts__['default_include']))
if os.path.isfile(sfn):
with salt.utils.fopen(sfn, 'rb') as fp_:
try:
schedule = yaml.safe_load(fp_.read())
except yaml.YAMLError as exc:
ret['comment'].append('Unable to read existing schedule file: {0}'.format(exc))
if schedule:
if 'schedule' in schedule and schedule['schedule']:
out = __salt__['event.fire']({'func': 'reload', 'schedule': schedule}, 'manage_schedule')
if out:
ret['comment'].append('Reloaded schedule on minion from schedule.conf.')
else:
ret['comment'].append('Failed to reload schedule on minion from schedule.conf.')
ret['result'] = False
else:
ret['comment'].append('Failed to reload schedule on minion. Saved file is empty or invalid.')
ret['result'] = False
else:
ret['comment'].append('Failed to reload schedule on minion. Saved file is empty or invalid.')
ret['result'] = False
return ret
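# Editor's note (illustrative sketch, not part of the original module): reload_()
# looks for <config_dir>/<dirname(default_include)>/schedule.conf and expects it
# to contain YAML of roughly this shape (job name and values are assumptions):
#
#     schedule:
#       job1:
#         function: test.ping
#         seconds: 3600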
def move(name, target, **kwargs):
'''
Move scheduled job to another minion or minions.
CLI Example:
.. code-block:: bash
salt '*' schedule.move jobname target
'''
ret = {'comment': [],
'result': True}
if not name:
ret['comment'] = 'Job name is required.'
ret['result'] = False
if 'test' in kwargs and kwargs['test']:
ret['comment'] = 'Job: {0} would be moved from schedule.'.format(name)
else:
opts_schedule = list_(show_all=True, where='opts', return_yaml=False)
pillar_schedule = list_(show_all=True, where='pillar', return_yaml=False)
if name in opts_schedule:
schedule_data = opts_schedule[name]
where = None
elif name in pillar_schedule:
schedule_data = pillar_schedule[name]
where = 'pillar'
else:
ret['comment'] = 'Job {0} does not exist.'.format(name)
ret['result'] = False
return ret
schedule_opts = []
for key, value in six.iteritems(schedule_data):
temp = '{0}={1}'.format(key, value)
schedule_opts.append(temp)
response = __salt__['publish.publish'](target, 'schedule.add', schedule_opts)
# Get errors and list of affected minions
errors = []
minions = []
for minion in response:
minions.append(minion)
if not response[minion]:
errors.append(minion)
# parse response
if not response:
ret['comment'] = 'no servers answered the published schedule.add command'
return ret
elif len(errors) > 0:
ret['comment'] = 'the following minions returned False'
ret['minions'] = errors
return ret
else:
delete(name, where=where)
ret['result'] = True
ret['comment'] = 'Moved Job {0} from schedule.'.format(name)
ret['minions'] = minions
return ret
return ret
def copy(name, target, **kwargs):
'''
Copy scheduled job to another minion or minions.
CLI Example:
.. code-block:: bash
salt '*' schedule.copy jobname target
'''
ret = {'comment': [],
'result': True}
if not name:
ret['comment'] = 'Job name is required.'
ret['result'] = False
if 'test' in kwargs and kwargs['test']:
ret['comment'] = 'Job: {0} would be copied from schedule.'.format(name)
else:
opts_schedule = list_(show_all=True, where='opts', return_yaml=False)
pillar_schedule = list_(show_all=True, where='pillar', return_yaml=False)
if name in opts_schedule:
schedule_data = opts_schedule[name]
elif name in pillar_schedule:
schedule_data = pillar_schedule[name]
else:
ret['comment'] = 'Job {0} does not exist.'.format(name)
ret['result'] = False
return ret
schedule_opts = []
for key, value in six.iteritems(schedule_data):
temp = '{0}={1}'.format(key, value)
schedule_opts.append(temp)
response = __salt__['publish.publish'](target, 'schedule.add', schedule_opts)
# Get errors and list of affected minions
errors = []
minions = []
for minion in response:
minions.append(minion)
if not response[minion]:
errors.append(minion)
# parse response
if not response:
ret['comment'] = 'no servers answered the published schedule.add command'
return ret
elif len(errors) > 0:
ret['comment'] = 'the following minions returned False'
ret['minions'] = errors
return ret
else:
ret['result'] = True
ret['comment'] = 'Copied Job {0} from schedule to minion(s).'.format(name)
ret['minions'] = minions
return ret
return ret
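# Editor's note (illustrative sketch, not part of the original module): the
# publish.publish call used by move() and copy() returns a dict keyed by minion
# id, e.g. {'minion1': True, 'minion2': False}; minions mapping to a falsy value
# are collected into the errors list reported back as ret['minions'].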
| 32.858031 | 119 | 0.547181 |
acf1ebdebcaf15dd613799a2c752cd8621ca9a3e | 7,853 | py | Python | async_server_client.py | niranjanhegde144/asyncio_server_client | 7b7bd76cadebcaec2f0feab3f729ab508a9caa51 | [
"MIT"
] | null | null | null | async_server_client.py | niranjanhegde144/asyncio_server_client | 7b7bd76cadebcaec2f0feab3f729ab508a9caa51 | [
"MIT"
] | null | null | null | async_server_client.py | niranjanhegde144/asyncio_server_client | 7b7bd76cadebcaec2f0feab3f729ab508a9caa51 | [
"MIT"
] | null | null | null | """
Asynchronous Server which listens to unix socket, TCP and UDP sockets using
event loops
"""
import asyncio
import socket
from concurrent.futures import CancelledError
class UDPProtocol:
"""
Udp Protocol to receive data from UDP sockets
"""
def __init__(self, callback, on_con_lost):
self.transport = None
self.callback = callback
self.on_con_lost = on_con_lost
def connection_made(self, transport):
'''
Stores the transport created for the UDP connection
'''
self.transport = transport
def datagram_received(self, data, addr):
'''
UDP protocol handler for receiving UDP datagrams.
Called whenever a datagram is received.
'''
self.callback(data, addr, transport=self.transport)
def connection_lost(self, exc):
'''
Called when the connection with the client is lost.
'''
try:
self.on_con_lost.set_result(True)
except Exception:
print(exc)
class Server:
"""
Asynchronous Server using asyncio module using sockets
"""
# pylint: disable=too-many-instance-attributes
# Eight is reasonable in this case.
def __init__(self, host="localhost", port=9000, pathname=None):
self.server_unix_socket = None
self.server_tcp_socket = None
self.client = None
self.addr = None
self.host = host
self.port = port
self.tcp_task = None
self.unix_task = None
self.count = 0
try:
self.create_stream_socket()
if pathname:
self.create_unix_socket(pathname)
except FileNotFoundError:
print("File %s not found" % (pathname))
except OSError:
print("Address %s already in use" % (pathname))
except TypeError as exc:
print(exc)
except Exception as exc:
print(exc)
finally:
print("Server listening for TCP and UDP sockets on",
(self.host, self.port))
if pathname:
print("Server listening for Unix sockets on", pathname)
def create_unix_socket(self, pathname):
'''
Creates unix socket
'''
try:
self.server_unix_socket = socket.socket(
socket.AF_UNIX, socket.SOCK_STREAM)
self.server_unix_socket.bind(pathname)
self.server_unix_socket.listen(10)
self.server_unix_socket.setblocking(False)
except socket.error as err:
print(err)
def create_stream_socket(self):
'''
Creates TCP Stream socket
'''
try:
self.server_tcp_socket = socket.socket(
socket.AF_INET, socket.SOCK_STREAM)
self.server_tcp_socket.setsockopt(
socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.server_tcp_socket.bind((self.host, self.port))
self.server_tcp_socket.listen(10)
self.server_tcp_socket.setblocking(False)
except socket.error as exc:
print(exc)
async def accept_unix_connection(self, loop, unix_socket):
'''
Establishes connection with the unix socket
'''
self.client, self.addr = await loop.sock_accept(unix_socket)
print("Accepted UNIX connection")
self.tcp_task.cancel()
if self.unix_task:
self.unix_task.cancel()
async def accept_tcp_connection(self, loop, tcp_socket):
'''
Establishes connection with the tcp socket
'''
self.client, self.addr = await loop.sock_accept(tcp_socket)
print("Accepted TCP connection")
self.tcp_task.cancel()
if self.unix_task:
self.unix_task.cancel()
async def udp_connection(self, loop, callback):
'''
Creates a UDP connection
'''
on_con_lost = loop.create_future()
transport, protocol = await loop.create_datagram_endpoint(
lambda: UDPProtocol(callback, on_con_lost), local_addr=(self.host, self.port))
try:
await on_con_lost
finally:
transport.close()
async def accept(self, loop, callback):
'''
Establishes connection with the client
'''
tasks = []
if self.server_unix_socket:
self.unix_task = loop.create_task(self.accept_unix_connection(
loop, self.server_unix_socket))
tasks.append(self.unix_task)
self.tcp_task = loop.create_task(self.accept_tcp_connection(
loop, self.server_tcp_socket))
if self.count == 0:
udp_task = loop.create_task(
self.udp_connection(loop, callback))
tasks.append(udp_task)
self.count += 1
tasks.append(self.tcp_task)
try:
await asyncio.gather(*tasks)
except CancelledError:
pass
return self.client, self.addr
async def recv(self, loop, client, addr, callback=None):
'''
Handles data receiving in asynchronous manner
'''
while True:
try:
data = (await loop.sock_recv(client, 4096))
if not data:
break
if callback:
callback(data, addr, client_sock=client)
except AttributeError as err:
print(err)
except Exception as exc:
print("Client closed connection", exc)
client.close()
async def send(self, loop, client, data):
'''
Sends data to the client
'''
try:
await loop.sock_sendall(client, data.encode('utf8'))
except Exception as exc:
print(exc)
self.close_client(client)
def close_client(self, client):
'''
Close the client connection
'''
print("Closing connection with the client")
client.close()
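# --- Editor's usage sketch (not part of the original module) ---
# Minimal illustration of driving Server from an asyncio event loop; the
# callback name and body below are assumptions, not part of the original API.
#
#     async def serve_once(server, loop):
#         def echo(data, addr, transport=None, client_sock=None):
#             print('received', data, 'from', addr)
#         client, addr = await server.accept(loop, echo)
#         await server.recv(loop, client, addr, callback=echo)
#
#     loop = asyncio.get_event_loop()
#     loop.run_until_complete(serve_once(Server(port=9000), loop))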
class Client:
"""
Class for client socket
"""
def __init__(self, host="localhost", port=9000, pathname=None):
self.client_socket = None
if pathname:
self.connect_unix_socket(pathname)
else:
self.connect_stream_socket(host, port)
def connect_stream_socket(self, host, port):
'''
Initialises connection to the server as a TCP stream socket
'''
try:
self.client_socket = socket.socket(
socket.AF_INET, socket.SOCK_STREAM)
self.client_socket.connect((host, port))
except socket.error as err:
print(err)
def connect_unix_socket(self, pathname):
'''
Initialises connection to the server as UNIX stream socket
'''
try:
self.client_socket = socket.socket(
socket.AF_UNIX, socket.SOCK_STREAM)
self.client_socket.connect(pathname)
except socket.error as err:
print(err)
def send(self, data):
'''
Sends the request to the server
'''
try:
data = data.encode()
self.client_socket.send(data)
except AttributeError as err:
print(err)
except Exception as exc:
print("Cannot send data to the server", exc)
self.close()
def receive(self, size=4096):
'''
Receives the response from the server
'''
try:
data = self.client_socket.recv(size)
return data
except socket.error as err:
print(err)
def close(self):
'''
Closes the connection with the server
'''
print("Closing connection with the Server")
self.client_socket.close()
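# --- Editor's usage sketch (not part of the original module) ---
# Minimal illustration of a client round trip against the Server above; the
# message contents are assumptions for illustration only.
#
#     client = Client(host='localhost', port=9000)
#     client.send('hello')
#     reply = client.receive()
#     client.close()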
| 29.522556 | 90 | 0.574175 |
acf1ebfed2603a244e1de4e04aeb7d01a8c51908 | 1,088 | py | Python | antlir/test_utils.py | SaurabhAgarwala/antlir | d9513d35d3eaa9d28717a40057a14d099c6ec775 | [
"MIT"
] | null | null | null | antlir/test_utils.py | SaurabhAgarwala/antlir | d9513d35d3eaa9d28717a40057a14d099c6ec775 | [
"MIT"
] | null | null | null | antlir/test_utils.py | SaurabhAgarwala/antlir | d9513d35d3eaa9d28717a40057a14d099c6ec775 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from contextlib import contextmanager
class TestUtilsMixin:
"""Use as a mixin with a class inheriting unittest.TestCase to provide some
useful helpers.
"""
def assert_call_equality(self, mock, expected_calls, **kwargs):
"""Helper to ensure a given mock had *only* the expected calls by also
asserting the length of the iterable.
"""
self.assertEqual(len(mock.mock_calls), len(expected_calls))
mock.assert_has_calls(expected_calls, **kwargs)
@contextmanager
def patch_ctx_mgr(self, to_patch):
"""Mocks a context manager by returning the 'entered' object. To use,
pass in an unstarted patch.
Example:
with patch_ctx_mgr(mock.patch.object(mod, "attr")) as patched:
...
"""
with to_patch as patched:
yield patched.return_value.__enter__.return_value
| 32.969697 | 79 | 0.67739 |
acf1ed24f5e623485faf21cd5d25f84069267d9e | 9,721 | py | Python | Products/LDAPUserFolder/help/LDAPUserFolder.py | eaudeweb/Products.LDAPUserFolder | 3da7155973ac329ffd76f05b060980a29a04a55c | [
"ZPL-2.1"
] | null | null | null | Products/LDAPUserFolder/help/LDAPUserFolder.py | eaudeweb/Products.LDAPUserFolder | 3da7155973ac329ffd76f05b060980a29a04a55c | [
"ZPL-2.1"
] | 1 | 2018-10-08T12:49:27.000Z | 2018-10-08T12:49:27.000Z | Products/LDAPUserFolder/help/LDAPUserFolder.py | eaudeweb/Products.LDAPUserFolder | 3da7155973ac329ffd76f05b060980a29a04a55c | [
"ZPL-2.1"
] | 1 | 2020-02-12T17:33:25.000Z | 2020-02-12T17:33:25.000Z | ##############################################################################
#
# Copyright (c) 2000-2009 Jens Vagelpohl and Contributors. All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
""" The LDAPUserFolder interface
$Id$
"""
class LDAPUserFolder:
"""
This interface file lists methods available for scripting
LDAPUserFolder objects.
Some others are accessible given the correct permissions but since
they are used only in the internal workings of the LDAPUserFolder
they are not listed here.
"""
def getUsers():
"""
Return all user objects. Since the number of user records in
an LDAP database is potentially very large this method will
only return those user objects that are in the internal cache
of the LDAPUserFolder and not expired.
Permission - Always available
"""
def getUserNames():
"""
Return a list of user IDs for all users that can be found
given the selected user search base and search scope.
This method will return a simple error message if the
number of users exceeds the limit of search hits that is
built into the python-ldap module.
Permission - Always available
"""
def getUser(name):
"""
Return the user object for the user "name". If the user
cannot be found, None will be returned.
Permission - Always available
"""
def getUserById(id):
"""
Return the user object with the UserID "id". The User ID
may be different from the "Name", the Login. To get a user
by its Login, call getUser.
Permission - Always available
"""
def validate(REQUEST, auth_info, roles):
"""
This method is not called from any DTML or python product
code, but it is the heart of any user folder. Do not use
it in your code, this explanation is just for understandings
sake.
This method is called by the Zope security machinery. Its
arguments are the current REQUEST dictionary, the username
and password pair returned by a basic auth login box
(concatenated with a colon into the auth_info argument), and
the roles that are given to any user by specifying "Default
Roles" in the LDAPAdapter setup.
If successful, a new LDAPUser object is created and returned.
If not, None is returned, which will cause the Zope security
machinery to throw an exception indicating that the user is
not authenticated to access the resource.
Permission - From python only
"""
def getGroups(attr):
"""
Return a list of available group records under the group record
base as defined in the LDAPUserFolder. The attr argument determines
what gets returned and it can have the following values:
o None: A list of tuples is returned where the group CN is the first
and the group full DN is the second element.
o cn: A list of CN strings is returned.
o dn: A list of full DN strings is returned.
Permission: *Manage users*
"""
def manage_addGroup(newgroup_name, REQUEST):
"""
Add a new group under the group record base. If REQUEST is not None a
MessageDialog screen will be returned. The group_name argument forms
the new group CN while the full DN will be formed by combining this
new CN with the group base DN.
Since a group record cannot be empty, meaning there must be at least
a single uniqueMember element in it, the DN given as the binduid in
the LDAPUserFolder configuration is inserted.
Permission: *Manage users*
"""
def manage_deleteGroups(dns, REQUEST):
"""
Delete groups specified by a list of group DN strings which are handed
in as the *dns* argument.
Permission: *Manage users*
"""
def findUser(search_param, search_term):
"""
Find user records given the *search_param* string (which is the name
of an LDAP attribute) and the *search_term* value.
This method will return a list of dictionaries where each matching
record is represented by a dictionary. The dictionary will contain
a key/value pair for each LDAP attribute, including *dn*, that is
present for the given user record.
Permission: *Manage users*
"""
def searchUsers(attrs=[], exact_match=False, **kw):
"""
Search for user records by one or more attributes.
This method takes any passed-in search parameters and values as
keyword arguments and will sort out invalid keys automatically. It
accepts all three forms an attribute can be known as: its real
ldap name, the name an attribute is mapped to explicitly, and the
friendly name it is known by.
Permission: *Manage users*
"""
def getUserDetails(encoded_dn, format, attrs):
"""
Retrieves all details for a user record represented by the DN that
is handed in as the URL-encoded *encoded_dn* argument. The format
argument determines the format of the returned data and can have
two values:
o None: All user attributes are handed back as a list of tuples
where the first element of each tuple contains the LDAP attribute
name and the second element contains the value.
o dictionary: The user record is handed back as a simple dictionary
of attributes as key/value pairs.
The desired attributes can be limited by passing in a sequence of
attribute names as the attrs argument.
Permission: *Manage users*
"""
def isUnique(attr, value):
"""
Determine whether a given LDAP attribute (attr) and its value (value)
are unique in the LDAP tree branch set as the user record base in the
LDAPUserFolder. This method should be called before inserting a new
user record with attr being the attribute chosen as the login name in
your LDAPUserFolder because that attribute value must be unique.
This method will return a truth value (1) if the given attribute value
is indeed unique, 0 if it is not and in the case of an exception it
will return the string describing the exception.
Permission: *Manage users*
"""
def manage_addUser(REQUEST, kwargs):
"""
Create a new user record. If REQUEST is not None, it will be used to
retrieve the values for the user record.
To use this method from Python you must pass None as the REQUEST
argument and a dictionary called *kwargs* containing key/value pairs
for the user record attributes.
The dictionary of values passed in, be it REQUEST or kwargs, must at
the very least contain the following keys and values:
o *cn* or *uid* (depending on what you set the RDN attribute to)
o *user_pw* (the new user record's password)
o *confirm_pw* (This must match password)
o all attributes your user record LDAP schema must contain (consult
your LDAP server schema)
Only those attributes and values are used that are specified on the
LDAP Schema tab of your LDAPUserFolder.
Permission: *Manage users*
"""
def manage_editUser(user_dn, REQUEST, kwargs):
"""
Edit an existing user record. If REQUEST is not None, it will be used to
retrieve the values for the user record.
To use this method from Python you must pass None as the REQUEST
argument and a dictionary called *kwargs* containing key/value pairs
for the user record attributes.
Only those attributes and values are used that are specified on the
LDAP Schema tab of your LDAPUserFolder.
This method will handle modified RDN (Relative Distinguished name)
attributes correctly and execute a *modrdn* as well if needed,
including changing the DN in all group records it is part of.
Permission: *Manage users*
"""
def manage_editUserPassword(dn, new_pw, REQUEST):
"""
Change a users password. The *dn* argument contains the full DN for
the user record in question and new_pw contains the new password.
Permission: *Manage users*
"""
def manage_editUserRoles(user_dn, role_dns, REQUEST):
"""
Change a user's group memberships. The user is specified by a full DN
string, handed in as the *user_dn* attribute. All group records the
user is supposed to be part of are handed in as *role_dns*, a list
of DN strings for group records.
Permission: *Manage users*
"""
def manage_deleteUsers(dns, REQUEST):
"""
Delete the user records given by a list of DN strings. The user records
will be deleted, along with any mention of them in group records.
Permission: *Manage users*
"""
| 32.952542 | 80 | 0.647464 |
acf1ef473c267d874f876cc311eefab9f7a48ffc | 4,181 | py | Python | Briefly/api/testing.py | q815101630/Briefly2.0 | d92ba52308ef8c644fe8fb453169d0bee1a7f47e | [
"MIT"
] | null | null | null | Briefly/api/testing.py | q815101630/Briefly2.0 | d92ba52308ef8c644fe8fb453169d0bee1a7f47e | [
"MIT"
] | null | null | null | Briefly/api/testing.py | q815101630/Briefly2.0 | d92ba52308ef8c644fe8fb453169d0bee1a7f47e | [
"MIT"
] | 1 | 2021-10-06T03:50:19.000Z | 2021-10-06T03:50:19.000Z | # from youtube_transcript_api import YouTubeTranscriptApi
# from pytube import YouTube
# import xml.etree.ElementTree as ET
# import html
# from sentence_process import SentenceHandler
# from punctuator import Punctuator
# import datetime
# def get_video_Transcript(url):
# video_id = url.split("=")[-1]
# transcript = YouTubeTranscriptApi.get_transcript(video_id)
# yt = YouTube(url)
# file = yt.captions['.en'] if '.en' in yt.captions else yt.captions['a.en']
# tree = ET.fromstring(file.xml_captions)
# notags = ET.tostring(tree, encoding='unicode', method='text')
# notags = html.unescape(notags)
# notags = notags.replace('\n', ' ')
# notags = ' '.join(notags.split())
# video = None
# video = yt.streams.filter(progressive=True, file_extension='mp4', res='720p').first()
# if not video:
# video = yt.streams.filter(progressive=True, file_extension='mp4', res='480p').first()
# elif not video:
# video = yt.streams.filter(progressive=True, file_extension='mp4', res='360p').first()
# elif not video:
# video = yt.streams.filter(progressive=True, file_extension='mp4', res='240p').first()
# elif not video:
# video = yt.streams.filter(progressive=True, file_extension='mp4', res='144p').first()
# #instance = retrieve_media(video_info)
# # for Allen: change transcript format based on your defined format, start here
# print(transcript)
# # end
# # instance.transcript = dumps(transcript)
# # instance.audioText = notags
# #video_path = video.download()
# # video_file = open(video_path, 'rb')
# # djangofile = File(video_file)
# # instance.video = djangofile
# # instance.fileSize = djangofile.size
# # profile = UserProfile.objects.select_for_update().filter(user=user_id)[0]
# # with transaction.atomic():
# # profile.remaining_size -= djangofile.size
# # profile.save()
# # instance.save()
# # video_file.close()
# # os.remove(video_path)
# return (notags, transcript)
# def get_sentence_words_count(sentences):
# count = 0
# words_counter = []
# for index, sentence in enumerate(sentences):
# words_count = len(sentence.split(' '))
# words_counter.append([count, sentence])
# count += words_count
# return words_counter
# def process_transcript(transcript):
# counter_trans = []
# counter = 0
# for sen in transcript:
# counter += len(sen['text'].split(' '))
# sen['word_count'] = counter
# counter_trans.append(sen)
# return counter_trans
# url = "https://www.youtube.com/watch?v=THxCy-6EnQM"
# (notags, transcript) = get_video_Transcript(url)
# print('checkpoint')
# p = Punctuator('puntuator_model/model.pcl')
# result = p.punctuate(notags)
# # print(result)
# sentence_handler = SentenceHandler()
# sentences = sentence_handler(result, 5, 600)
# print(sentences)
# sentence_count = get_sentence_words_count(sentences)
# term_count = process_transcript(transcript)
# lines = []
# total_time = transcript[-1]['start'] + transcript[-1]['duration']
# word_index = 0
# sentence_index = 0
# sentence_num = len(sentence_count)
# for term in term_count:
# if sentence_index >= sentence_num:
# break
# target_sen = sentence_count[sentence_index]
# while term['word_count'] >= target_sen[0] and sentence_index < sentence_num:
# lines.append({'id': sentence_index, 'sentence': target_sen[1],'time': term['start'] })
# sentence_index += 1
# if sentence_index >= sentence_num:
# break
# target_sen = sentence_count[sentence_index]
# if sentence_index < sentence_num - 1:
# print("alert!! timestamp error, please check speech_to_text file read output function")
# for line_data in lines:
# line_data['displayed_time'] = '[' + str(datetime.timedelta(seconds=int(round(float(line_data['time']))))) + ']'
# line_data['time'] = float(line_data['time']) / float(total_time)
# paperContent = pdfplumber.open(paperFilePath).pages
# showPaperSummary(paperContent)
# print(lines)
| 33.99187 | 117 | 0.656302 |
acf1f0db924c9c5b70dcc668f3bc773dff750ebb | 2,401 | py | Python | mmdet/models/roi_heads/__init__.py | HenryOsborne/xViewDetection | 28bbeb263b05c4c892eae87713e34c62cbe606d5 | [
"Apache-2.0"
] | null | null | null | mmdet/models/roi_heads/__init__.py | HenryOsborne/xViewDetection | 28bbeb263b05c4c892eae87713e34c62cbe606d5 | [
"Apache-2.0"
] | null | null | null | mmdet/models/roi_heads/__init__.py | HenryOsborne/xViewDetection | 28bbeb263b05c4c892eae87713e34c62cbe606d5 | [
"Apache-2.0"
] | null | null | null | from .base_roi_head import BaseRoIHead
from .bbox_heads import (BBoxHead, ConvFCBBoxHead, DIIHead,
DoubleConvFCBBoxHead, SABLHead, SCNetBBoxHead,
Shared2FCBBoxHead, Shared4Conv1FCBBoxHead)
from .cascade_roi_head import CascadeRoIHead
from .double_roi_head import DoubleHeadRoIHead
from .dynamic_roi_head import DynamicRoIHead
from .grid_roi_head import GridRoIHead
from .htc_roi_head import HybridTaskCascadeRoIHead
from .mask_heads import (CoarseMaskHead, FCNMaskHead, FeatureRelayHead,
FusedSemanticHead, GlobalContextHead, GridHead,
HTCMaskHead, MaskIoUHead, MaskPointHead,
SCNetMaskHead, SCNetSemanticHead)
from .mask_scoring_roi_head import MaskScoringRoIHead
from .pisa_roi_head import PISARoIHead
from .point_rend_roi_head import PointRendRoIHead
from .roi_extractors import (BaseRoIExtractor, GenericRoIExtractor,
SingleRoIExtractor)
from .scnet_roi_head import SCNetRoIHead
from .shared_heads import ResLayer
from .sparse_roi_head import SparseRoIHead
from .standard_roi_head import StandardRoIHead
from .trident_roi_head import TridentRoIHead
from .aux_roi_head import AuxRoIHead
from .aux_base_roi_head import AuxBaseRoIHead
from .obb.obb_base_roi_head import OBBBaseRoIHead
from .obb.roitrans_roi_head import RoITransRoIHead
from .obb.obb_standard_roi_head import OBBStandardRoIHead
from .obb.gv_ratio_roi_head import GVRatioRoIHead
from .obb.obb_double_roi_head import OBBDoubleHeadRoIHead
__all__ = [
'BaseRoIHead', 'CascadeRoIHead', 'DoubleHeadRoIHead', 'MaskScoringRoIHead',
'HybridTaskCascadeRoIHead', 'GridRoIHead', 'ResLayer', 'BBoxHead',
'ConvFCBBoxHead', 'DIIHead', 'SABLHead', 'Shared2FCBBoxHead',
'StandardRoIHead', 'Shared4Conv1FCBBoxHead', 'DoubleConvFCBBoxHead',
'FCNMaskHead', 'HTCMaskHead', 'FusedSemanticHead', 'GridHead',
'MaskIoUHead', 'BaseRoIExtractor', 'GenericRoIExtractor',
'SingleRoIExtractor', 'PISARoIHead', 'PointRendRoIHead', 'MaskPointHead',
'CoarseMaskHead', 'DynamicRoIHead', 'SparseRoIHead', 'TridentRoIHead',
'SCNetRoIHead', 'SCNetMaskHead', 'SCNetSemanticHead', 'SCNetBBoxHead',
'FeatureRelayHead', 'GlobalContextHead', 'AuxRoIHead', 'AuxBaseRoIHead',
'OBBDoubleHeadRoIHead', 'OBBBaseRoIHead', 'OBBStandardRoIHead', 'RoITransRoIHead', 'GVRatioRoIHead'
]
| 52.195652 | 103 | 0.772178 |
acf1f112ff58acd6928a2330538339ae0222ad3e | 8,996 | py | Python | data/label.py | ParikhKadam/HybridPose | 3d112425f9b6319c8f62dfd92bb38253fe4ffdcd | [
"MIT"
] | 369 | 2020-01-08T05:23:31.000Z | 2022-03-28T02:22:21.000Z | data/label.py | ParikhKadam/HybridPose | 3d112425f9b6319c8f62dfd92bb38253fe4ffdcd | [
"MIT"
] | 66 | 2020-01-21T12:58:19.000Z | 2022-03-14T07:50:01.000Z | data/label.py | ParikhKadam/HybridPose | 3d112425f9b6319c8f62dfd92bb38253fe4ffdcd | [
"MIT"
] | 64 | 2020-01-08T07:37:21.000Z | 2022-02-18T07:07:33.000Z | import argparse
import os
import cv2
import numpy as np
import pickle
import pdb
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('--fx', type=float, default=700.)
parser.add_argument('--fy', type=float, default=700.)
parser.add_argument('--px', type=float, default=320.)
parser.add_argument('--py', type=float, default=240.)
parser.add_argument('--img_h', type=int, default=480)
parser.add_argument('--img_w', type=int, default=640)
parser.add_argument('--object_name', type=str, default='ape')
parser.add_argument('--orig_path', type=str, default='../../linemod')
parser.add_argument('--pvnet_linemod_path', type=str, default='..')
args = parser.parse_args()
return args
linemod_cls_names = ['ape', 'cam', 'cat', 'duck', 'glue', 'iron', 'phone', 'benchvise',
'can', 'driller', 'eggbox', 'holepuncher', 'lamp']
def get_num_examples(object_name):
return len(list(filter(lambda x: x.endswith('jpg'), os.listdir(object_name))))
def read_pose(object_name, example_id):
filename = os.path.join(object_name, '{}_RT.pkl'.format(example_id))
with open(filename, 'rb') as f:
pkl = pickle.load(f)
RT = pkl['RT']
R = np.matrix(RT[:, :3])
T = np.matrix(RT[:, -1]).transpose()
return R, T
def read_3d_points(filename, skip_normalize=False):
with open(filename) as f:
in_vertex_list = False
vertices = []
in_mm = False
for line in f:
if in_vertex_list:
vertex = line.split()[:3]
vertex = np.array([[float(vertex[0])],
[float(vertex[1])],
[float(vertex[2])]], dtype=np.float32)
if not skip_normalize:
if in_mm:
vertex = vertex / np.float32(10) # mm -> cm
vertex = vertex / np.float32(100)
vertices.append(vertex)
if len(vertices) >= vertex_count:
break
elif line.startswith('element vertex'):
vertex_count = int(line.split()[-1])
elif line.startswith('end_header'):
in_vertex_list = True
elif line.startswith('element face'):
in_mm = True
return vertices, in_mm
def parse_symmetry(filename):
with open(filename) as f:
lines = f.readlines()
point = lines[1].split()
point = (float(point[0]), float(point[1]), float(point[2]))
normal = lines[3].split()
normal = (float(normal[0]), float(normal[1]), float(normal[2]))
return point, normal
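# Editor's note (illustrative sketch, not part of the original script):
# parse_symmetry() reads line 2 as the symmetry-plane point and line 4 as its
# normal, so symmetries.txt is expected to look roughly like (labels and values
# below are assumptions for illustration only):
#
#     point:
#     0.0 0.0 0.0
#     normal:
#     1.0 0.0 0.0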
def get_camera_intrinsic_matrix(args):
return np.matrix([[args.fx, 0, args.px],
[0, args.fy, args.py],
[0, 0, 1]], dtype=np.float32)
def nearest_nonzero_idx_v2(a, x, y):
# https://stackoverflow.com/questions/43306291/find-the-nearest-nonzero-element-and-corresponding-index-in-a-2d-array
# x: (N,)
# y: (N,)
tmp = a[x, y]
a[x, y] = 0
r, c = np.nonzero(a)
r = r.reshape((1, -1)).repeat(x.shape[0], axis=0)
c = c.reshape((1, -1)).repeat(x.shape[0], axis=0)
a[x, y] = tmp
min_idx = ((r - x.reshape((-1, 1))) ** 2 + (c - y.reshape((-1, 1))) ** 2).argmin(axis=1)
return np.array([r[0, min_idx], c[0, min_idx]]).transpose()
def fill(im_in):
# based on https://www.learnopencv.com/filling-holes-in-an-image-using-opencv-python-c/
# Threshold.
# Set values above 220 to 255.
# Set values at or below 220 to 0.
th, im_th = cv2.threshold(im_in, 220, 255, cv2.THRESH_BINARY)
# Copy the thresholded image.
im_floodfill = im_th.copy()
# Mask used to flood filling.
# Notice the size needs to be 2 pixels than the image.
h, w = im_th.shape[:2]
mask = np.zeros((h+2, w+2), np.uint8)
# Floodfill from point (0, 0)
cv2.floodFill(im_floodfill, mask, (0,0), 255)
# Invert floodfilled image
im_floodfill_inv = cv2.bitwise_not(im_floodfill)
# Combine the two images to get the foreground.
im_out = im_th | im_floodfill_inv
return im_out
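# Editor's usage sketch (not part of the original script): fill() expects a
# single-channel uint8 image whose foreground pixels are above 220 and returns
# the same mask with interior holes closed, e.g.:
#
#     mask = cv2.imread('mask.png', cv2.IMREAD_GRAYSCALE)  # hypothetical file
#     filled = fill(mask)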
def main(args):
K = get_camera_intrinsic_matrix(args)
P_list, in_mm = read_3d_points(os.path.join(args.orig_path, 'original_dataset', args.object_name, 'mesh.ply'))
P_blender_list, _ = read_3d_points(os.path.join(args.pvnet_linemod_path, args.object_name, '{}.ply'.format(args.object_name)), skip_normalize=True)
O, n = parse_symmetry(os.path.join(args.orig_path, 'symmetries', args.object_name, 'symmetries.txt'))
if in_mm: # convert to m
O = (O[0] / 1000, O[1] / 1000, O[2] / 1000)
# for each 3D point P, find its correspondence P'
P_prime_list = []
for P_idx, P in enumerate(P_list):
PO = (O[0] - P[0], O[1] - P[1], O[2] - P[2])
dot_product = PO[0] * n[0] + PO[1] * n[1] + PO[2] * n[2]
P_prime = (P[0] + 2 * dot_product * n[0], P[1] + 2 * dot_product * n[1], P[2] + 2 * dot_product * n[2])
P_prime_list.append(P_prime)
P_prime_list = np.array(P_prime_list)
def project(P, R, T):
P_RT = R * P + T
p = K * P_RT
x = int(round(p[0, 0] / p[2, 0]))
y = int(round(p[1, 0] / p[2, 0]))
return (x, y, P_RT[2, 0])
num_examples = get_num_examples(args.object_name)
keypts_3d = np.load(os.path.join(args.orig_path, 'keypoints', args.object_name, 'keypoints_3d.npy'))
keypts_2d = np.zeros((num_examples, keypts_3d.shape[0], 2), dtype=np.float32)
rotation_transform = np.matrix([[1., 0., 0.],
[0., -1., 0.],
[0., 0., -1.]])
# get translation transform
# https://github.com/zju3dv/pvnet/blob/abc3f07cfcf352df3d718f10944213e1cde02db1/lib/utils/base_utils.py#L110
orig_model = np.array(P_list)[:, :, 0]
blender_model = np.array(P_blender_list)[:, :, 0]
blender_model = np.dot(blender_model, rotation_transform.T)
translation_transform = np.mean(orig_model, axis=0) - np.mean(blender_model, axis=0)
translation_transform = translation_transform.transpose()
os.makedirs('{}_labels'.format(args.object_name), exist_ok=True)
for example_id in range(num_examples):
print('example {}/{}'.format(example_id, num_examples), end='\r')
R, T = read_pose(args.object_name, example_id)
R = np.dot(R, rotation_transform)
T = T - np.dot(R, translation_transform)
# project 3D correspondeces to 2D
correspondences = np.zeros((args.img_h, args.img_w, 2), dtype=np.int16)
z_buffer = np.zeros((args.img_h, args.img_w), dtype=np.float32)
is_filled = np.zeros((args.img_h, args.img_w), dtype=np.uint8)
sample = example_id == 0
if sample:
img = cv2.imread('{}/{}.jpg'.format(args.object_name, example_id))
mask = np.zeros((args.img_h, args.img_w), dtype=np.uint8)
for P_idx, P in enumerate(P_list):
P_prime = P_prime_list[P_idx]
(x, y, z) = project(P, R, T)
(x_prime, y_prime, _) = project(P_prime, R, T)
if y >= 0 and y < args.img_h and x >= 0 and x < args.img_w:
if is_filled[y, x] == 0 or z_buffer[y, x] > z:
# I did a simple experiment: a smaller z is closer to the camera than a bigger z
is_filled[y, x] = 1
z_buffer[y, x] = z
delta_x = x_prime - x
delta_y = y_prime - y
correspondences[y, x, 0] = delta_x
correspondences[y, x, 1] = delta_y
if sample and P_idx % 50 == 0:
# 1 sample every 50 points
# color: red, thickness: 1
img = cv2.line(img, (x, y), (x_prime, y_prime), (0, 0, 255), 1)
mask[y, x] = 255
mask = fill(mask)
cv2.imwrite('{}_labels/mask{}.png'.format(args.object_name, example_id), mask)
yx = np.argwhere((mask != 0) & (is_filled == 0.))
if yx.shape[0] > 0:
yx_ = nearest_nonzero_idx_v2(is_filled, yx[:, 0], yx[:, 1])
for i in range(yx.shape[0]):
y, x = yx[i]
y_, x_ = yx_[i]
correspondences[y, x] = correspondences[y_, x_]
np.save('{}_labels/cor{}.npy'.format(args.object_name, example_id),
correspondences)
if sample:
cv2.imwrite('cor_{}.jpg'.format(args.object_name), img)
# project 3D keypoints to 2D
for keypt_idx in range(keypts_3d.shape[0]):
(x, y, _) = project(keypts_3d[keypt_idx].reshape((3, 1)), R, T)
keypts_2d[example_id, keypt_idx, 0] = x
keypts_2d[example_id, keypt_idx, 1] = y
np.save(os.path.join('{}_labels'.format(args.object_name), 'keypoints_2d.npy'), keypts_2d)
if __name__ == '__main__':
args = parse_args()
main(args)
| 43.25 | 151 | 0.574922 |
acf1f1188bcb45518d485d3dc76ab6ab00c67280 | 18,027 | py | Python | h2o-py/tests/testdir_algos/automl/pyunit_automl_model_selection.py | uakarsh/h2o-3 | 30dc00950692cc71d8803a5ffc565aeb4bd3ac6f | [
"Apache-2.0"
] | null | null | null | h2o-py/tests/testdir_algos/automl/pyunit_automl_model_selection.py | uakarsh/h2o-3 | 30dc00950692cc71d8803a5ffc565aeb4bd3ac6f | [
"Apache-2.0"
] | null | null | null | h2o-py/tests/testdir_algos/automl/pyunit_automl_model_selection.py | uakarsh/h2o-3 | 30dc00950692cc71d8803a5ffc565aeb4bd3ac6f | [
"Apache-2.0"
] | null | null | null | from __future__ import print_function
import sys, os
import re
sys.path.insert(1, os.path.join("..","..",".."))
import h2o
import h2o.exceptions
from tests import pyunit_utils as pu
from h2o.automl import H2OAutoML
"""This test suite checks the AutoML parameters influencing the model selection pipeline"""
max_models = 5
def import_dataset(seed=0, larger=False):
df = h2o.import_file(path=pu.locate("smalldata/prostate/{}".format("prostate_complete.csv.zip" if larger else "prostate.csv")))
target = "CAPSULE"
df[target] = df[target].asfactor()
#Split frames
fr = df.split_frame(ratios=[.8,.1], seed=seed)
#Set up train, validation, and test sets
return pu.ns(train=fr[0], valid=fr[1], test=fr[2], target=target, target_idx=1)
def get_partitioned_model_names(leaderboard):
model_names = [leaderboard[i, 0] for i in range(0, (leaderboard.nrows))]
se_model_names = [m for m in model_names if m.startswith('StackedEnsemble')]
non_se_model_names = [m for m in model_names if m not in se_model_names]
return model_names, non_se_model_names, se_model_names
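# Editor's note (illustrative sketch, not part of the original test): for a
# leaderboard whose model_id column holds e.g. ['GBM_1_AutoML_1',
# 'StackedEnsemble_BestOfFamily_AutoML_1'], the helper returns the full list,
# the non-SE names (['GBM_1_AutoML_1']) and the SE names
# (['StackedEnsemble_BestOfFamily_AutoML_1']); the ids shown are assumptions.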
def test_exclude_algos():
print("AutoML doesn't train models for algos listed in exclude_algos")
ds = import_dataset()
aml = H2OAutoML(project_name="py_exclude_algos",
exclude_algos=['DRF', 'GLM'],
max_models=max_models,
seed=1)
aml.train(y=ds.target, training_frame=ds.train, validation_frame=ds.valid)
_, non_se, se = get_partitioned_model_names(aml.leaderboard)
assert not any(['DRF' in name or 'GLM' in name for name in non_se])
assert len(se) >= 1
def test_include_algos():
print("AutoML trains only models for algos listed in include_algos")
ds = import_dataset()
aml = H2OAutoML(project_name="py_include_algos",
include_algos=['GBM'],
max_models=max_models,
seed=1)
aml.train(y=ds.target, training_frame=ds.train, validation_frame=ds.valid)
_, non_se, se = get_partitioned_model_names(aml.leaderboard)
assert all(['GBM' in name for name in non_se])
assert len(se) == 0, "No StackedEnsemble should have been trained if not explicitly included in include_algos"
def test_include_exclude_algos():
print("include_algos and exclude_algos parameters are mutually exclusive")
try:
H2OAutoML(project_name="py_include_exclude_algos",
exclude_algos=['DRF', 'XGBoost'],
include_algos=['GBM'],
max_models=max_models,
seed=1)
assert False, "Should have thrown AssertionError"
except AssertionError as e:
assert "Use either `exclude_algos` or `include_algos`, not both" in str(e)
def test_bad_modeling_plan_using_full_syntax():
try:
H2OAutoML(modeling_plan=[
dict(steps=['def_1'])
])
except AssertionError as e:
assert "each definition must have a 'name' key" in str(e)
try:
H2OAutoML(modeling_plan=[
dict(name="GBM", steps=['def_1'], alias='defaults')
])
except AssertionError as e:
assert "each definition must have only 1 or 2 keys" in str(e)
try:
H2OAutoML(modeling_plan=[
dict(name="GBM", alias='all_steps')
])
except AssertionError as e:
assert "alias must be one of ['all', 'defaults', 'grids']" in str(e)
try:
H2OAutoML(modeling_plan=[
dict(name="GBM", dummy=['def_1'])
])
except AssertionError as e:
assert "steps definitions support only the following keys: name, alias, steps" in str(e)
try:
H2OAutoML(modeling_plan=[
dict(name="GBM", steps=['def_1'])
])
except AssertionError as e:
assert "each step must be a dict" in str(e)
try:
H2OAutoML(modeling_plan=[
dict(name="GBM", steps=[dict(foo='def_1')])
])
except AssertionError as e:
assert "each step must have an 'id' key" in str(e)
try:
H2OAutoML(modeling_plan=[
dict(name="GBM", steps=[dict(id='def_1', weight=3/4)])
])
except AssertionError as e:
assert "weight must be an integer" in str(e)
def test_bad_modeling_plan_using_simplified_syntax():
try:
H2OAutoML(modeling_plan=[
['GBM']
])
except h2o.exceptions.H2OTypeError:
pass
try:
H2OAutoML(modeling_plan=[
('GBM', 'defaults', ['def_1'])
])
except AssertionError:
pass
try:
H2OAutoML(modeling_plan=[
('GBM', 'dummy_alias')
])
except h2o.exceptions.H2OTypeError:
pass
try:
H2OAutoML(modeling_plan=[
('GBM', ('def_1', 'def_2'))
])
except h2o.exceptions.H2OTypeError:
pass
def test_modeling_plan_using_full_syntax():
ds = import_dataset()
aml = H2OAutoML(project_name="py_modeling_plan_full_syntax",
max_models=3,
modeling_plan=[
dict(name='GLM', steps=[dict(id='def_1')]),
dict(name='GBM', alias='grids'),
dict(name='DRF', steps=[dict(id='def_1', group=5, weight=333)]), # just testing that it is parsed correctly on backend (no model will be built due to the priority group + max_models)
dict(name='GBM', steps=[dict(id="def_1")]),
],
seed=1)
aml.train(y=ds.target, training_frame=ds.train)
print(aml.leaderboard)
_, non_se, se = get_partitioned_model_names(aml.leaderboard)
assert len(non_se) == 3
assert len(se) == 0
assert any('GLM' in name for name in non_se)
assert any('GBM' in name for name in non_se)
assert any('GBM_grid' in name for name in non_se)
def test_modeling_plan_using_simplified_syntax():
ds = import_dataset()
aml = H2OAutoML(project_name="py_modeling_plan_simple_syntax",
max_models=3,
modeling_plan=[
('DRF', ['XRT', 'def_1']),
('GBM', 'grids'),
('StackedEnsemble',)
],
seed=1)
aml.train(y=ds.target, training_frame=ds.train)
print(aml.leaderboard)
_, non_se, se = get_partitioned_model_names(aml.leaderboard)
assert len(non_se) == 3
assert len(se) == 2
assert any('DRF' in name for name in non_se)
assert any('XRT' in name for name in non_se)
assert any('GBM_grid' in name for name in non_se)
assert len([name for name in se if 'BestOfFamily' in name]) == 2 # we should get a BoF for group1 + one after GBM grid group.
def test_modeling_plan_using_minimal_syntax():
ds = import_dataset()
aml = H2OAutoML(project_name="py_modeling_plan_minimal_syntax",
max_models=5,
modeling_plan=['DRF', 'GLM', ('GBM', 'grids'), 'StackedEnsemble'],
seed=1)
aml.train(y=ds.target, training_frame=ds.train)
_, non_se, se = get_partitioned_model_names(aml.leaderboard)
assert len(non_se) == 5
assert len(se) == 2
assert any('DRF' in name for name in non_se)
assert any('XRT' in name for name in non_se)
assert any('GLM' in name for name in non_se)
assert any('GBM_grid' in name for name in non_se)
assert any('BestOfFamily' in name for name in se)
assert any('AllModels' in name for name in se)
def test_modeling_steps():
ds = import_dataset()
aml = H2OAutoML(project_name="py_modeling_steps",
max_models=5,
modeling_plan=['DRF',
dict(name='GBM', steps=[
dict(id='def_3', group=2),
dict(id='grid_1', weight=77)
]),
('GLM', 'defaults'),
'StackedEnsemble'],
seed=1)
aml.train(y=ds.target, training_frame=ds.train)
print(aml.leaderboard)
# we should now see the detailed steps sorted in their execution order.
print(aml.modeling_steps)
assert aml.modeling_steps == [
{'name': 'DRF', 'steps': [{'id': 'def_1', 'group': 1, 'weight': 10},
{'id': 'XRT', 'group': 1, 'weight': 10}]},
{'name': 'GLM', 'steps': [{'id': 'def_1', 'group': 1, 'weight': 10}]},
{'name': 'StackedEnsemble', 'steps': [{'id': 'best_of_family_1', 'group': 1, 'weight': 10}]}, # no all_1 as XRT is interpreted as not being of the same family as DRF (legacy decision).
{'name': 'GBM', 'steps': [{'id': 'def_3', 'group': 2, 'weight': 10},
{'id': 'grid_1', 'group': 2, 'weight': 77}]}, # grids are 2nd group by default
{'name': 'StackedEnsemble', 'steps': [{'id': 'best_of_family_2', 'group': 2, 'weight': 10},
{'id': 'all_2', 'group': 2, 'weight': 10}]}
]
new_aml = H2OAutoML(project_name="py_reinject_modeling_steps",
max_models=5,
modeling_plan=aml.modeling_steps,
seed=1)
new_aml.train(y=ds.target, training_frame=ds.train)
print(new_aml.leaderboard)
assert aml.modeling_steps == new_aml.modeling_steps
def test_exclude_algos_is_applied_on_top_of_modeling_plan():
ds = import_dataset()
aml = H2OAutoML(project_name="py_modeling_plan_minimal_syntax",
max_models=5,
modeling_plan=['DRF', 'GLM', ('GBM', 'grids'), 'StackedEnsemble'],
exclude_algos=['GBM', 'StackedEnsemble'],
seed=1)
aml.train(y=ds.target, training_frame=ds.train)
_, non_se, se = get_partitioned_model_names(aml.leaderboard)
assert len(non_se) == 3
assert len(se) == 0
def test_monotone_constraints():
ds = import_dataset()
aml = H2OAutoML(project_name="py_monotone_constraints",
monotone_constraints=dict(AGE=1, VOL=-1), # constraints just for the sake of testing
max_models=6,
seed=1)
aml.train(y=ds.target, training_frame=ds.train)
model_names, _, _ = get_partitioned_model_names(aml.leaderboard)
models_supporting_monotone_constraints = [n for n in model_names if re.match(r"GBM|XGBoost", n)]
assert len(models_supporting_monotone_constraints) < len(model_names), \
"models not supporting the constraint should not have been skipped"
for m in models_supporting_monotone_constraints:
model = h2o.get_model(m)
value = next(v['actual'] for n, v in model.params.items() if n == 'monotone_constraints')
assert isinstance(value, list)
assert len(value) == 2
age = next((v for v in value if v['key'] == 'AGE'), None)
assert age is not None
assert age['value'] == 1.0
vol = next((v for v in value if v['key'] == 'VOL'), None)
assert vol is not None
assert vol['value'] == -1.0
def test_monotone_constraints_can_be_passed_as_algo_parameter():
ds = import_dataset()
aml = H2OAutoML(project_name="py_monotone_constraints",
algo_parameters=dict(
monotone_constraints=dict(AGE=1, VOL=-1), # constraints just for the sake of testing
# ntrees=10,
),
max_models=6,
seed=1)
aml.train(y=ds.target, training_frame=ds.train)
model_names, _, _ = get_partitioned_model_names(aml.leaderboard)
models_supporting_monotone_constraints = [n for n in model_names if re.match(r"GBM|XGBoost", n)]
assert len(models_supporting_monotone_constraints) < len(model_names), \
"models not supporting the constraint should not have been skipped"
for m in models_supporting_monotone_constraints:
model = h2o.get_model(m)
value = next(v['actual'] for n, v in model.params.items() if n == 'monotone_constraints')
# print(param)
assert isinstance(value, list)
assert len(value) == 2
age = next((v for v in value if v['key'] == 'AGE'), None)
assert age is not None
assert age['value'] == 1.0
vol = next((v for v in value if v['key'] == 'VOL'), None)
assert vol is not None
assert vol['value'] == -1.0
# models_supporting_ntrees = [n for n in model_names if re.match(r"DRF|GBM|XGBoost|XRT", n)]
# assert len(models_supporting_ntrees) > 0
# for m in models_supporting_ntrees:
# model = h2o.get_model(m)
# value = next(v['actual'] for n, v in model.params.items() if n == 'ntrees')
# assert value == 10
def test_algo_parameter_can_be_applied_only_to_a_specific_algo():
ds = import_dataset()
aml = H2OAutoML(project_name="py_specific_algo_param",
algo_parameters=dict(
GBM__monotone_constraints=dict(AGE=1)
),
max_models=6,
seed=1)
aml.train(y=ds.target, training_frame=ds.train)
model_names, _, _ = get_partitioned_model_names(aml.leaderboard)
models_supporting_monotone_constraints = [n for n in model_names if re.match(r"GBM|XGBoost", n)]
assert next((m for m in models_supporting_monotone_constraints if m.startswith('GBM')), None), "There should be at least one GBM model"
for m in models_supporting_monotone_constraints:
model = h2o.get_model(m)
mc_value = next(v['actual'] for n, v in model.params.items() if n == 'monotone_constraints')
if m.startswith('GBM'):
assert isinstance(mc_value, list)
age = next((v for v in mc_value if v['key'] == 'AGE'), None)
assert age is not None
assert age['value'] == 1.0
else:
assert mc_value is None
def test_cannot_set_unauthorized_algo_parameter():
ds = import_dataset()
aml = H2OAutoML(project_name="py_unauthorized_algo_param",
algo_parameters=dict(
score_tree_interval=7
),
max_models=6,
seed=1)
try:
aml.train(y=ds.target, training_frame=ds.train)
except h2o.exceptions.H2OResponseError as e:
assert "algo_parameters: score_tree_interval" in str(e)
def test_exploitation_disabled():
ds = import_dataset()
aml = H2OAutoML(project_name="py_exploitation_ratio_disabled",
exploitation_ratio=.0,
max_models=6,
seed=1)
aml.train(y=ds.target, training_frame=ds.train)
assert 'start_GBM_lr_annealing' not in aml.training_info
assert 'start_XGBoost_lr_search' not in aml.training_info
def test_exploitation_doesnt_impact_max_models():
ds = import_dataset()
aml = H2OAutoML(project_name="py_exploitation_ratio_max_models",
exploitation_ratio=.1,
max_models=6,
seed=1, verbosity='debug')
aml.train(y=ds.target, training_frame=ds.train)
print(aml.leaderboard)
_, non_se, se = get_partitioned_model_names(aml.leaderboard)
assert len(non_se) == 6
assert len(se) == 5 # that's because we have 2 additional SEs after exploitation phase
print(aml.training_info)
assert 'start_GBM_lr_annealing' in aml.training_info
assert 'start_XGBoost_lr_search' in aml.training_info
def test_exploitation_impacts_exploration_duration():
ds = import_dataset()
planned_duration = 60
aml = H2OAutoML(project_name="py_exploitation_ratio_max_runtime",
exploitation_ratio=.5, # excessive ratio on purpose, due to training overheads in multinode
exclude_algos=['DeepLearning', 'XGBoost'], # removing some algos for the same reason
max_runtime_secs=planned_duration,
seed=1,
verbosity='info'
)
aml.train(y=ds.target, training_frame=ds.train)
automl_start = int(aml.training_info['start_epoch'])
assert 'start_GBM_lr_annealing' in aml.training_info
# assert 'start_XGBoost_lr_search' in aml.training_info
first_exploitation_step = 'start_GBM_lr_annealing'
after_exploitation_step = 'start_completion_GBM_grid_1'
if first_exploitation_step in aml.training_info and after_exploitation_step in aml.training_info:
exploitation_start = int(aml.training_info[first_exploitation_step])
exploration_duration = exploitation_start - automl_start
after_start = int(aml.training_info[after_exploitation_step])
exploitation_duration = after_start - exploitation_start
# can't reliably check duration ratio
assert 0 < exploration_duration < planned_duration
print(aml.leaderboard)
print(exploitation_duration)
print(exploration_duration)
assert 0 < exploitation_duration < exploration_duration
else:
print(aml.leaderboard)
print("budget time was too small to start and complete exploitation")
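# Hand the full list of test functions to the shared test-runner helper (`pu`),
# which executes them in order when this module is run.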
pu.run_tests([
test_exclude_algos,
test_include_algos,
test_include_exclude_algos,
test_bad_modeling_plan_using_full_syntax,
test_bad_modeling_plan_using_simplified_syntax,
test_modeling_plan_using_full_syntax,
test_modeling_plan_using_simplified_syntax,
test_modeling_plan_using_minimal_syntax,
test_modeling_steps,
test_exclude_algos_is_applied_on_top_of_modeling_plan,
test_monotone_constraints,
test_monotone_constraints_can_be_passed_as_algo_parameter,
test_algo_parameter_can_be_applied_only_to_a_specific_algo,
test_cannot_set_unauthorized_algo_parameter,
test_exploitation_disabled,
test_exploitation_doesnt_impact_max_models,
test_exploitation_impacts_exploration_duration,
])
| 41.251716 | 207 | 0.626061 |
acf1f148d8aee4f844bce56366b8d45ddeb1792f | 3,222 | py | Python | WildCardIndex.py | Juan-Chen45/Information-Retrieval-Project | 0d154ee18330f946736ec9fd9c9a8394f34aebed | [
"MIT"
] | null | null | null | WildCardIndex.py | Juan-Chen45/Information-Retrieval-Project | 0d154ee18330f946736ec9fd9c9a8394f34aebed | [
"MIT"
] | null | null | null | WildCardIndex.py | Juan-Chen45/Information-Retrieval-Project | 0d154ee18330f946736ec9fd9c9a8394f34aebed | [
"MIT"
] | null | null | null | import _pickle as Cpickle
import re
import os
import json
'''
Wildcard matching is done directly with regular expressions.
Building a 3-gram index would take too much space, so regex matching is used instead.
'''
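# How the matching works (see findwordsdoc() and search() below): a wildcard term
# such as "bi*in" is rewritten to the regex "bi.*in" and tried against every
# indexed term with re.match, e.g. "bitcoin" matches, so its posting set of
# document ids is collected and then combined with the AND/OR/NOT operators.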
class WildCardIndex:
wildcard_index = None
@staticmethod
def init():
WildCardIndex.wildcard_index = {}
listFile = os.walk(r"IRProjectdata")
reg = r"data(\d+)\.json"
for dirPath, dirName, fileName in listFile:
fileName = sorted(fileName, key=lambda x: int(re.search(reg, x).group(1)))
for i in range(len(fileName)):
print(os.path.join(dirPath, fileName[i]))
WildCardIndex.construct(os.path.join(dirPath, fileName[i]), i + 1)
WildCardIndex.storeindex()
WildCardIndex.releasespace()
@staticmethod
def construct(fileName, doc_id):
with open(fileName, "r", encoding="utf-8")as f:
temp = json.load(f)["text"].split()
for i in range(len(temp)):
if temp[i] in WildCardIndex.wildcard_index:
WildCardIndex.wildcard_index[temp[i]].add(doc_id)
else:
                    # use a one-element set literal; set(doc_id) would raise TypeError for an int doc id
                    WildCardIndex.wildcard_index[temp[i]] = {doc_id}
@staticmethod
def releasespace():
WildCardIndex.wildcard_index = None
@staticmethod
def loadindex():
with open(r"Index/wildcardindex", "rb")as f:
WildCardIndex.wildcard_index = Cpickle.load(f)
@staticmethod
def storeindex():
with open(r"Index/wildcardindex", "wb")as f:
Cpickle.dump(WildCardIndex.wildcard_index, f)
@staticmethod
def findwordsdoc(rg):
ans = set()
for k in WildCardIndex.wildcard_index.keys():
if re.match(rg, k):
ans = ans | WildCardIndex.wildcard_index[k]
return ans
@staticmethod
def AND(currList, newList):
return currList & newList
@staticmethod
def OR(currList, newList):
return currList | newList
@staticmethod
def NOT(currList, newList):
return currList - newList
@staticmethod
def search(query):
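        # Expected query shape (inferred from the parsing below): terms separated by
        # boolean operators and evaluated left to right, e.g. "bitcoin AND block* NOT mining";
        # terms containing "*" are expanded through regex matching over the vocabulary.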
if WildCardIndex.wildcard_index is None:
WildCardIndex.loadindex()
query_list = query.split()
str_list = [str for str in query_list if str != "AND" and str != "OR" and str != "NOT"]
str_doc_list = []
for str in str_list:
if "*" in str:
str_doc_list.append(WildCardIndex.findwordsdoc(str.replace("*",".*")))
else:
                # default to an empty set so the AND/OR/NOT set operations below still work
                str_doc_list.append(WildCardIndex.wildcard_index.get(str, set()))
i = 1
j = 1
rtn = str_doc_list[0]
while j < len(query_list) and i < len(str_doc_list):
if query_list[j] == "AND":
rtn = WildCardIndex.AND(rtn, str_doc_list[i])
elif query_list[j] == "OR":
rtn = WildCardIndex.OR(rtn, str_doc_list[i])
elif query_list[j] == "NOT":
                rtn = WildCardIndex.NOT(rtn, str_doc_list[i])  # subtract the posting set, not the raw query token
i += 1
j += 2
return list(rtn)
if __name__ == "__main__":
# WildCardIndex.init()
print(WildCardIndex.search("bi*in"))
| 31.588235 | 96 | 0.563315 |
acf1f33f315801d69ae4f6e720817d8c002dd029 | 18,896 | py | Python | tests/tracer/test_writer.py | askkaz/dd-trace-py | 5e6fc1ba230bc30d089b3d90eafe00edbf52160a | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | tests/tracer/test_writer.py | askkaz/dd-trace-py | 5e6fc1ba230bc30d089b3d90eafe00edbf52160a | [
"Apache-2.0",
"BSD-3-Clause"
] | 9 | 2021-07-26T01:22:38.000Z | 2022-03-21T19:20:53.000Z | tests/tracer/test_writer.py | askkaz/dd-trace-py | 5e6fc1ba230bc30d089b3d90eafe00edbf52160a | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | import os
import socket
import tempfile
import threading
import time
import mock
import msgpack
import pytest
from six.moves import BaseHTTPServer
from six.moves import socketserver
from ddtrace.constants import KEEP_SPANS_RATE_KEY
from ddtrace.internal.compat import PY3
from ddtrace.internal.compat import get_connection_response
from ddtrace.internal.compat import httplib
from ddtrace.internal.uds import UDSHTTPConnection
from ddtrace.internal.writer import AgentWriter
from ddtrace.internal.writer import LogWriter
from ddtrace.internal.writer import Response
from ddtrace.internal.writer import _human_size
from ddtrace.span import Span
from tests.utils import AnyInt
from tests.utils import BaseTestCase
class DummyOutput:
def __init__(self):
self.entries = []
def write(self, message):
self.entries.append(message)
def flush(self):
pass
class AgentWriterTests(BaseTestCase):
N_TRACES = 11
def test_metrics_disabled(self):
statsd = mock.Mock()
writer = AgentWriter(agent_url="http://asdf:1234", dogstatsd=statsd, report_metrics=False)
for i in range(10):
writer.write(
[Span(tracer=None, name="name", trace_id=i, span_id=j, parent_id=j - 1 or None) for j in range(5)]
)
writer.stop()
writer.join()
statsd.increment.assert_not_called()
statsd.distribution.assert_not_called()
def test_metrics_bad_endpoint(self):
statsd = mock.Mock()
writer = AgentWriter(agent_url="http://asdf:1234", dogstatsd=statsd, report_metrics=True)
for i in range(10):
writer.write(
[Span(tracer=None, name="name", trace_id=i, span_id=j, parent_id=j - 1 or None) for j in range(5)]
)
writer.stop()
writer.join()
statsd.distribution.assert_has_calls(
[
mock.call("datadog.tracer.buffer.accepted.traces", 10, tags=[]),
mock.call("datadog.tracer.buffer.accepted.spans", 50, tags=[]),
mock.call("datadog.tracer.http.requests", writer.RETRY_ATTEMPTS, tags=[]),
mock.call("datadog.tracer.http.errors", 1, tags=["type:err"]),
mock.call("datadog.tracer.http.dropped.bytes", AnyInt(), tags=[]),
],
any_order=True,
)
def test_metrics_trace_too_big(self):
statsd = mock.Mock()
writer = AgentWriter(agent_url="http://asdf:1234", dogstatsd=statsd, report_metrics=True)
for i in range(10):
writer.write(
[Span(tracer=None, name="name", trace_id=i, span_id=j, parent_id=j - 1 or None) for j in range(5)]
)
writer.write(
[Span(tracer=None, name="a" * 5000, trace_id=i, span_id=j, parent_id=j - 1 or None) for j in range(2 ** 10)]
)
writer.stop()
writer.join()
statsd.distribution.assert_has_calls(
[
mock.call("datadog.tracer.buffer.accepted.traces", 10, tags=[]),
mock.call("datadog.tracer.buffer.accepted.spans", 50, tags=[]),
mock.call("datadog.tracer.buffer.dropped.traces", 1, tags=["reason:t_too_big"]),
mock.call("datadog.tracer.buffer.dropped.bytes", AnyInt(), tags=["reason:t_too_big"]),
mock.call("datadog.tracer.http.requests", writer.RETRY_ATTEMPTS, tags=[]),
mock.call("datadog.tracer.http.errors", 1, tags=["type:err"]),
mock.call("datadog.tracer.http.dropped.bytes", AnyInt(), tags=[]),
],
any_order=True,
)
def test_metrics_multi(self):
statsd = mock.Mock()
writer = AgentWriter(agent_url="http://asdf:1234", dogstatsd=statsd, report_metrics=True)
for i in range(10):
writer.write(
[Span(tracer=None, name="name", trace_id=i, span_id=j, parent_id=j - 1 or None) for j in range(5)]
)
writer.flush_queue()
statsd.distribution.assert_has_calls(
[
mock.call("datadog.tracer.buffer.accepted.traces", 10, tags=[]),
mock.call("datadog.tracer.buffer.accepted.spans", 50, tags=[]),
mock.call("datadog.tracer.http.requests", writer.RETRY_ATTEMPTS, tags=[]),
mock.call("datadog.tracer.http.errors", 1, tags=["type:err"]),
mock.call("datadog.tracer.http.dropped.bytes", AnyInt(), tags=[]),
],
any_order=True,
)
statsd.reset_mock()
for i in range(10):
writer.write(
[Span(tracer=None, name="name", trace_id=i, span_id=j, parent_id=j - 1 or None) for j in range(5)]
)
writer.stop()
writer.join()
statsd.distribution.assert_has_calls(
[
mock.call("datadog.tracer.buffer.accepted.traces", 10, tags=[]),
mock.call("datadog.tracer.buffer.accepted.spans", 50, tags=[]),
mock.call("datadog.tracer.http.requests", writer.RETRY_ATTEMPTS, tags=[]),
mock.call("datadog.tracer.http.errors", 1, tags=["type:err"]),
mock.call("datadog.tracer.http.dropped.bytes", AnyInt(), tags=[]),
],
any_order=True,
)
def test_write_sync(self):
statsd = mock.Mock()
writer = AgentWriter(agent_url="http://asdf:1234", dogstatsd=statsd, report_metrics=True, sync_mode=True)
writer.write([Span(tracer=None, name="name", trace_id=1, span_id=j, parent_id=j - 1 or None) for j in range(5)])
statsd.distribution.assert_has_calls(
[
mock.call("datadog.tracer.buffer.accepted.traces", 1, tags=[]),
mock.call("datadog.tracer.buffer.accepted.spans", 5, tags=[]),
mock.call("datadog.tracer.http.requests", writer.RETRY_ATTEMPTS, tags=[]),
mock.call("datadog.tracer.http.errors", 1, tags=["type:err"]),
mock.call("datadog.tracer.http.dropped.bytes", AnyInt(), tags=[]),
],
any_order=True,
)
def test_drop_reason_bad_endpoint(self):
statsd = mock.Mock()
writer_metrics_reset = mock.Mock()
writer = AgentWriter(agent_url="http://asdf:1234", dogstatsd=statsd, report_metrics=False)
writer._metrics_reset = writer_metrics_reset
for i in range(10):
writer.write(
[Span(tracer=None, name="name", trace_id=i, span_id=j, parent_id=j - 1 or None) for j in range(5)]
)
writer.stop()
writer.join()
writer_metrics_reset.assert_called_once()
assert 1 == writer._metrics["http.errors"]["count"]
assert 10 == writer._metrics["http.dropped.traces"]["count"]
def test_drop_reason_trace_too_big(self):
statsd = mock.Mock()
writer_metrics_reset = mock.Mock()
writer = AgentWriter(agent_url="http://asdf:1234", dogstatsd=statsd, report_metrics=False)
writer._metrics_reset = writer_metrics_reset
for i in range(10):
writer.write(
[Span(tracer=None, name="name", trace_id=i, span_id=j, parent_id=j - 1 or None) for j in range(5)]
)
writer.write(
[Span(tracer=None, name="a" * 5000, trace_id=i, span_id=j, parent_id=j - 1 or None) for j in range(2 ** 10)]
)
writer.stop()
writer.join()
writer_metrics_reset.assert_called_once()
assert 1 == writer._metrics["buffer.dropped.traces"]["count"]
assert ["reason:t_too_big"] == writer._metrics["buffer.dropped.traces"]["tags"]
def test_drop_reason_buffer_full(self):
statsd = mock.Mock()
writer_metrics_reset = mock.Mock()
writer = AgentWriter(agent_url="http://asdf:1234", buffer_size=5300, dogstatsd=statsd, report_metrics=False)
writer._metrics_reset = writer_metrics_reset
for i in range(10):
writer.write(
[Span(tracer=None, name="name", trace_id=i, span_id=j, parent_id=j - 1 or None) for j in range(5)]
)
writer.write([Span(tracer=None, name="a", trace_id=i, span_id=j, parent_id=j - 1 or None) for j in range(5)])
writer.stop()
writer.join()
writer_metrics_reset.assert_called_once()
assert 1 == writer._metrics["buffer.dropped.traces"]["count"]
assert ["reason:full"] == writer._metrics["buffer.dropped.traces"]["tags"]
def test_drop_reason_encoding_error(self):
n_traces = 10
statsd = mock.Mock()
writer_encoder = mock.Mock()
writer_encoder.__len__ = (lambda *args: n_traces).__get__(writer_encoder)
writer_metrics_reset = mock.Mock()
writer_encoder.encode.side_effect = Exception
writer = AgentWriter(agent_url="http://asdf:1234", dogstatsd=statsd, report_metrics=False)
writer._encoder = writer_encoder
writer._metrics_reset = writer_metrics_reset
for i in range(n_traces):
writer.write(
[Span(tracer=None, name="name", trace_id=i, span_id=j, parent_id=j - 1 or None) for j in range(5)]
)
writer.stop()
writer.join()
writer_metrics_reset.assert_called_once()
assert 10 == writer._metrics["encoder.dropped.traces"]["count"]
def test_keep_rate(self):
statsd = mock.Mock()
writer_run_periodic = mock.Mock()
writer_put = mock.Mock()
writer_put.return_value = Response(status=200)
writer = AgentWriter(agent_url="http://asdf:1234", dogstatsd=statsd, report_metrics=False)
writer.run_periodic = writer_run_periodic
writer._put = writer_put
traces = [
[Span(tracer=None, name="name", trace_id=i, span_id=j, parent_id=j - 1 or None) for j in range(5)]
for i in range(4)
]
traces_too_big = [
[Span(tracer=None, name="a" * 5000, trace_id=i, span_id=j, parent_id=j - 1 or None) for j in range(2 ** 10)]
for i in range(4)
]
# 1. We write 4 traces successfully.
for trace in traces:
writer.write(trace)
writer.flush_queue()
payload = msgpack.unpackb(writer_put.call_args.args[0])
# No previous drops.
assert 0.0 == writer._drop_sma.get()
# 4 traces written.
assert 4 == len(payload)
# 100% of traces kept (refers to the past).
# No traces sent before now so 100% kept.
for trace in payload:
assert 1.0 == trace[0]["metrics"].get(KEEP_SPANS_RATE_KEY, -1)
# 2. We fail to write 4 traces because of size limitation.
for trace in traces_too_big:
writer.write(trace)
writer.flush_queue()
# 50% of traces were dropped historically.
# 4 successfully written before and 4 dropped now.
assert 0.5 == writer._drop_sma.get()
# put not called since no new traces are available.
writer_put.assert_called_once()
# 3. We write 2 traces successfully.
for trace in traces[:2]:
writer.write(trace)
writer.flush_queue()
payload = msgpack.unpackb(writer_put.call_args.args[0])
# 40% of traces were dropped historically.
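        # (4 traces dropped out of the 10 submitted so far: 4 ok + 4 dropped + 2 ok -> 4/10 = 0.4)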
assert 0.4 == writer._drop_sma.get()
# 2 traces written.
assert 2 == len(payload)
# 50% of traces kept (refers to the past).
# We had 4 successfully written and 4 dropped.
for trace in payload:
assert 0.5 == trace[0]["metrics"].get(KEEP_SPANS_RATE_KEY, -1)
# 4. We write 1 trace successfully and fail to write 3.
writer.write(traces[0])
for trace in traces_too_big[:3]:
writer.write(trace)
writer.flush_queue()
payload = msgpack.unpackb(writer_put.call_args.args[0])
# 50% of traces were dropped historically.
assert 0.5 == writer._drop_sma.get()
# 1 trace written.
assert 1 == len(payload)
# 60% of traces kept (refers to the past).
# We had 4 successfully written, then 4 dropped, then 2 written.
for trace in payload:
assert 0.6 == trace[0]["metrics"].get(KEEP_SPANS_RATE_KEY, -1)
class LogWriterTests(BaseTestCase):
N_TRACES = 11
def create_writer(self):
self.output = DummyOutput()
writer = LogWriter(out=self.output)
for i in range(self.N_TRACES):
writer.write(
[Span(tracer=None, name="name", trace_id=i, span_id=j, parent_id=j - 1 or None) for j in range(7)]
)
return writer
def test_log_writer(self):
self.create_writer()
self.assertEqual(len(self.output.entries), self.N_TRACES)
def test_humansize():
assert _human_size(0) == "0B"
assert _human_size(999) == "999B"
assert _human_size(1000) == "1KB"
assert _human_size(10000) == "10KB"
assert _human_size(100000) == "100KB"
assert _human_size(1000000) == "1MB"
assert _human_size(10000000) == "10MB"
assert _human_size(1000000000) == "1GB"
class _BaseHTTPRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
error_message_format = "%(message)s\n"
error_content_type = "text/plain"
@staticmethod
def log_message(format, *args): # noqa: A002
pass
class _APIEndpointRequestHandlerTest(_BaseHTTPRequestHandler):
expected_path_prefix = None
def do_PUT(self):
if self.expected_path_prefix is not None:
assert self.path.startswith(self.expected_path_prefix)
self.send_error(200, "OK")
class _TimeoutAPIEndpointRequestHandlerTest(_BaseHTTPRequestHandler):
def do_PUT(self):
# This server sleeps longer than our timeout
time.sleep(5)
class _ResetAPIEndpointRequestHandlerTest(_BaseHTTPRequestHandler):
def do_PUT(self):
return
_HOST = "0.0.0.0"
_PORT = 8743
_TIMEOUT_PORT = _PORT + 1
_RESET_PORT = _TIMEOUT_PORT + 1
class UDSHTTPServer(socketserver.UnixStreamServer, BaseHTTPServer.HTTPServer):
def server_bind(self):
BaseHTTPServer.HTTPServer.server_bind(self)
def _make_uds_server(path, request_handler):
server = UDSHTTPServer(path, request_handler)
t = threading.Thread(target=server.serve_forever)
# Set daemon just in case something fails
t.daemon = True
t.start()
# Wait for the server to start
resp = None
while resp != 200:
conn = UDSHTTPConnection(server.server_address, _HOST, 2019)
try:
conn.request("PUT", "/")
resp = get_connection_response(conn).status
finally:
conn.close()
time.sleep(0.01)
return server, t
@pytest.fixture
def endpoint_uds_server():
socket_name = tempfile.mktemp()
handler = _APIEndpointRequestHandlerTest
server, thread = _make_uds_server(socket_name, handler)
handler.expected_path_prefix = "/v0."
try:
yield server
finally:
handler.expected_path_prefix = None
server.shutdown()
thread.join()
os.unlink(socket_name)
def _make_server(port, request_handler):
server = BaseHTTPServer.HTTPServer((_HOST, port), request_handler)
t = threading.Thread(target=server.serve_forever)
# Set daemon just in case something fails
t.daemon = True
t.start()
return server, t
@pytest.fixture(scope="module")
def endpoint_test_timeout_server():
server, thread = _make_server(_TIMEOUT_PORT, _TimeoutAPIEndpointRequestHandlerTest)
try:
yield thread
finally:
server.shutdown()
thread.join()
@pytest.fixture(scope="module")
def endpoint_test_reset_server():
server, thread = _make_server(_RESET_PORT, _ResetAPIEndpointRequestHandlerTest)
try:
yield thread
finally:
server.shutdown()
thread.join()
@pytest.fixture
def endpoint_assert_path():
handler = _APIEndpointRequestHandlerTest
server, thread = _make_server(_PORT, handler)
def configure(expected_path_prefix=None):
handler.expected_path_prefix = expected_path_prefix
return thread
try:
yield configure
finally:
handler.expected_path_prefix = None
server.shutdown()
thread.join()
def test_agent_url_path(endpoint_assert_path):
# test without base path
endpoint_assert_path("/v0.")
writer = AgentWriter(agent_url="http://%s:%s/" % (_HOST, _PORT))
writer._encoder.put([Span(None, "foobar")])
writer.flush_queue(raise_exc=True)
# test without base path nor trailing slash
writer = AgentWriter(agent_url="http://%s:%s" % (_HOST, _PORT))
writer._encoder.put([Span(None, "foobar")])
writer.flush_queue(raise_exc=True)
# test with a base path
endpoint_assert_path("/test/v0.")
writer = AgentWriter(agent_url="http://%s:%s/test/" % (_HOST, _PORT))
writer._encoder.put([Span(None, "foobar")])
writer.flush_queue(raise_exc=True)
def test_flush_connection_timeout_connect():
writer = AgentWriter(agent_url="http://%s:%s" % (_HOST, 2019))
if PY3:
exc_type = OSError
else:
exc_type = socket.error
with pytest.raises(exc_type):
writer._encoder.put([Span(None, "foobar")])
writer.flush_queue(raise_exc=True)
def test_flush_connection_timeout(endpoint_test_timeout_server):
writer = AgentWriter(agent_url="http://%s:%s" % (_HOST, _TIMEOUT_PORT))
with pytest.raises(socket.timeout):
writer._encoder.put([Span(None, "foobar")])
writer.flush_queue(raise_exc=True)
def test_flush_connection_reset(endpoint_test_reset_server):
writer = AgentWriter(agent_url="http://%s:%s" % (_HOST, _RESET_PORT))
if PY3:
exc_types = (httplib.BadStatusLine, ConnectionResetError)
else:
exc_types = (httplib.BadStatusLine,)
with pytest.raises(exc_types):
writer._encoder.put([Span(None, "foobar")])
writer.flush_queue(raise_exc=True)
def test_flush_connection_uds(endpoint_uds_server):
writer = AgentWriter(agent_url="unix://%s" % endpoint_uds_server.server_address)
writer._encoder.put([Span(None, "foobar")])
writer.flush_queue(raise_exc=True)
def test_flush_queue_raise():
writer = AgentWriter(agent_url="http://dne:1234")
# Should not raise
writer.write([])
writer.flush_queue(raise_exc=False)
error = OSError if PY3 else IOError
with pytest.raises(error):
writer.write([])
writer.flush_queue(raise_exc=True)
def test_racing_start():
writer = AgentWriter(agent_url="http://dne:1234")
def do_write(i):
writer.write([Span(None, str(i))])
ts = [threading.Thread(target=do_write, args=(i,)) for i in range(100)]
for t in ts:
t.start()
for t in ts:
t.join()
assert len(writer._encoder) == 100
| 34.927911 | 120 | 0.631774 |
acf1f3a8ca5899f50c0e1910efa9219bf1b6f2dd | 1,422 | py | Python | setup.py | klihub/gen-config | 29c1b5114247190ca1101bc22c53ac28997d627d | [
"BSD-3-Clause"
] | null | null | null | setup.py | klihub/gen-config | 29c1b5114247190ca1101bc22c53ac28997d627d | [
"BSD-3-Clause"
] | null | null | null | setup.py | klihub/gen-config | 29c1b5114247190ca1101bc22c53ac28997d627d | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python3
from distutils.core import setup
import sys, os, glob
data_files = []
profile_dirs = glob.glob('src/profiles/*')
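# Each profile's modules under src/profiles/<name>/modules/*.py are collected below
# and installed as data files under share/gen-config/profiles/<name>/modules/.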
for p in profile_dirs:
module_files = glob.glob(os.path.join(p, 'modules', '*.py'))
if not module_files:
continue
profile = p.split('src/profiles/')[1]
profile_dir = os.path.join('share', 'gen-config',
'profiles', profile, 'modules')
modules = [x for x in module_files]
data_files.append((profile_dir, modules))
hook_dst = os.path.join('share', 'gen-config', 'hooks')
hook_src = os.path.join('src', 'hooks')
hooks = glob.glob(os.path.join(hook_src, '???*'))
if hooks:
data_files.append((hook_dst, hooks))
#service_dst = os.path.join('lib', 'systemd', 'system')
#service_src = os.path.join('src', 'systemd')
#services = glob.glob(os.path.join(service_src, '*.service'))
#if services:
# data_files.append((service_dst, services))
for df in data_files:
print('data file set %s: %s' % (df[0], df[1]))
setup(name = 'genconfig',
version = '0.0',
description = 'Configuration Generator/General Configurator Framework',
author = 'Krisztian Litkey',
author_email = 'kli@iki.fi',
url = 'https://github.com/klihub/gen-config.git',
packages = ['genconfig'],
scripts = ['src/gen-config'],
package_dir = { 'genconfig': 'src/genconfig' },
data_files = data_files,
)
| 31.6 | 77 | 0.636428 |
acf1f41555f4f270b2849f5b7bad501045523df1 | 615 | py | Python | solr-admin-app/app.py | ericminio/namex | 09da6b5f6476494d13ec2243934c51e0423340cd | [
"Apache-2.0"
] | null | null | null | solr-admin-app/app.py | ericminio/namex | 09da6b5f6476494d13ec2243934c51e0423340cd | [
"Apache-2.0"
] | null | null | null | solr-admin-app/app.py | ericminio/namex | 09da6b5f6476494d13ec2243934c51e0423340cd | [
"Apache-2.0"
] | null | null | null |
import logging
import dotenv
import monkeypatch
import solr_admin
# Load all the environment variables from a .env file located in the nearest directory above.
dotenv.load_dotenv(dotenv.find_dotenv(), override=True)
# Leave this as DEBUG for now.
logging.basicConfig(level=logging.DEBUG)
# Do the unpleasant but necessary library monkeypatching.
monkeypatch.patch_ca_certs()
# Listen on all interfaces, and the catalog Python container expects the application to be on 8080.
application = solr_admin.create_application()
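# When run directly (e.g. `python app.py`) the block below starts the framework's
# built-in development server; in the container the `application` object above is
# served by the WSGI runner instead.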
if __name__ == '__main__':
application.run(host='0.0.0.0', port=8080, debug=True)
| 26.73913 | 99 | 0.788618 |
acf1f4768116dc3a5e88ad7a995a3975089905d1 | 1,968 | py | Python | invenio_app_ils/patrons/cli.py | masonproffitt/invenio-app-ils | 81dd12aa774d7d70096de77cc526d9b4ca614437 | [
"MIT"
] | null | null | null | invenio_app_ils/patrons/cli.py | masonproffitt/invenio-app-ils | 81dd12aa774d7d70096de77cc526d9b4ca614437 | [
"MIT"
] | null | null | null | invenio_app_ils/patrons/cli.py | masonproffitt/invenio-app-ils | 81dd12aa774d7d70096de77cc526d9b4ca614437 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Copyright (C) 2020 CERN.
#
# invenio-app-ils is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Patrons CLI module."""
from pprint import pprint
import click
from flask.cli import with_appcontext
from invenio_app_ils.proxies import current_app_ils
from .anonymization import anonymize_patron_data, get_patron_activity
@click.group()
def patrons():
"""Patrons data CLI."""
@patrons.command()
@with_appcontext
def index():
"""Reindex all patrons."""
click.secho("Started reindexing patrons...", fg="green")
n_patrons = current_app_ils.patron_indexer.reindex_patrons()
click.secho("Reindexed {} patrons".format(n_patrons), fg="green")
@patrons.command()
@click.option("--patron-pid", help="Give patron pid.")
@with_appcontext
def list_activity(patron_pid):
"""List patron's data and activity."""
patron_activity = get_patron_activity(patron_pid)
if not patron_activity:
print("The patron with pid", patron_pid, "does not exist.")
return
pprint(patron_activity)
@patrons.command()
@click.option("--patron-pid", help="Give patron pid.")
@click.option(
"--force",
is_flag=True,
default=False,
help="Try to anonymize data by using the given patron PID even if "
"the Invenio user does not exist.",
)
@with_appcontext
def anonymize(patron_pid, force):
"""Anonymize patron's data and activity."""
if click.confirm("Are you sure you want to anonymize this patron?"):
dropped, indices, emails = anonymize_patron_data(patron_pid, force)
msg = (
"Successfully anonymized patron's activity: {dropped} rows "
"deleted from db, {indices} records re-indexed and {emails} emails"
" anonymized.".format(
dropped=dropped, indices=indices, emails=emails
)
)
click.secho(msg, fg="green")
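# Hypothetical invocations (the actual entry point depends on how this `patrons`
# group is registered with the application's CLI):
# <app-cli> patrons index
# <app-cli> patrons anonymize --patron-pid <pid> --force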
| 28.521739 | 79 | 0.683435 |
acf1f47914ec9e88c2bf944043ccd8bb7b48dd01 | 1,800 | py | Python | test/persistent_database_writer/test_data/upload_payload.py | meerkat-code/meerkat_tunnel | ff15880b6d388e79d95a60cb67c1ea5fb15089db | [
"MIT"
] | null | null | null | test/persistent_database_writer/test_data/upload_payload.py | meerkat-code/meerkat_tunnel | ff15880b6d388e79d95a60cb67c1ea5fb15089db | [
"MIT"
] | null | null | null | test/persistent_database_writer/test_data/upload_payload.py | meerkat-code/meerkat_tunnel | ff15880b6d388e79d95a60cb67c1ea5fb15089db | [
"MIT"
] | null | null | null | """
Meerkat Nest Test Data
Payload for testing the Meerkat Nest Upload function
"""
upload_payload = {
"token": "",
"content": "record",
"formId": "dem_test",
"formVersion": "",
"data": [
{
"*meta-instance-id*": "uuid:75099745-d218-4129-8b27-de3520c1281a",
"*meta-model-version*": "",
"*meta-ui-version*": "",
"*meta-submission-date*": "2017-06-23T15:51:33.415Z",
"*meta-is-complete*": True,
"*meta-date-marked-as-complete*": "2017-06-23T15:51:33.415Z",
"start": "2017-06-22T14:21:53.490Z",
"end": "2017-06-22T14:22:20.175Z",
"today": "2017-06-22",
"deviceid": "123123123123123",
"subscriberid": "123123123123123",
"simid": "1231231231231231231",
"phonenumber": "+44123123123",
"instanceID": "uuid:75099745-d218-4129-8b27-de3520c1281a"
}
]
}
processed_upload_payload = {
"token": "",
"content": "record",
"formId": "dem_test",
"formVersion": "",
"data": [
{
"*meta-instance-id*": "uuid:75099745-d218-4129-8b27-de3520c1281a",
"*meta-model-version*": "",
"*meta-ui-version*": "",
"*meta-submission-date*": "2017-06-23T15:51:33.415Z",
"*meta-is-complete*": True,
"*meta-date-marked-as-complete*": "2017-06-23T15:51:33.415Z",
"start": "2017-06-22T14:21:53.490Z",
"end": "2017-06-22T14:22:20.175Z",
"today": "2017-06-22",
"deviceid": "355828065518701",
"subscriberid": "234200404200842",
"simid": "",
"phonenumber": "+",
"instanceID": "uuid:75099745-d218-4129-8b27-de3520c1281a"
}
]
} | 33.333333 | 78 | 0.512222 |
acf1f540b2024e23dae179f8abf9897dbfc8ac0c | 2,388 | py | Python | odoo-14.0/addons/l10n_dk/__manifest__.py | Yomy1996/P1 | 59e24cdd5f7f82005fe15bd7a7ff54dd5364dd29 | [
"CC-BY-3.0"
] | null | null | null | odoo-14.0/addons/l10n_dk/__manifest__.py | Yomy1996/P1 | 59e24cdd5f7f82005fe15bd7a7ff54dd5364dd29 | [
"CC-BY-3.0"
] | null | null | null | odoo-14.0/addons/l10n_dk/__manifest__.py | Yomy1996/P1 | 59e24cdd5f7f82005fe15bd7a7ff54dd5364dd29 | [
"CC-BY-3.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Denmark - Accounting',
'version': '1.0',
'author': 'Odoo House ApS',
'website': 'https://odoohouse.dk',
'category': 'Accounting/Localizations/Account Charts',
'description': """
Localization Module for Denmark
===============================
This is the module to manage the **accounting chart for Denmark**. It covers one-man businesses as well as I/S, IVS, ApS and A/S.
**The module sets up:**
- **Danish chart of accounts**
- Danish VAT
  - 25% VAT
  - Restaurant VAT 6.25%
  - Reverse charge (omvendt betalingspligt)
- Fiscal positions
  - EU (business)
  - EU (private)
  - Third countries
- Financial reports
  - Income statement
  - Balance sheet
  - VAT settlement
    - Settlement
    - Boxes A, B and C
- **Anglo-Saxon accounting method**
.
Product setup:
==============
(The Danish tax and account names below are the names of the records created by this module.)
**Goods**
**Sales VAT:** Salgmoms 25%
**Sales account:** 1010 Salg af vare, m/moms
**Purchase VAT:** Købsmoms 25%
**Purchase account:** 2010 Direkte omkostninger vare, m/moms
.
**Services**
**Sales VAT:** Salgmoms 25%, ydelser
**Sales account:** 1011 Salg af ydelser, m/moms
**Purchase VAT:** Købsmoms 25%, ydelser
**Purchase account:** 2011 Direkte omkostninger ydelser, m/moms
.
**Goods with reverse charge**
**Sales VAT:** Salg omvendt betalingspligt
**Sales account:** 1012 Salg af vare, u/moms
**Purchase VAT:** Køb omvendt betalingspligt
**Purchase account:** 2012 Direkte omkostninger vare, u/moms
.
**Restaurant**
**Purchase VAT:** Restaurationsmoms 6,25%, købsmoms
**Purchase account:** 4010 Restaurationsbesøg
.
Copyright 2018 Odoo House ApS
""",
'depends': ['account', 'base_iban', 'base_vat'],
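    # Note: the data files below are loaded in the listed order, so the chart
    # template and account tags must come before the accounts, taxes and fiscal
    # positions that reference them.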
'data': [
'data/account_account_tags.xml',
'data/l10n_dk_chart_template_data.xml',
'data/account.account.template.csv',
'data/l10n_dk_chart_template_post_data.xml',
'data/account_tax_template_data.xml',
'data/account_fiscal_position_template.xml',
'data/account_fiscal_position_tax_template.xml',
'data/account_fiscal_position_account_template.xml',
'data/account_chart_template_configuration_data.xml',
'data/menuitem_data.xml'
],
'demo': [
'demo/demo_company.xml',
],
}
| 21.908257 | 127 | 0.619765 |
acf1f5a3242c55bc9008d0e7b6749bb23d235710 | 9,862 | py | Python | instabot/bot/bot_like.py | macross5/instabot | ac20f47237bf3718d3abcb60b902016638f3bb30 | [
"Apache-2.0"
] | null | null | null | instabot/bot/bot_like.py | macross5/instabot | ac20f47237bf3718d3abcb60b902016638f3bb30 | [
"Apache-2.0"
] | null | null | null | instabot/bot/bot_like.py | macross5/instabot | ac20f47237bf3718d3abcb60b902016638f3bb30 | [
"Apache-2.0"
] | null | null | null | from tqdm import tqdm
def like(
self,
media_id,
check_media=True,
container_module="feed_short_url",
feed_position=0,
username=None,
user_id=None,
hashtag_name=None,
hashtag_id=None,
entity_page_name=None,
entity_page_id=None,
):
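    # Flow: respect the daily like limit and the blocked-action protection flag,
    # wait the configured delay, optionally validate the media, then call the API.
    # Returns True on success, False otherwise, or the string "feedback_required"
    # when Instagram blocks the action.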
if not self.reached_limit("likes"):
if self.blocked_actions["likes"]:
self.logger.warning("YOUR `LIKE` ACTION IS BLOCKED")
if self.blocked_actions_protection:
self.logger.warning(
"blocked_actions_protection ACTIVE. Skipping `like` action."
)
return False
self.delay("like")
if check_media and not self.check_media(media_id):
return False
#
# TODO: commented out simulation "open link in app"
#
# if container_module == "feed_short_url":
# if "_" in str(media_id):
# media_pk = int(media_id.split("_")[0])
# else:
# media_pk = int(media_id)
# link = self.get_link_from_media_id(media_pk)
# self.logger.debug("Opening link {}".format(link))
# self.api.open_instagram_link(link)
# self.logger.debug("Getting media info...")
# self.api.media_info(media_id)
#
_r = self.api.like(
media_id,
container_module=container_module,
feed_position=feed_position,
username=username,
user_id=user_id,
hashtag_name=hashtag_name,
hashtag_id=hashtag_id,
entity_page_name=entity_page_name,
entity_page_id=entity_page_id,
)
if _r == "feedback_required":
self.logger.error("`Like` action has been BLOCKED...!!!")
self.blocked_actions["likes"] = True
return "feedback_required"
if _r:
self.logger.info("Liked media %s." % media_id)
self.total["likes"] += 1
return True
else:
self.logger.info("Out of likes for today.")
return False
def like_comment(self, comment_id):
if not self.reached_limit("likes"):
if self.blocked_actions["likes"]:
self.logger.warning("YOUR `LIKE` ACTION IS BLOCKED")
if self.blocked_actions_protection:
from datetime import timedelta
next_reset = (self.start_time.date() + timedelta(days=1)).strftime(
"%Y-%m-%d %H:%M:%S"
)
self.logger.warning(
"blocked_actions_protection ACTIVE. Skipping `like` action till, at least, {}.".format(
next_reset
)
)
return False
self.delay("like")
_r = self.api.like_comment(comment_id)
if _r == "feedback_required":
self.logger.error("`Like` action has been BLOCKED...!!!")
self.blocked_actions["likes"] = True
return False
if _r:
self.logger.info("Liked comment {}.".format(comment_id))
self.total["likes"] += 1
return True
else:
self.logger.info("Out of likes for today.")
return False
def like_media_comments(self, media_id):
broken_items = []
media_comments = self.get_media_comments(media_id)
self.logger.info("Found {} comments".format(len(media_comments)))
comment_ids = [
item["pk"]
for item in media_comments
if not item.get("has_liked_comment") or not item["has_liked_comment"]
]
if not comment_ids:
self.logger.info(
"None comments received: comments not found or comments have been filtered."
)
return broken_items
self.logger.info("Going to like %d comments." % (len(comment_ids)))
for comment in tqdm(comment_ids):
if not self.like_comment(comment):
self.error_delay()
broken_items = comment_ids[comment_ids.index(comment) :]
self.logger.info(
"DONE: Liked {count} comments.".format(
count=len(comment_ids) - len(broken_items)
)
)
return broken_items
def like_medias(
self,
medias,
check_media=True,
container_module="feed_timeline",
username=None,
user_id=None,
hashtag_name=None,
hashtag_id=None,
entity_page_name=None,
entity_page_id=None,
):
broken_items = []
if not medias:
self.logger.info("Nothing to like.")
return broken_items
self.logger.info("Going to like %d medias." % (len(medias)))
feed_position = 0
for media in tqdm(medias):
if not self.like(
media,
check_media=check_media,
container_module=container_module,
feed_position=feed_position,
username=username,
user_id=user_id,
hashtag_name=hashtag_name,
hashtag_id=hashtag_id,
entity_page_name=entity_page_name,
entity_page_id=entity_page_id,
):
self.error_delay()
broken_items.append(media)
feed_position += 1
self.logger.info("DONE: Total liked %d medias." % self.total["likes"])
return broken_items
def like_timeline(self, amount=None):
self.logger.info("Liking timeline feed:")
medias = self.get_timeline_medias()[:amount]
return self.like_medias(medias, check_media=False)
def like_user(self, user_id, amount=None, filtration=True):
""" Likes last user_id's medias """
if filtration:
if not self.check_user(user_id):
return False
self.logger.info("Liking user_%s's feed:" % user_id)
user_id = self.convert_to_user_id(user_id)
medias = self.get_user_medias(user_id, filtration=filtration)
if not medias:
self.logger.info(
"None medias received: account is closed or medias have been filtered."
)
return False
return self.like_medias(medias[:amount], filtration)
def like_users(self, user_ids, nlikes=None, filtration=True):
for user_id in user_ids:
if self.reached_limit("likes"):
self.logger.info("Out of likes for today.")
return
self.like_user(user_id, amount=nlikes, filtration=filtration)
def like_hashtag(self, hashtag, amount=None):
""" Likes last medias from hashtag """
self.logger.info("Going to like media with hashtag #%s." % hashtag)
medias = self.get_total_hashtag_medias(hashtag, amount)
if self.api.search_tags(hashtag):
for tag in self.api.last_json["results"]:
if tag["name"] == hashtag:
hashtag_id = tag["id"]
break
else:
self.logger.error("NO INFO FOR HASHTAG: {}".format(hashtag))
return False
return self.like_medias(
medias,
container_module="feed_contextual_hashtag",
hashtag_name=hashtag,
hashtag_id=hashtag_id,
)
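# Sketch of typical usage, assuming these functions are bound to a logged-in Bot
# instance (as instabot does when assembling the Bot class):
# bot.like_hashtag("travel", amount=20)
# bot.like_user("some_username", amount=5)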
def like_geotag(self, geotag, amount=None):
# TODO: like medias by geotag
pass
def like_followers(self, user_id, nlikes=None, nfollows=None):
self.logger.info("Like followers of: %s." % user_id)
if self.reached_limit("likes"):
self.logger.info("Out of likes for today.")
return
if not user_id:
self.logger.info("User not found.")
return
follower_ids = self.get_user_followers(user_id, nfollows)
if not follower_ids:
self.logger.info("%s not found / closed / has no followers." % user_id)
else:
self.like_users(follower_ids[:nfollows], nlikes)
def like_following(self, user_id, nlikes=None, nfollows=None):
self.logger.info("Like following of: %s." % user_id)
if self.reached_limit("likes"):
self.logger.info("Out of likes for today.")
return
if not user_id:
self.logger.info("User not found.")
return
following_ids = self.get_user_following(user_id, nfollows)
if not following_ids:
self.logger.info("%s not found / closed / has no following." % user_id)
else:
self.like_users(following_ids, nlikes)
def like_location_feed(self, place, amount):
self.logger.info("Searching location: {}".format(place))
self.api.search_location(place)
if not self.api.last_json["items"]:
self.logger.error("{} not found.".format(place))
return False
else:
finded_location = self.api.last_json["items"][0]["location"]["pk"]
self.api.get_location_feed(finded_location)
location_feed = self.api.last_json
if location_feed.get("story"):
self.logger.info("Liking users from stories...")
location_to_filter = location_feed["story"]["items"][:amount]
for i in range(0, len(location_to_filter)):
user = location_to_filter[i]["user"]["pk"]
self.like_user(user_id=user, amount=1, filtration=False)
elif location_feed.get("items"):
self.logger.info("Liking users from images...")
max_id = ""
counter = 0
while counter < amount:
location_to_filter = location_feed["items"][:amount]
medias = self.filter_medias(location_to_filter, filtration=False)
self.like_medias(medias)
counter += 1
if location_feed.get("next_max_id"):
max_id = location_feed["next_max_id"]
else:
return False
self.api.get_location_feed(finded_location, max_id)
location_feed = self.api.last_json
else:
self.logger.error(
" '{}' does not seem to have pictures. Select a different location.".format(
place
)
)
return False
| 34.006897 | 107 | 0.597343 |
acf1f6aca4927ac227404f5883f8260e85d61fc7 | 54,618 | py | Python | client/deis.py | thomasdavis/deis | 02120302926fb3fa18f7275859d6a252c63f4a60 | [
"Apache-2.0"
] | 1 | 2021-11-21T02:55:54.000Z | 2021-11-21T02:55:54.000Z | client/deis.py | thomasdavis/deis | 02120302926fb3fa18f7275859d6a252c63f4a60 | [
"Apache-2.0"
] | null | null | null | client/deis.py | thomasdavis/deis | 02120302926fb3fa18f7275859d6a252c63f4a60 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
"""
The Deis command-line client issues API calls to a Deis controller.
Usage: deis <command> [<args>...]
Auth commands::
register register a new user with a controller
login login to a controller
logout logout from the current controller
Subcommands, use ``deis help [subcommand]`` to learn more::
apps manage applications used to provide services
clusters manage clusters used to host applications
ps manage processes inside an app container
config manage environment variables that define app config
domains manage and assign domain names to your applications
builds manage builds created using `git push`
releases manage releases of an application
keys manage ssh keys used for `git push` deployments
perms manage permissions for shared apps and clusters
Developer shortcut commands::
create create a new application
scale scale processes by type (web=2, worker=1)
info view information about the current app
open open a URL to the app in a browser
logs view aggregated log info for the app
run run a command in an ephemeral app container
destroy destroy an application
Use ``git push deis master`` to deploy to an application.
"""
from __future__ import print_function
from collections import namedtuple
from collections import OrderedDict
from cookielib import MozillaCookieJar
from datetime import datetime
from getpass import getpass
from itertools import cycle
from threading import Event
from threading import Thread
import base64
import glob
import json
import locale
import os.path
import re
import subprocess
import sys
import time
import urlparse
import webbrowser
from dateutil import parser
from dateutil import relativedelta
from dateutil import tz
from docopt import docopt
from docopt import DocoptExit
import requests
import yaml
__version__ = '0.9.0'
locale.setlocale(locale.LC_ALL, '')
class Session(requests.Session):
"""
Session for making API requests and interacting with the filesystem
"""
def __init__(self):
super(Session, self).__init__()
self.trust_env = False
cookie_file = os.path.expanduser('~/.deis/cookies.txt')
cookie_dir = os.path.dirname(cookie_file)
self.cookies = MozillaCookieJar(cookie_file)
# Create the $HOME/.deis dir if it doesn't exist
if not os.path.isdir(cookie_dir):
os.mkdir(cookie_dir, 0700)
# Load existing cookies if the cookies.txt exists
if os.path.isfile(cookie_file):
self.cookies.load()
self.cookies.clear_expired_cookies()
def clear(self, domain):
"""Clear cookies for the specified domain."""
try:
self.cookies.clear(domain)
self.cookies.save()
except KeyError:
pass
def git_root(self):
"""
Return the absolute path from the git repository root
If no git repository exists, raise an EnvironmentError
"""
try:
git_root = subprocess.check_output(
['git', 'rev-parse', '--show-toplevel'],
stderr=subprocess.PIPE).strip('\n')
except subprocess.CalledProcessError:
raise EnvironmentError('Current directory is not a git repository')
return git_root
def get_app(self):
"""
Return the application name for the current directory
The application is determined by parsing `git remote -v` output.
If no application is found, raise an EnvironmentError.
"""
git_root = self.git_root()
# try to match a deis remote
remotes = subprocess.check_output(['git', 'remote', '-v'],
cwd=git_root)
m = re.search(r'^deis\W+(?P<url>\S+)\W+\(', remotes, re.MULTILINE)
if not m:
raise EnvironmentError(
'Could not find deis remote in `git remote -v`')
url = m.groupdict()['url']
m = re.match('\S+/(?P<app>[a-z0-9-]+)(.git)?$', url)
if not m:
raise EnvironmentError("Could not parse: {url}".format(**locals()))
return m.groupdict()['app']
app = property(get_app)
def request(self, *args, **kwargs):
"""
Issue an HTTP request with proper cookie handling including
`Django CSRF tokens <https://docs.djangoproject.com/en/dev/ref/contrib/csrf/>`
"""
for cookie in self.cookies:
if cookie.name == 'csrftoken':
if 'headers' in kwargs:
kwargs['headers']['X-CSRFToken'] = cookie.value
else:
kwargs['headers'] = {'X-CSRFToken': cookie.value}
break
response = super(Session, self).request(*args, **kwargs)
self.cookies.save()
return response
class Settings(dict):
"""
Settings backed by a file in the user's home directory
On init, settings are loaded from ~/.deis/client.yaml
"""
def __init__(self):
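        # The backing file holds a small YAML mapping; illustrative content:
        #   controller: http://deis.example.com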
path = os.path.expanduser('~/.deis')
if not os.path.exists(path):
os.mkdir(path)
self._path = os.path.join(path, 'client.yaml')
if not os.path.exists(self._path):
with open(self._path, 'w') as f:
f.write(yaml.safe_dump({}))
# load initial settings
self.load()
def load(self):
"""
Deserialize and load settings from the filesystem
"""
with open(self._path) as f:
data = f.read()
settings = yaml.safe_load(data)
self.update(settings)
return settings
def save(self):
"""
Serialize and save settings to the filesystem
"""
data = yaml.safe_dump(dict(self))
with open(self._path, 'w') as f:
f.write(data)
return data
_counter = 0
def _newname(template="Thread-{}"):
"""Generate a new thread name."""
global _counter
_counter += 1
return template.format(_counter)
FRAMES = {
'arrow': ['^', '>', 'v', '<'],
'dots': ['...', 'o..', '.o.', '..o'],
'ligatures': ['bq', 'dp', 'qb', 'pd'],
'lines': [' ', '-', '=', '#', '=', '-'],
'slash': ['-', '\\', '|', '/'],
}
class TextProgress(Thread):
"""Show progress for a long-running operation on the command-line."""
def __init__(self, group=None, target=None, name=None, args=(), kwargs={}):
name = name or _newname("TextProgress-Thread-{}")
style = kwargs.get('style', 'dots')
super(TextProgress, self).__init__(
group, target, name, args, kwargs)
self.daemon = True
self.cancelled = Event()
self.frames = cycle(FRAMES[style])
def run(self):
"""Write ASCII progress animation frames to stdout."""
if not os.environ.get('DEIS_HIDE_PROGRESS'):
time.sleep(0.5)
self._write_frame(self.frames.next(), erase=False)
while not self.cancelled.is_set():
time.sleep(0.4)
self._write_frame(self.frames.next())
# clear the animation
sys.stdout.write('\b' * (len(self.frames.next()) + 2))
sys.stdout.flush()
def cancel(self):
"""Set the animation thread as cancelled."""
self.cancelled.set()
def _write_frame(self, frame, erase=True):
if erase:
backspaces = '\b' * (len(frame) + 2)
else:
backspaces = ''
sys.stdout.write("{} {} ".format(backspaces, frame))
# flush stdout or we won't see the frame
sys.stdout.flush()
def dictify(args):
"""Converts a list of key=val strings into a python dict.
>>> dictify(['MONGODB_URL=http://mongolabs.com/test', 'scale=5'])
{'MONGODB_URL': 'http://mongolabs.com/test', 'scale': 5}
"""
data = {}
for arg in args:
try:
var, val = arg.split('=', 1)
except ValueError:
raise DocoptExit()
# Try to coerce the value to an int since that's a common use case
try:
data[var] = int(val)
except ValueError:
data[var] = val
return data
def readable_datetime(datetime_str):
"""
Return a human-readable datetime string from an ECMA-262 (JavaScript)
datetime string.
"""
timezone = tz.tzlocal()
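    # Illustrative outputs of the logic below: an event 90 minutes ago renders as
    # "1 hour 30 minutes ago", one from yesterday as "Yesterday at 14:03:12", and
    # anything older falls back to the locale's full date/time format.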
dt = parser.parse(datetime_str).astimezone(timezone)
now = datetime.now(timezone)
delta = relativedelta.relativedelta(now, dt)
# if it happened today, say "2 hours and 1 minute ago"
if delta.days <= 1 and dt.day == now.day:
if delta.hours == 0:
hour_str = ''
elif delta.hours == 1:
hour_str = '1 hour '
else:
hour_str = "{} hours ".format(delta.hours)
if delta.minutes == 0:
min_str = ''
elif delta.minutes == 1:
min_str = '1 minute '
else:
min_str = "{} minutes ".format(delta.minutes)
if not any((hour_str, min_str)):
return 'Just now'
else:
return "{}{}ago".format(hour_str, min_str)
# if it happened yesterday, say "yesterday at 3:23 pm"
    yesterday = now + relativedelta.relativedelta(days=-1)
if delta.days <= 2 and dt.day == yesterday.day:
return dt.strftime("Yesterday at %X")
# otherwise return locale-specific date/time format
else:
return dt.strftime('%c %Z')
def trim(docstring):
"""
Function to trim whitespace from docstring
c/o PEP 257 Docstring Conventions
<http://www.python.org/dev/peps/pep-0257/>
"""
if not docstring:
return ''
# Convert tabs to spaces (following the normal Python rules)
# and split into a list of lines:
lines = docstring.expandtabs().splitlines()
# Determine minimum indentation (first line doesn't count):
indent = sys.maxint
for line in lines[1:]:
stripped = line.lstrip()
if stripped:
indent = min(indent, len(line) - len(stripped))
# Remove indentation (first line is special):
trimmed = [lines[0].strip()]
if indent < sys.maxint:
for line in lines[1:]:
trimmed.append(line[indent:].rstrip())
# Strip off trailing and leading blank lines:
while trimmed and not trimmed[-1]:
trimmed.pop()
while trimmed and not trimmed[0]:
trimmed.pop(0)
# Return a single string:
return '\n'.join(trimmed)
class ResponseError(Exception):
pass
class DeisClient(object):
"""
A client which interacts with a Deis controller.
"""
def __init__(self):
self._session = Session()
self._settings = Settings()
def _dispatch(self, method, path, body=None,
headers={'content-type': 'application/json'}, **kwargs):
"""
Dispatch an API request to the active Deis controller
"""
func = getattr(self._session, method.lower())
controller = self._settings['controller']
if not controller:
raise EnvironmentError(
'No active controller. Use `deis login` or `deis register` to get started.')
url = urlparse.urljoin(controller, path, **kwargs)
response = func(url, data=body, headers=headers)
return response
def apps(self, args):
"""
Valid commands for apps:
apps:create create a new application
apps:list list accessible applications
apps:info view info about an application
apps:open open the application in a browser
apps:logs view aggregated application logs
apps:run run a command in an ephemeral app container
apps:destroy destroy an application
Use `deis help [command]` to learn more
"""
return self.apps_list(args)
def apps_create(self, args):
"""
Create a new application
If no ID is provided, one will be generated automatically.
If no cluster is provided, a cluster named "dev" will be used.
Usage: deis apps:create [<id> --cluster=<cluster> --no-remote] [options]
Options
--cluster=CLUSTER target cluster to host application [default: dev]
--no-remote do not create a 'deis' git remote
"""
try:
self._session.git_root() # check for a git repository
except EnvironmentError:
print('No git repository found, use `git init` to create one')
sys.exit(1)
try:
self._session.get_app()
print('Deis remote already exists')
sys.exit(1)
except EnvironmentError:
pass
body = {}
app_name = args.get('<id>')
if app_name:
body.update({'id': app_name})
cluster = args.get('--cluster')
if cluster:
body.update({'cluster': cluster})
sys.stdout.write('Creating application... ')
sys.stdout.flush()
try:
progress = TextProgress()
progress.start()
response = self._dispatch('post', '/api/apps',
json.dumps(body))
finally:
progress.cancel()
progress.join()
if response.status_code == requests.codes.created: # @UndefinedVariable
data = response.json()
app_id = data['id']
print("done, created {}".format(app_id))
# add a git remote
# TODO: retrieve the hostname from service discovery
hostname = urlparse.urlparse(self._settings['controller']).netloc.split(':')[0]
git_remote = "ssh://git@{hostname}:2222/{app_id}.git".format(**locals())
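            # e.g. ssh://git@deis.example.com:2222/myapp.git (hostname and app id are illustrative)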
if args.get('--no-remote'):
print('remote available at {}'.format(git_remote))
else:
try:
subprocess.check_call(
['git', 'remote', 'add', '-f', 'deis', git_remote],
stdout=subprocess.PIPE)
print('Git remote deis added')
except subprocess.CalledProcessError:
print('Could not create Deis remote')
sys.exit(1)
else:
raise ResponseError(response)
def apps_destroy(self, args):
"""
Destroy an application
Usage: deis apps:destroy [--app=<id> --confirm=<confirm>]
"""
app = args.get('--app')
if not app:
app = self._session.app
confirm = args.get('--confirm')
if confirm == app:
pass
else:
print("""
! WARNING: Potentially Destructive Action
! This command will destroy the application: {app}
! To proceed, type "{app}" or re-run this command with --confirm={app}
""".format(**locals()))
confirm = raw_input('> ').strip('\n')
if confirm != app:
print('Destroy aborted')
return
sys.stdout.write("Destroying {}... ".format(app))
sys.stdout.flush()
try:
progress = TextProgress()
progress.start()
before = time.time()
response = self._dispatch('delete', "/api/apps/{}".format(app))
finally:
progress.cancel()
progress.join()
if response.status_code in (requests.codes.no_content, # @UndefinedVariable
requests.codes.not_found): # @UndefinedVariable
print('done in {}s'.format(int(time.time() - before)))
# If the requested app is in the current dir, delete the git remote
try:
if app == self._session.app:
subprocess.check_call(
['git', 'remote', 'rm', 'deis'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
print('Git remote deis removed')
except (EnvironmentError, subprocess.CalledProcessError):
pass # ignore error
else:
raise ResponseError(response)
def apps_list(self, args):
"""
List applications visible to the current user
Usage: deis apps:list
"""
response = self._dispatch('get', '/api/apps')
if response.status_code == requests.codes.ok: # @UndefinedVariable
data = response.json()
print('=== Apps')
for item in data['results']:
print('{id}'.format(**item))
else:
raise ResponseError(response)
def apps_info(self, args):
"""
Print info about the current application
Usage: deis apps:info [--app=<app>]
"""
app = args.get('--app')
if not app:
app = self._session.app
response = self._dispatch('get', "/api/apps/{}".format(app))
if response.status_code == requests.codes.ok: # @UndefinedVariable
print("=== {} Application".format(app))
print(json.dumps(response.json(), indent=2))
print()
self.ps_list(args)
self.domains_list(args)
print()
else:
raise ResponseError(response)
def apps_open(self, args):
"""
Open a URL to the application in a browser
Usage: deis apps:open [--app=<app>]
"""
app = args.get('--app')
if not app:
app = self._session.app
# TODO: replace with a single API call to apps endpoint
response = self._dispatch('get', "/api/apps/{}".format(app))
if response.status_code == requests.codes.ok: # @UndefinedVariable
cluster = response.json()['cluster']
else:
raise ResponseError(response)
response = self._dispatch('get', "/api/clusters/{}".format(cluster))
if response.status_code == requests.codes.ok: # @UndefinedVariable
domain = response.json()['domain']
# use the OS's default handler to open this URL
webbrowser.open('http://{}.{}/'.format(app, domain))
return domain
else:
raise ResponseError(response)
def apps_logs(self, args):
"""
Retrieve the most recent log events
Usage: deis apps:logs [--app=<app>]
"""
app = args.get('--app')
if not app:
app = self._session.app
response = self._dispatch('post',
"/api/apps/{}/logs".format(app))
if response.status_code == requests.codes.ok: # @UndefinedVariable
print(response.json())
else:
raise ResponseError(response)
def apps_run(self, args):
"""
Run a command inside an ephemeral app container
Usage: deis apps:run <command>...
"""
app = args.get('--app')
if not app:
app = self._session.app
body = {'command': ' '.join(sys.argv[2:])}
response = self._dispatch('post',
"/api/apps/{}/run".format(app),
json.dumps(body))
if response.status_code == requests.codes.ok: # @UndefinedVariable
rc, output = json.loads(response.content)
sys.stdout.write(output)
sys.stdout.flush()
sys.exit(rc)
else:
raise ResponseError(response)
def auth(self, args):
"""
Valid commands for auth:
auth:register register a new user
auth:cancel remove the current account
auth:login authenticate against a controller
auth:logout clear the current user session
Use `deis help [command]` to learn more
"""
return
def auth_register(self, args):
"""
Register a new user with a Deis controller
Usage: deis auth:register <controller> [options]
Options:
--username=USERNAME provide a username for the new account
--password=PASSWORD provide a password for the new account
--email=EMAIL provide an email address
"""
controller = args['<controller>']
if not urlparse.urlparse(controller).scheme:
controller = "http://{}".format(controller)
username = args.get('--username')
if not username:
username = raw_input('username: ')
password = args.get('--password')
if not password:
password = getpass('password: ')
confirm = getpass('password (confirm): ')
if password != confirm:
print('Password mismatch, aborting registration.')
sys.exit(1)
email = args.get('--email')
if not email:
email = raw_input('email: ')
url = urlparse.urljoin(controller, '/api/auth/register')
payload = {'username': username, 'password': password, 'email': email}
response = self._session.post(url, data=payload, allow_redirects=False)
if response.status_code == requests.codes.created: # @UndefinedVariable
self._settings['controller'] = controller
self._settings.save()
print("Registered {}".format(username))
login_args = {'--username': username, '--password': password,
'<controller>': controller}
if self.auth_login(login_args) is False:
print('Login failed')
else:
print('Registration failed', response.content)
sys.exit(1)
return True
def auth_cancel(self, args):
"""
Cancel and remove the current account.
Usage: deis auth:cancel
"""
controller = self._settings.get('controller')
if not controller:
print('Not logged in to a Deis controller')
sys.exit(1)
print('Please log in again in order to cancel this account')
username = self.auth_login({'<controller>': controller})
if username:
confirm = raw_input("Cancel account \"{}\" at {}? (y/n) ".format(username, controller))
if confirm == 'y':
self._dispatch('delete', '/api/auth/cancel')
self._session.cookies.clear()
self._session.cookies.save()
self._settings['controller'] = None
self._settings.save()
print('Account cancelled')
else:
print('Account not changed')
def auth_login(self, args):
"""
Login by authenticating against a controller
Usage: deis auth:login <controller> [--username=<username> --password=<password>]
"""
controller = args['<controller>']
if not urlparse.urlparse(controller).scheme:
controller = "http://{}".format(controller)
username = args.get('--username')
headers = {}
if not username:
username = raw_input('username: ')
password = args.get('--password')
if not password:
password = getpass('password: ')
url = urlparse.urljoin(controller, '/api/auth/login/')
payload = {'username': username, 'password': password}
# clear any cookies for this controller's domain
self._session.clear(urlparse.urlparse(url).netloc)
# prime cookies for login
self._session.get(url, headers=headers)
# post credentials to the login URL
response = self._session.post(url, data=payload, allow_redirects=False)
if response.status_code == requests.codes.found: # @UndefinedVariable
self._settings['controller'] = controller
self._settings.save()
print("Logged in as {}".format(username))
return username
else:
self._session.cookies.clear()
self._session.cookies.save()
raise ResponseError(response)
def auth_logout(self, args):
"""
Logout from a controller and clear the user session
Usage: deis auth:logout
"""
controller = self._settings.get('controller')
if controller:
self._dispatch('get', '/api/auth/logout/')
self._session.cookies.clear()
self._session.cookies.save()
self._settings['controller'] = None
self._settings.save()
print('Logged out')
def builds(self, args):
"""
Valid commands for builds:
builds:list list build history for an application
builds:create coming soon!
Use `deis help [command]` to learn more
"""
return self.builds_list(args)
def builds_create(self, args):
"""
Create a new build of an application
Usage: deis builds:create <image> [--app=<app>]
"""
app = args.get('--app')
if not app:
app = self._session.app
body = {'image': args['<image>']}
sys.stdout.write('Creating build... ')
sys.stdout.flush()
try:
progress = TextProgress()
progress.start()
response = self._dispatch('post', "/api/apps/{}/builds".format(app), json.dumps(body))
finally:
progress.cancel()
progress.join()
if response.status_code == requests.codes.created: # @UndefinedVariable
version = response.headers['x-deis-release']
print("done, v{}".format(version))
else:
raise ResponseError(response)
def builds_list(self, args):
"""
List build history for an application
Usage: deis builds:list [--app=<app>]
"""
app = args.get('--app')
if not app:
app = self._session.app
response = self._dispatch('get', "/api/apps/{}/builds".format(app))
if response.status_code == requests.codes.ok: # @UndefinedVariable
print("=== {} Builds".format(app))
data = response.json()
for item in data['results']:
print("{0[uuid]:<23} {0[created]}".format(item))
else:
raise ResponseError(response)
def clusters(self, args):
"""
Valid commands for clusters:
clusters:create create a new cluster
clusters:list list accessible clusters
clusters:update update cluster fields
clusters:info print a representation of the cluster
clusters:destroy destroy a cluster
Use `deis help [command]` to learn more
"""
return self.clusters_list(args)
def clusters_create(self, args):
"""
Create a new cluster
A globally unique cluster ID must be provided.
A domain field must also be provided to support multiple
applications hosted on the cluster. Note this requires
wildcard DNS configuration on the domain.
For example, a domain of "deisapp.com" requires that \\*.deisapp.com\\
resolve to the cluster's router endpoints.
Usage: deis clusters:create <id> <domain> --hosts=<hosts> --auth=<auth> [options]
Parameters:
<id> a name for the cluster
<domain> a domain under which app hostnames will live
<hosts> a comma-separated list of cluster members
<auth> a path to an SSH private key used to connect to cluster members
Options:
--type=TYPE cluster type [default: coreos]
"""
body = {'id': args['<id>'], 'domain': args['<domain>'],
'hosts': args['--hosts'], 'type': args['--type']}
auth_path = os.path.expanduser(args['--auth'])
if not os.path.exists(auth_path):
print('Path to authentication credentials does not exist: {}'.format(auth_path))
sys.exit(1)
with open(auth_path) as f:
data = f.read()
body.update({'auth': base64.b64encode(data)})
sys.stdout.write('Creating cluster... ')
sys.stdout.flush()
try:
progress = TextProgress()
progress.start()
response = self._dispatch('post', '/api/clusters', json.dumps(body))
finally:
progress.cancel()
progress.join()
if response.status_code == requests.codes.created: # @UndefinedVariable
data = response.json()
cluster = data['id']
print("done, created {}".format(cluster))
else:
raise ResponseError(response)
def clusters_info(self, args):
"""
Print info about a cluster
Usage: deis clusters:info <id>
"""
cluster = args.get('<id>')
response = self._dispatch('get', "/api/clusters/{}".format(cluster))
if response.status_code == requests.codes.ok: # @UndefinedVariable
print("=== {} Cluster".format(cluster))
print(json.dumps(response.json(), indent=2))
print()
else:
raise ResponseError(response)
def clusters_list(self, args):
"""
List available clusters
Usage: deis clusters:list
"""
response = self._dispatch('get', '/api/clusters')
if response.status_code == requests.codes.ok: # @UndefinedVariable
data = response.json()
print("=== Clusters")
for item in data['results']:
print("{id}".format(**item))
else:
raise ResponseError(response)
def clusters_destroy(self, args):
"""
Destroy a cluster
Usage: deis clusters:destroy <id> [--confirm=<confirm>]
"""
cluster = args.get('<id>')
confirm = args.get('--confirm')
if confirm == cluster:
pass
else:
print("""
! WARNING: Potentially Destructive Action
! This command will destroy the cluster: {cluster}
! To proceed, type "{cluster}" or re-run this command with --confirm={cluster}
""".format(**locals()))
confirm = raw_input('> ').strip('\n')
if confirm != cluster:
print('Destroy aborted')
return
sys.stdout.write("Destroying cluster... ".format(cluster))
sys.stdout.flush()
try:
progress = TextProgress()
progress.start()
before = time.time()
response = self._dispatch('delete', "/api/clusters/{}".format(cluster))
finally:
progress.cancel()
progress.join()
if response.status_code in (requests.codes.no_content, # @UndefinedVariable
requests.codes.not_found): # @UndefinedVariable
print('done in {}s'.format(int(time.time() - before)))
else:
raise ResponseError(response)
def clusters_update(self, args):
"""
Update cluster fields
Usage: deis clusters:update <id> [--domain=<domain> --hosts=<hosts> --auth=<auth>] [options]
Options:
--type=TYPE cluster type [default: coreos]
"""
cluster = args['<id>']
body = {}
for k, arg in (('domain', '--domain'), ('hosts', '--hosts'),
('auth', '--auth'), ('type', '--type')):
v = args.get(arg)
if v:
body.update({k: v})
response = self._dispatch('patch', '/api/clusters/{}'.format(cluster),
json.dumps(body))
if response.status_code == requests.codes.ok: # @UndefinedVariable
print(json.dumps(response.json(), indent=2))
else:
raise ResponseError(response)
def config(self, args):
"""
Valid commands for config:
config:list list environment variables for an app
config:set set environment variables for an app
config:unset unset environment variables for an app
Use `deis help [command]` to learn more
"""
sys.argv[1] = 'config:list'
args = docopt(self.config_list.__doc__)
return self.config_list(args)
def config_list(self, args):
"""
List environment variables for an application
Usage: deis config:list [--oneline] [--app=<app>]
"""
app = args.get('--app')
if not app:
app = self._session.app
oneline = args.get('--oneline')
response = self._dispatch('get', "/api/apps/{}/config".format(app))
if response.status_code == requests.codes.ok: # @UndefinedVariable
config = response.json()
values = json.loads(config['values'])
print("=== {} Config".format(app))
items = values.items()
if len(items) == 0:
print('No configuration')
return
keys = sorted(values)
if not oneline:
width = max(map(len, keys)) + 5
for k in keys:
v = values[k]
print(("{k:<" + str(width) + "} {v}").format(**locals()))
else:
output = []
for k in keys:
v = values[k]
output.append("{k}={v}".format(**locals()))
print(' '.join(output))
else:
raise ResponseError(response)
def config_set(self, args):
"""
Set environment variables for an application
Usage: deis config:set <var>=<value>... [--app=<app>]
"""
app = args.get('--app')
if not app:
app = self._session.app
body = {'values': json.dumps(dictify(args['<var>=<value>']))}
sys.stdout.write('Creating config... ')
sys.stdout.flush()
try:
progress = TextProgress()
progress.start()
response = self._dispatch('post', "/api/apps/{}/config".format(app), json.dumps(body))
finally:
progress.cancel()
progress.join()
if response.status_code == requests.codes.created: # @UndefinedVariable
version = response.headers['x-deis-release']
print("done, v{}\n".format(version))
config = response.json()
values = json.loads(config['values'])
print("=== {}".format(app))
items = values.items()
if len(items) == 0:
print('No configuration')
return
for k, v in values.items():
print("{k}: {v}".format(**locals()))
else:
raise ResponseError(response)
def config_unset(self, args):
"""
Unset an environment variable for an application
Usage: deis config:unset <key>... [--app=<app>]
"""
app = args.get('--app')
if not app:
app = self._session.app
values = {}
for k in args.get('<key>'):
values[k] = None
body = {'values': json.dumps(values)}
sys.stdout.write('Creating config... ')
sys.stdout.flush()
try:
progress = TextProgress()
progress.start()
response = self._dispatch('post', "/api/apps/{}/config".format(app), json.dumps(body))
finally:
progress.cancel()
progress.join()
if response.status_code == requests.codes.created: # @UndefinedVariable
version = response.headers['x-deis-release']
print("done, v{}\n".format(version))
config = response.json()
values = json.loads(config['values'])
print("=== {}".format(app))
items = values.items()
if len(items) == 0:
print('No configuration')
return
for k, v in values.items():
print("{k}: {v}".format(**locals()))
else:
raise ResponseError(response)
def domains(self, args):
"""
Valid commands for domains:
domains:add bind a domain to an application
domains:list list domains bound to an application
domains:remove unbind a domain from an application
Use `deis help [command]` to learn more
"""
return self.domains_list(args)
def domains_add(self, args):
"""
Bind a domain to an application
Usage: deis domains:add <domain> [--app=<app>]
"""
app = args.get('--app')
if not app:
app = self._session.app
domain = args.get('<domain>')
body = {'domain': domain}
sys.stdout.write("Adding {domain} to {app}... ".format(**locals()))
sys.stdout.flush()
try:
progress = TextProgress()
progress.start()
response = self._dispatch('post', "/api/apps/{app}/domains".format(app=app), json.dumps(body))
finally:
progress.cancel()
progress.join()
if response.status_code == requests.codes.created: # @UndefinedVariable
print("done")
else:
raise ResponseError(response)
def domains_remove(self, args):
"""
Unbind a domain from an application
Usage: deis domains:remove <domain> [--app=<app>]
"""
app = args.get('--app')
if not app:
app = self._session.app
domain = args.get('<domain>')
sys.stdout.write("Removing {domain} from {app}... ".format(**locals()))
sys.stdout.flush()
try:
progress = TextProgress()
progress.start()
response = self._dispatch('delete', "/api/apps/{app}/domains/{domain}".format(**locals()))
finally:
progress.cancel()
progress.join()
if response.status_code == requests.codes.no_content: # @UndefinedVariable
print("done")
else:
raise ResponseError(response)
def domains_list(self, args):
"""
List domains bound to an application
Usage: deis domains:list [--app=<app>]
"""
app = args.get('--app')
if not app:
app = self._session.app
response = self._dispatch(
'get', "/api/apps/{app}/domains".format(app=app))
if response.status_code == requests.codes.ok: # @UndefinedVariable
domains = response.json()['results']
print("=== {} Domains".format(app))
if len(domains) == 0:
print('No domains')
return
for domain in domains:
print(domain['domain'])
else:
raise ResponseError(response)
def ps(self, args):
"""
Valid commands for processes:
ps:list list application processes
ps:scale scale processes (e.g. web=4 worker=2)
Use `deis help [command]` to learn more
"""
sys.argv[1] = 'ps:list'
args = docopt(self.ps_list.__doc__)
return self.ps_list(args)
def ps_list(self, args, app=None):
"""
List processes servicing an application
Usage: deis ps:list [--app=<app>]
"""
if not app:
app = args.get('--app')
if not app:
app = self._session.app
response = self._dispatch('get',
"/api/apps/{}/containers".format(app))
if response.status_code != requests.codes.ok: # @UndefinedVariable
raise ResponseError(response)
processes = response.json()
print("=== {} Processes".format(app))
c_map = {}
for item in processes['results']:
c_map.setdefault(item['type'], []).append(item)
print()
for c_type in c_map.keys():
print("--- {c_type}: ".format(**locals()))
for c in c_map[c_type]:
print("{type}.{num} {state} ({release})".format(**c))
print()
def ps_scale(self, args):
"""
Scale an application's processes by type
Example: deis ps:scale web=4 worker=2
Usage: deis ps:scale <type=num>... [--app=<app>]
"""
app = args.get('--app')
if not app:
app = self._session.get_app()
body = {}
for type_num in args.get('<type=num>'):
typ, count = type_num.split('=')
body.update({typ: int(count)})
print('Scaling processes... but first, coffee!')
try:
progress = TextProgress()
progress.start()
before = time.time()
response = self._dispatch('post',
"/api/apps/{}/scale".format(app),
json.dumps(body))
finally:
progress.cancel()
progress.join()
if response.status_code == requests.codes.no_content: # @UndefinedVariable
print('done in {}s\n'.format(int(time.time() - before)))
self.ps_list({}, app)
else:
raise ResponseError(response)
def keys(self, args):
"""
Valid commands for SSH keys:
keys:list list SSH keys for the logged in user
keys:add add an SSH key
keys:remove remove an SSH key
Use `deis help [command]` to learn more
"""
return self.keys_list(args)
def keys_add(self, args):
"""
Add SSH keys for the logged in user
Usage: deis keys:add [<key>]
"""
path = args.get('<key>')
if not path:
selected_key = self._ask_pubkey_interactively()
else:
# check the specified key format
selected_key = self._parse_key(path)
if not selected_key:
return
# Upload the key to Deis
body = {
'id': selected_key.id,
'public': "{} {}".format(selected_key.type, selected_key.str)
}
sys.stdout.write("Uploading {} to Deis...".format(selected_key.id))
sys.stdout.flush()
response = self._dispatch('post', '/api/keys', json.dumps(body))
if response.status_code == requests.codes.created: # @UndefinedVariable
print('done')
else:
raise ResponseError(response)
def _parse_key(self, path):
"""Parse an SSH public key path into a Key namedtuple."""
Key = namedtuple('Key', 'path name type str comment id')
name = path.split(os.path.sep)[-1]
with open(path) as f:
data = f.read()
match = re.match(r'^(ssh-...) ([^ ]+) ?(.*)', data)
if not match:
print("Could not parse SSH public key {0}".format(name))
sys.exit(1)
key_type, key_str, key_comment = match.groups()
if key_comment:
key_id = key_comment
else:
key_id = name.replace('.pub', '')
return Key(path, name, key_type, key_str, key_comment, key_id)
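# A hedged usage sketch (file name and contents are illustrative only): given
# ~/.ssh/id_rsa.pub containing "ssh-rsa AAAAB3Nz... alice@laptop", _parse_key
# returns roughly:
#   Key(path='~/.ssh/id_rsa.pub', name='id_rsa.pub', type='ssh-rsa',
#       str='AAAAB3Nz...', comment='alice@laptop', id='alice@laptop')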
def _ask_pubkey_interactively(self):
# find public keys and prompt the user to pick one
ssh_dir = os.path.expanduser('~/.ssh')
pubkey_paths = glob.glob(os.path.join(ssh_dir, '*.pub'))
if not pubkey_paths:
print('No SSH public keys found')
return
pubkeys_list = [self._parse_key(k) for k in pubkey_paths]
print('Found the following SSH public keys:')
for i, key_ in enumerate(pubkeys_list):
print("{}) {} {}".format(i + 1, key_.name, key_.comment))
print("0) Enter path to pubfile (or use keys:add <key_path>) ")
inp = raw_input('Which would you like to use with Deis? ')
try:
if int(inp) != 0:
selected_key = pubkeys_list[int(inp) - 1]
else:
selected_key_path = raw_input('Enter the path to the pubkey file: ')
selected_key = self._parse_key(os.path.expanduser(selected_key_path))
except Exception:
print('Aborting')
return
return selected_key
def keys_list(self, args):
"""
List SSH keys for the logged in user
Usage: deis keys:list
"""
response = self._dispatch('get', '/api/keys')
if response.status_code == requests.codes.ok: # @UndefinedVariable
data = response.json()
if data['count'] == 0:
print('No keys found')
return
print("=== {owner} Keys".format(**data['results'][0]))
for key in data['results']:
public = key['public']
print("{0} {1}...{2}".format(
key['id'], public[0:16], public[-10:]))
else:
raise ResponseError(response)
def keys_remove(self, args):
"""
Remove an SSH key for the logged in user
Usage: deis keys:remove <key>
"""
key = args.get('<key>')
sys.stdout.write("Removing {} SSH Key... ".format(key))
sys.stdout.flush()
response = self._dispatch('delete', "/api/keys/{}".format(key))
if response.status_code == requests.codes.no_content: # @UndefinedVariable
print('done')
else:
raise ResponseError(response)
def perms(self, args):
"""
Valid commands for perms:
perms:list list permissions granted on an app or cluster
perms:create create a new permission for a user
perms:delete delete a permission for a user
Use `deis help perms:[command]` to learn more
"""
# perms:transfer transfer ownership of an app or cluster
sys.argv[1] = 'perms:list'
args = docopt(self.perms_list.__doc__)
return self.perms_list(args)
def perms_list(self, args):
"""
List all users with permission to use an app, or list all users
with system administrator privileges.
Usage: deis perms:list [--app=<app>|--admin]
"""
app, url = self._parse_perms_args(args)
response = self._dispatch('get', url)
if response.status_code == requests.codes.ok:
print(json.dumps(response.json(), indent=2))
else:
raise ResponseError(response)
def perms_create(self, args):
"""
Give another user permission to use an app, or give another user
system administrator privileges.
Usage: deis perms:create <username> [--app=<app>|--admin]
"""
app, url = self._parse_perms_args(args)
username = args.get('<username>')
body = {'username': username}
if app:
msg = "Adding {} to {} collaborators... ".format(username, app)
else:
msg = "Adding {} to system administrators... ".format(username)
sys.stdout.write(msg)
sys.stdout.flush()
response = self._dispatch('post', url, json.dumps(body))
if response.status_code == requests.codes.created:
print('done')
else:
raise ResponseError(response)
def perms_delete(self, args):
"""
Revoke another user's permission to use an app, or revoke another
user's system administrator privileges.
Usage: deis perms:delete <username> [--app=<app>|--admin]
"""
app, url = self._parse_perms_args(args)
username = args.get('<username>')
url = "{}/{}".format(url, username)
if app:
msg = "Removing {} from {} collaborators... ".format(username, app)
else:
msg = "Remove {} from system administrators... ".format(username)
sys.stdout.write(msg)
sys.stdout.flush()
response = self._dispatch('delete', url)
if response.status_code == requests.codes.no_content:
print('done')
else:
raise ResponseError(response)
def _parse_perms_args(self, args):
app = args.get('--app')
admin = args.get('--admin')
if admin:
app = None
url = '/api/admin/perms'
else:
app = app or self._session.app
url = "/api/apps/{}/perms".format(app)
return app, url
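# Illustrative results (the app name is hypothetical):
#   self._parse_perms_args({'--app': 'myapp', '--admin': None}) -> ('myapp', '/api/apps/myapp/perms')
#   self._parse_perms_args({'--app': None, '--admin': True}) -> (None, '/api/admin/perms')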
def releases(self, args):
"""
Valid commands for releases:
releases:list list an application's release history
releases:info print information about a specific release
releases:rollback return to a previous release
Use `deis help [command]` to learn more
"""
return self.releases_list(args)
def releases_info(self, args):
"""
Print info about a particular release
Usage: deis releases:info <version> [--app=<app>]
"""
version = args.get('<version>')
if not version.startswith('v'):
version = 'v' + version
app = args.get('--app')
if not app:
app = self._session.app
response = self._dispatch(
'get', "/api/apps/{app}/releases/{version}".format(**locals()))
if response.status_code == requests.codes.ok: # @UndefinedVariable
print(json.dumps(response.json(), indent=2))
else:
raise ResponseError(response)
def releases_list(self, args):
"""
List release history for an application
Usage: deis releases:list [--app=<app>]
"""
app = args.get('--app')
if not app:
app = self._session.app
response = self._dispatch('get', "/api/apps/{app}/releases".format(**locals()))
if response.status_code == requests.codes.ok: # @UndefinedVariable
print("=== {} Releases".format(app))
data = response.json()
for item in data['results']:
item['created'] = readable_datetime(item['created'])
print("v{version:<6} {created:<33} {summary}".format(**item))
else:
raise ResponseError(response)
def releases_rollback(self, args):
"""
Roll back to a previous application release.
Usage: deis releases:rollback [--app=<app>] [<version>]
"""
app = args.get('--app')
if not app:
app = self._session.app
version = args.get('<version>')
if version:
if version.startswith('v'):
version = version[1:]
body = {'version': int(version)}
else:
body = {}
url = "/api/apps/{app}/releases/rollback".format(**locals())
response = self._dispatch('post', url, json.dumps(body))
if response.status_code == requests.codes.created:
print(response.json())
else:
raise ResponseError(response)
def shortcuts(self, args):
"""
Show valid shortcuts for client commands.
Usage: deis shortcuts
"""
print('Valid shortcuts are:\n')
for shortcut, command in SHORTCUTS.items():
if ':' not in shortcut:
print("{:<10} -> {}".format(shortcut, command))
print('\nUse `deis help [command]` to learn more')
SHORTCUTS = OrderedDict([
('create', 'apps:create'),
('destroy', 'apps:destroy'),
('init', 'clusters:create'),
('info', 'apps:info'),
('run', 'apps:run'),
('open', 'apps:open'),
('logs', 'apps:logs'),
('register', 'auth:register'),
('login', 'auth:login'),
('logout', 'auth:logout'),
('scale', 'ps:scale'),
('rollback', 'releases:rollback'),
('sharing', 'perms:list'),
('sharing:list', 'perms:list'),
('sharing:add', 'perms:create'),
('sharing:remove', 'perms:delete'),
])
def parse_args(cmd):
"""
Parse command-line args applying shortcuts and looking for help flags
"""
if cmd == 'help':
cmd = sys.argv[-1]
help_flag = True
else:
cmd = sys.argv[1]
help_flag = False
# swap cmd with shortcut
if cmd in SHORTCUTS:
cmd = SHORTCUTS[cmd]
# change the cmdline arg itself for docopt
if not help_flag:
sys.argv[1] = cmd
else:
sys.argv[2] = cmd
# convert : to _ for matching method names and docstrings
if ':' in cmd:
cmd = '_'.join(cmd.split(':'))
return cmd, help_flag
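# Example of shortcut expansion (derived from SHORTCUTS above): with
# sys.argv == ['deis', 'scale', 'web=2'], parse_args('scale') rewrites
# sys.argv[1] to 'ps:scale' and returns ('ps_scale', False).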
def _dispatch_cmd(method, args):
try:
method(args)
except requests.exceptions.ConnectionError as err:
print("Couldn't connect to the Deis Controller. Make sure that the Controller URI is \
correct and the server is running.")
sys.exit(1)
except EnvironmentError as err:
raise DocoptExit(err.message)
except ResponseError as err:
resp = err.message
print('{} {}'.format(resp.status_code, resp.reason))
try:
msg = resp.json()
if 'detail' in msg:
msg = "Detail:\n{}".format(msg['detail'])
except ValueError:
msg = resp.text
print(msg)
sys.exit(1)
def main():
"""
Create a client, parse the arguments received on the command line, and
call the appropriate method on the client.
"""
cli = DeisClient()
args = docopt(__doc__, version='Deis CLI {}'.format(__version__),
options_first=True)
cmd = args['<command>']
cmd, help_flag = parse_args(cmd)
# print help if it was asked for
if help_flag:
if cmd != 'help' and cmd in dir(cli):
print(trim(getattr(cli, cmd).__doc__))
return
docopt(__doc__, argv=['--help'])
# unless cmd needs to use sys.argv directly
if hasattr(cli, cmd):
method = getattr(cli, cmd)
else:
raise DocoptExit('Found no matching command, try `deis help`')
# re-parse docopt with the relevant docstring unless it needs sys.argv
if cmd not in ('apps_run',):
docstring = trim(getattr(cli, cmd).__doc__)
if 'Usage: ' in docstring:
args.update(docopt(docstring))
# dispatch the CLI command
_dispatch_cmd(method, args)
if __name__ == '__main__':
main()
sys.exit(0)
| 34.394207 | 106 | 0.550734 |
acf1f760e3c79acfaf4c4b4309188d1ed3379380 | 743 | py | Python | bread/identify.py | systocrat/bread | e34a40e8554f847f5c564da144f7eef6c0326718 | [
"MIT"
] | 11 | 2016-05-17T06:18:03.000Z | 2017-02-11T03:03:26.000Z | bread/identify.py | systocrat/bread | e34a40e8554f847f5c564da144f7eef6c0326718 | [
"MIT"
] | 3 | 2016-05-21T19:05:05.000Z | 2016-05-21T21:51:07.000Z | bread/identify.py | systocrat/bread | e34a40e8554f847f5c564da144f7eef6c0326718 | [
"MIT"
] | null | null | null | import re
import six
protocols = dict()
class ProtocolNotFoundException(Exception):
pass
def identifyProtocol(data):
for protocol in six.itervalues(protocols):
if protocol.matches(data):
return protocol
raise ProtocolNotFoundException()
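# A hedged usage sketch (the request bytes are illustrative; actual matching
# depends on the ProtocolIdentifier implementations registered below):
#   proto = identifyProtocol('GET / HTTP/1.1\r\nHost: example.com\r\n\r\n')
#   print(proto.name)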
class ProtocolIdentifier(object):
def __init__(self, name):
self.name = name
def matches(self, data, **kwargs):
pass
from bread.protocols.flash import FlashPolicyIdentifier
from bread.protocols.http import HttpIdentifier
from bread.protocols.irc import IRCIdentifier
from bread.protocols.ssh2 import SSH2Identifier
_defaultProtocols = [
FlashPolicyIdentifier(),
HttpIdentifier(),
IRCIdentifier(),
SSH2Identifier()
]
for proto in _defaultProtocols:
protocols[proto.name] = proto | 20.081081 | 55 | 0.79004 |
acf1f797f60a653246e44032f1425bce8bef047d | 15,101 | py | Python | torrt/base_tracker.py | alexlitvinenko/torrt | e034b60ff4ebfda56344ff31bd10aedc840806c9 | [
"BSD-3-Clause"
] | null | null | null | torrt/base_tracker.py | alexlitvinenko/torrt | e034b60ff4ebfda56344ff31bd10aedc840806c9 | [
"BSD-3-Clause"
] | null | null | null | torrt/base_tracker.py | alexlitvinenko/torrt | e034b60ff4ebfda56344ff31bd10aedc840806c9 | [
"BSD-3-Clause"
] | null | null | null | import logging
import re
from datetime import datetime
from functools import partial
from itertools import chain
import requests
from six.moves.urllib.parse import urlparse, urljoin, parse_qs
from torrt.exceptions import TorrtException
from torrt.utils import parse_torrent, make_soup, encode_value, WithSettings, TrackerObjectsRegistry, dump_contents
LOGGER = logging.getLogger(__name__)
REQUEST_TIMEOUT = 10
REQUEST_USER_AGENT = (
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.106 Safari/537.36')
class BaseTracker(WithSettings):
"""Base torrent tracker handler class offering helper methods for its ancestors."""
config_entry_name = 'trackers'
alias = None
"""Tracker alias. Usually main tracker domain. See also `mirrors` attribute."""
mirrors = []
"""List of mirror domain names."""
encoding = None
"""Tracker html page encoding (cp1251 or other)."""
def __init__(self):
self.mirror_picked = None
def encode_value(self, value):
"""Encodes a value.
:param str|unicode value:
:param str|unicode encoding: Encoding charset.
:rtype: bytes
"""
return encode_value(value, self.encoding)
def pick_mirror(self, url):
"""Probes mirrors (domains) one by one and chooses one whick is available to use.
:param str url:
:rtype: str
"""
mirror_picked = self.mirror_picked
if mirror_picked is None:
LOGGER.debug('Picking a mirror ...')
original_domain = self.extract_domain(url)
mirror_picked = original_domain
for mirror_domain in self.mirrors:
mirror_url = '%s://%s' % (self.extract_scheme(url), mirror_domain)
LOGGER.debug('Probing mirror: `%s` ...', mirror_url)
try:
response = requests.get(mirror_url)
except requests.exceptions.RequestException as e:
continue
if response.url.startswith(mirror_url):
mirror_picked = mirror_domain
break
self.mirror_picked = mirror_picked
return mirror_picked
def get_mirrored_url(self, url):
"""Returns a mirrored URL for a given one.
:param str url:
:rtype: str
"""
mirror_picked = self.mirror_picked
original_domain = self.extract_domain(url)
url_mirror = url.replace(original_domain, mirror_picked)
return url_mirror
def register(self):
"""Adds this object into TrackerObjectsRegistry.
:return:
"""
TrackerObjectsRegistry.add(self)
@classmethod
def can_handle(cls, string):
"""Returns boolean whether this tracker can handle torrent from string.
:param str string: String, describing torrent. E.g. URL from torrent comment.
:rtype: bool
"""
for domain in chain([cls.alias], cls.mirrors):
if domain in string:
return True
return False
@classmethod
def extract_scheme(cls, url):
"""Extracts scheme from a given URL.
:param str url:
:rtype: str
"""
return urlparse(url).scheme
@classmethod
def extract_domain(cls, url):
"""Extracts domain from a given URL.
:param str url:
:rtype: str
"""
return urlparse(url).netloc
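# Example (hypothetical URL): extract_domain('http://tracker.example/forum/viewtopic.php?t=1')
# returns 'tracker.example'.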
def get_response(self, url, form_data=None, allow_redirects=True,
referer=None, cookies=None, query_string=None, as_soup=False):
"""Returns an HTTP resource object from given URL.
If a dictionary is passed in `form_data` POST HTTP method
would be used to pass data to resource (even if that dictionary is empty).
:param url: str - URL to get data from
:param form_data: dict - data for POST
:param allow_redirects: bool - whether to follow server redirects
:param referer: str or None - data to put into Referer header
:param cookies: dict or None - cookies to use
:param query_string: str or None - query string (GET parameters) to add to URL
:param as_soup: bool - whether to return BeautifulSoup object instead of Requests response
:return: object
:rtype: Response or BeautifulSoup
"""
if query_string is not None:
delim = '?'
if '?' in url:
delim = '&'
url = '%s%s%s' % (url, delim, query_string)
self.pick_mirror(url)
url = self.get_mirrored_url(url)
LOGGER.debug('Fetching %s ...', url)
headers = {'User-agent': REQUEST_USER_AGENT}
if referer is not None:
headers['Referer'] = referer
r_kwargs = {
'allow_redirects': allow_redirects,
'headers': headers,
'timeout': REQUEST_TIMEOUT,
}
if cookies is not None:
r_kwargs['cookies'] = cookies
if form_data is not None:
method = partial(requests.post, data=form_data, **r_kwargs)
else:
method = partial(requests.get, **r_kwargs)
result = None
try:
result = method(url)
if as_soup:
result = self.make_page_soup(result.text)
dump_contents('%s_%s.html' % (self.__class__.__name__, datetime.now()), contents=result)
return result
except requests.exceptions.RequestException as e:
LOGGER.error('Failed to get resource from `%s`: %s', getattr(result, 'url', url), e.message)
return None
@classmethod
def make_page_soup(cls, html):
"""Returns BeautifulSoup object from a html.
:param html: str
:return: object
:rtype: BeautifulSoup
"""
return make_soup(html)
@classmethod
def find_links(cls, url, page_soup, definite=None):
"""Returns a list with hyperlinks found in supplied page_soup
or a definite link.
:param url: str - page URL
:param page_soup: BeautifulSoup - page soup
:param definite: str - regular expression to match link
:return: list or str
:rtype: list or str
"""
if not page_soup:
return None if definite else []
if definite is not None:
link = page_soup.find(href=re.compile(definite))
if link:
return cls.expand_link(url, link.get('href'))
return link
else:
links = []
for link in page_soup.find_all('a'):
href = link.get('href')
if href:
links.append(cls.expand_link(url, href))
return links
@classmethod
def expand_link(cls, base_url, link):
"""Expands a given relative link using base URL if required.
:param base_url: str
:param link: str - absolute or relative link
:return: str
:rtype: str
"""
if not link.startswith('http'):
link = urljoin(base_url, link)
return link
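# Example (hypothetical URLs):
#   expand_link('http://tracker.example/forum/topic.php?t=1', 'download.php?id=7')
# returns 'http://tracker.example/forum/download.php?id=7'; an absolute link is
# returned unchanged.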
def test_configuration(self):
"""This should implement a configuration test, e.g. make test login and report success.
:return: bool
"""
return True
def get_torrent(self, url):
"""This method should be implemented in torrent tracker handler class
and must return .torrent file contents.
:param url: str - URL to download torrent file from
:return: str - torrent file contents
:rtype: str
"""
raise NotImplementedError('`%s` class should implement `get_torrent()` method' % self.__class__.__name__)
class GenericTracker(BaseTracker):
"""Generic torrent tracker handler class implementing most common tracker handling methods."""
def get_id_from_link(self, url):
"""Returns forum thread identifier from full thread URL.
:param url: str
:return: str
:rtype: str
"""
return url.split('=')[1]
def get_torrent(self, url):
"""This is the main method which returns torrent file contents
of file located at URL.
:param url: str - URL to find and get torrent from
:return: str or None - torrent file contents
:rtype: str or None
"""
torrent_data = None
download_link = self.get_download_link(url)
if download_link is None:
LOGGER.error('Cannot find torrent file download link at %s', url)
else:
LOGGER.debug('Torrent download link found: %s', download_link)
torrent_data = self.download_torrent(download_link, referer=url)
if torrent_data is None:
LOGGER.debug('Torrent download from `%s` has failed', download_link)
else:
torrent_data = parse_torrent(torrent_data)
return torrent_data
def get_download_link(self, url):
"""Tries to find .torrent file download link on page and return it.
:param url: str - URL to find a download link at.
:return: str or None
:rtype: str or None
"""
raise NotImplementedError('`%s` class should implement `get_download_link()` method' % self.__class__.__name__)
def download_torrent(self, url, referer=None):
"""Returns .torrent file contents from the given URL.
:param url: str - torrent file URL
:param referer: str or None - Referer header value
:return: str or None
:rtype: str or None
"""
raise NotImplementedError('`%s` class should implement `download_torrent()` method' % self.__class__.__name__)
class GenericPublicTracker(GenericTracker):
"""Generic torrent tracker handler class implementing most common handling methods for public trackers."""
login_required = False
def get_id_from_link(self, url):
return url.split('/')[-1]
def download_torrent(self, url, referer=None):
LOGGER.debug('Downloading torrent file from %s ...', url)
# That was a check that user himself visited torrent's page ;)
response = self.get_response(url, referer=referer)
return getattr(response, 'content', None)
class GenericPrivateTracker(GenericPublicTracker):
"""Generic torrent tracker handler class implementing most common handling methods
for private trackers (that require user registration).
"""
login_required = True
login_url = None
"""URL where with login form.
This can include `%(domain)s` marker in place of a domain name when domain mirrors are used
(see `mirrors` attribute of BaseTracker).
"""
# Cookie name to verify that a log in was successful.
auth_cookie_name = None
# HTTP GET (query string) parameter name to verify that a log in was successful. Probably session ID.
auth_qs_param_name = None
def __init__(self, username=None, password=None, cookies=None, query_string=None):
super(GenericPrivateTracker, self).__init__()
self.logged_in = False
# Stores a number of login attempts to prevent recursion.
self.login_counter = 0
self.username = username
self.password = password
if cookies is None:
cookies = {}
self.cookies = cookies
self.query_string = query_string
def get_encode_form_data(self, data):
"""Encode dictionary from get_login_form_data using Tracker page encoding.
:param dict data:
:rtype: dict
"""
return {key: self.encode_value(value) for key, value in data.items()}
def get_login_form_data(self, login, password):
"""Should return a dictionary with data to be pushed to authorization form.
:param login:
:param password:
:return: dict
:rtype: dict
"""
return {'username': login, 'password': password}
def test_configuration(self):
return self.login(self.alias)
def login(self, domain):
"""Implements tracker login procedure.
Returns success bool.
:return: bool
:rtype: bool
"""
login_url = self.login_url % {'domain': domain}
LOGGER.debug('Trying to login at %s ...', login_url)
if self.logged_in:
raise TorrtTrackerException('Consecutive login attempt detected at `%s`' % self.__class__.__name__)
if not self.username or self.password is None:
return False
self.login_counter += 1
# No recursion wanted.
if self.login_counter > 1:
return False
allow_redirects = False # Not to loose cookies on the redirect.
if self.auth_qs_param_name:
allow_redirects = True # To be able to get Session ID from query string.
form_data = self.get_login_form_data(self.username, self.password)
form_data = self.get_encode_form_data(form_data)
response = self.get_response(
login_url, form_data,
allow_redirects=allow_redirects, cookies=self.cookies
)
if not response: # e.g. Connection aborted.
return False
# Login success checks.
parsed_qs = parse_qs(urlparse(response.url).query)
if self.auth_cookie_name in response.cookies or self.auth_qs_param_name in parsed_qs:
self.logged_in = True
if parsed_qs:
self.query_string = parsed_qs[self.auth_qs_param_name][0]
self.cookies = response.cookies
# Save auth info to config.
self.save_settings()
LOGGER.debug('Login is successful')
else:
LOGGER.warning('Login with given credentials failed')
return self.logged_in
def before_download(self, url):
"""Used to perform some required actions right before .torrent download.
E.g.: to set a sentinel cookie that allows the download.
:param url: str - torrent file URL
:return:
"""
return None
def get_auth_query_string(self):
"""Returns an auth query string to be passed to get_response()
for auth purposes.
:return: auth string, e.g. sid=1234567890
:rtype: str
"""
query_string = None
if self.auth_qs_param_name:
query_string = '%s=%s' % (self.auth_qs_param_name, self.query_string)
return query_string
def download_torrent(self, url, referer=None):
LOGGER.debug('Downloading torrent file from %s ...', url)
self.before_download(url)
response = self.get_response(
url,
cookies=self.cookies,
query_string=self.get_auth_query_string(),
referer=referer
)
return getattr(response, 'content', None)
class TorrtTrackerException(TorrtException):
"""Base torrt tracker exception. All other tracker related exception should inherit from that."""
| 31.072016 | 119 | 0.617707 |
acf1f80be1c8c319af56fa8c3403bab05f4d7f19 | 743 | py | Python | duplicate_file.py | andrewp-as-is/sublime-duplicate-file | 680abbdf64d725781008a9189fe15f3964726440 | [
"Unlicense"
] | null | null | null | duplicate_file.py | andrewp-as-is/sublime-duplicate-file | 680abbdf64d725781008a9189fe15f3964726440 | [
"Unlicense"
] | null | null | null | duplicate_file.py | andrewp-as-is/sublime-duplicate-file | 680abbdf64d725781008a9189fe15f3964726440 | [
"Unlicense"
] | null | null | null | #!/usr/bin/env python
import os
from shutil import copyfile
import sublime
import sublime_plugin
class DuplicateFileCommand(sublime_plugin.TextCommand):
def duplicate(self):
src = self.view.file_name()
if not src:
return
filename = "%s copy" % os.path.splitext(os.path.basename(src))[0]
if os.path.splitext(src)[1]:
filename = filename + os.path.splitext(src)[1]
dst = os.path.join(os.path.dirname(src), filename)
copyfile(src, dst)
def run(self, edit):
try:
self.duplicate()
except Exception as e:
msg = "%s\n%s" % (type(e), str(e))
sublime.error_message(msg)
| 28.576923 | 73 | 0.601615 |
acf1f88e5b4f832348d05c74e6375be451b1175d | 5,741 | py | Python | packages/syft/src/syft/proto/core/adp/scalar_pb2.py | vishalbelsare/PySyft | fb04404fcfbef82fad1fb47407b35a24e9afb599 | [
"Apache-1.1"
] | 8,428 | 2017-08-10T09:17:49.000Z | 2022-03-31T08:20:14.000Z | packages/syft/src/syft/proto/core/adp/scalar_pb2.py | vishalbelsare/PySyft | fb04404fcfbef82fad1fb47407b35a24e9afb599 | [
"Apache-1.1"
] | 4,779 | 2017-08-09T23:19:00.000Z | 2022-03-29T11:49:36.000Z | packages/syft/src/syft/proto/core/adp/scalar_pb2.py | vishalbelsare/PySyft | fb04404fcfbef82fad1fb47407b35a24e9afb599 | [
"Apache-1.1"
] | 2,307 | 2017-08-10T08:52:12.000Z | 2022-03-30T05:36:07.000Z | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: proto/core/adp/scalar.proto
"""Generated protocol buffer code."""
# third party
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
# syft absolute
from syft.proto.core.adp import entity_pb2 as proto_dot_core_dot_adp_dot_entity__pb2
from syft.proto.core.common import (
common_object_pb2 as proto_dot_core_dot_common_dot_common__object__pb2,
)
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
b'\n\x1bproto/core/adp/scalar.proto\x12\rsyft.core.adp\x1a%proto/core/common/common_object.proto\x1a\x1bproto/core/adp/entity.proto"7\n\x12IntermediateScalar\x12!\n\x02id\x18\x01 \x01(\x0b\x32\x15.syft.core.common.UID"\xb8\x01\n\nBaseScalar\x12!\n\x02id\x18\x01 \x01(\x0b\x32\x15.syft.core.common.UID\x12\x14\n\x07min_val\x18\x02 \x01(\x01H\x00\x88\x01\x01\x12\x12\n\x05value\x18\x03 \x01(\x01H\x01\x88\x01\x01\x12\x14\n\x07max_val\x18\x04 \x01(\x01H\x02\x88\x01\x01\x12%\n\x06\x65ntity\x18\x05 \x01(\x0b\x32\x15.syft.core.adp.EntityB\n\n\x08_min_valB\x08\n\x06_valueB\n\n\x08_max_val"<\n\x17IntermediateGammaScalar\x12!\n\x02id\x18\x01 \x01(\x0b\x32\x15.syft.core.common.UID"\xc8\x01\n\x0bGammaScalar\x12!\n\x02id\x18\x01 \x01(\x0b\x32\x15.syft.core.common.UID\x12\x14\n\x07min_val\x18\x02 \x01(\x01H\x00\x88\x01\x01\x12\x12\n\x05value\x18\x03 \x01(\x01H\x01\x88\x01\x01\x12\x14\n\x07max_val\x18\x04 \x01(\x01H\x02\x88\x01\x01\x12%\n\x06\x65ntity\x18\x05 \x01(\x0b\x32\x15.syft.core.adp.Entity\x12\r\n\x05prime\x18\x06 \x01(\x03\x42\n\n\x08_min_valB\x08\n\x06_valueB\n\n\x08_max_val"\x9b\x01\n\x15IntermediatePhiScalar\x12!\n\x02id\x18\x01 \x01(\x0b\x32\x15.syft.core.common.UID\x12%\n\x06\x65ntity\x18\x02 \x01(\x0b\x32\x15.syft.core.adp.Entity\x12.\n\x05gamma\x18\x03 \x01(\x0b\x32\x1a.syft.core.adp.GammaScalarH\x00\x88\x01\x01\x42\x08\n\x06_gamma"\xf1\x01\n\tPhiScalar\x12!\n\x02id\x18\x01 \x01(\x0b\x32\x15.syft.core.common.UID\x12\x14\n\x07min_val\x18\x02 \x01(\x01H\x00\x88\x01\x01\x12\x12\n\x05value\x18\x03 \x01(\x01H\x01\x88\x01\x01\x12\x14\n\x07max_val\x18\x04 \x01(\x01H\x02\x88\x01\x01\x12%\n\x06\x65ntity\x18\x05 \x01(\x0b\x32\x15.syft.core.adp.Entity\x12.\n\x05gamma\x18\x06 \x01(\x0b\x32\x1a.syft.core.adp.GammaScalarH\x03\x88\x01\x01\x42\n\n\x08_min_valB\x08\n\x06_valueB\n\n\x08_max_valB\x08\n\x06_gammab\x06proto3'
)
_INTERMEDIATESCALAR = DESCRIPTOR.message_types_by_name["IntermediateScalar"]
_BASESCALAR = DESCRIPTOR.message_types_by_name["BaseScalar"]
_INTERMEDIATEGAMMASCALAR = DESCRIPTOR.message_types_by_name["IntermediateGammaScalar"]
_GAMMASCALAR = DESCRIPTOR.message_types_by_name["GammaScalar"]
_INTERMEDIATEPHISCALAR = DESCRIPTOR.message_types_by_name["IntermediatePhiScalar"]
_PHISCALAR = DESCRIPTOR.message_types_by_name["PhiScalar"]
IntermediateScalar = _reflection.GeneratedProtocolMessageType(
"IntermediateScalar",
(_message.Message,),
{
"DESCRIPTOR": _INTERMEDIATESCALAR,
"__module__": "proto.core.adp.scalar_pb2"
# @@protoc_insertion_point(class_scope:syft.core.adp.IntermediateScalar)
},
)
_sym_db.RegisterMessage(IntermediateScalar)
BaseScalar = _reflection.GeneratedProtocolMessageType(
"BaseScalar",
(_message.Message,),
{
"DESCRIPTOR": _BASESCALAR,
"__module__": "proto.core.adp.scalar_pb2"
# @@protoc_insertion_point(class_scope:syft.core.adp.BaseScalar)
},
)
_sym_db.RegisterMessage(BaseScalar)
IntermediateGammaScalar = _reflection.GeneratedProtocolMessageType(
"IntermediateGammaScalar",
(_message.Message,),
{
"DESCRIPTOR": _INTERMEDIATEGAMMASCALAR,
"__module__": "proto.core.adp.scalar_pb2"
# @@protoc_insertion_point(class_scope:syft.core.adp.IntermediateGammaScalar)
},
)
_sym_db.RegisterMessage(IntermediateGammaScalar)
GammaScalar = _reflection.GeneratedProtocolMessageType(
"GammaScalar",
(_message.Message,),
{
"DESCRIPTOR": _GAMMASCALAR,
"__module__": "proto.core.adp.scalar_pb2"
# @@protoc_insertion_point(class_scope:syft.core.adp.GammaScalar)
},
)
_sym_db.RegisterMessage(GammaScalar)
IntermediatePhiScalar = _reflection.GeneratedProtocolMessageType(
"IntermediatePhiScalar",
(_message.Message,),
{
"DESCRIPTOR": _INTERMEDIATEPHISCALAR,
"__module__": "proto.core.adp.scalar_pb2"
# @@protoc_insertion_point(class_scope:syft.core.adp.IntermediatePhiScalar)
},
)
_sym_db.RegisterMessage(IntermediatePhiScalar)
PhiScalar = _reflection.GeneratedProtocolMessageType(
"PhiScalar",
(_message.Message,),
{
"DESCRIPTOR": _PHISCALAR,
"__module__": "proto.core.adp.scalar_pb2"
# @@protoc_insertion_point(class_scope:syft.core.adp.PhiScalar)
},
)
_sym_db.RegisterMessage(PhiScalar)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
_INTERMEDIATESCALAR._serialized_start = 114
_INTERMEDIATESCALAR._serialized_end = 169
_BASESCALAR._serialized_start = 172
_BASESCALAR._serialized_end = 356
_INTERMEDIATEGAMMASCALAR._serialized_start = 358
_INTERMEDIATEGAMMASCALAR._serialized_end = 418
_GAMMASCALAR._serialized_start = 421
_GAMMASCALAR._serialized_end = 621
_INTERMEDIATEPHISCALAR._serialized_start = 624
_INTERMEDIATEPHISCALAR._serialized_end = 779
_PHISCALAR._serialized_start = 782
_PHISCALAR._serialized_end = 1023
# @@protoc_insertion_point(module_scope)
| 49.491379 | 1,838 | 0.766591 |
acf1f8a4c143b255a8c9374b166e0675cd20e5e1 | 1,188 | py | Python | Languages/Python/merging_linked_list.py | AyushiPanth/Hacktoberfest | 881ab65e6bd295b92bf86f070bc25a910ceb15a8 | [
"MIT"
] | 4 | 2019-10-12T13:54:20.000Z | 2021-07-06T22:41:12.000Z | Languages/Python/merging_linked_list.py | AyushiPanth/Hacktoberfest | 881ab65e6bd295b92bf86f070bc25a910ceb15a8 | [
"MIT"
] | 1 | 2020-10-01T18:03:45.000Z | 2020-10-01T18:03:45.000Z | Languages/Python/merging_linked_list.py | AyushiPanth/Hacktoberfest | 881ab65e6bd295b92bf86f070bc25a910ceb15a8 | [
"MIT"
] | 8 | 2019-10-14T17:20:09.000Z | 2020-10-03T17:27:49.000Z | # mearging two sorted linked list
# node class - the structure of linked list
class Node:
def __init__(self):
self.value = None
self.next = None
# function for printing linked list
def printList(root):
while root != None:
print(root.value, end = ' ')
root = root.next
print('')
# function for merging sorted linked list
def merge(L1, L2):
# create new linked list pointer
L3 = Node()
prev = L3
# while both linked lists are not empty
while L1 != None and L2 != None:
if L1.value <= L2.value:
prev.next = L1
L1 = L1.next
else:
prev.next = L2
L2 = L2.next
prev = prev.next
# once we reach end of a linked list, append the other
# list because we know it is already sorted
if L1 == None:
prev.next = L2
elif L2 == None:
prev.next = L1
return L3.next
# first linked list
root1 = Node()
n1 = Node()
n2 = Node()
root1.value = 1
root1.next = n1
n1.value = 3
n1.next = n2
n2.value = 10
# second linked list
root2 = Node()
l1 = Node()
l2 = Node()
l3 = Node()
root2.value = 7
root2.next = l1
l1.value = 13
l1.next = l2
l2.value = 18
l2.next = l3
l3.value = 122
root = merge(root1, root2)
printList(root)
| 15.230769 | 56 | 0.633838 |
acf1f8c6da50a629ad4a2ca249ce625f0d20bb26 | 3,476 | py | Python | utils/plot_figures.py | qiuqiangkong/dcase2018_task4 | d165ad27b9e1990256d5b2e73d4fb74826978301 | [
"MIT"
] | 14 | 2018-07-22T21:14:42.000Z | 2021-07-29T03:15:20.000Z | utils/plot_figures.py | qiuqiangkong/dcase2018_task4 | d165ad27b9e1990256d5b2e73d4fb74826978301 | [
"MIT"
] | null | null | null | utils/plot_figures.py | qiuqiangkong/dcase2018_task4 | d165ad27b9e1990256d5b2e73d4fb74826978301 | [
"MIT"
] | null | null | null | import argparse
import matplotlib.pyplot as plt
import os
from features import LogMelExtractor, calculate_logmel, read_meta
import config
def plot_logmel(args):
"""Plot log Mel feature of one audio per class.
"""
# Arguments & parameters
dataset_dir = args.dataset_dir
sample_rate = config.sample_rate
window_size = config.window_size
overlap = config.overlap
seq_len = config.seq_len
mel_bins = config.mel_bins
labels = config.labels
classes_num = len(labels)
plot_num = 12
# Paths
meta_csv = os.path.join(args.dataset_dir, 'metadata', 'train', 'weak.csv')
audios_dir = os.path.join(args.dataset_dir, 'audio', 'train', 'weak')
# Feature extractor
feature_extractor = LogMelExtractor(sample_rate=sample_rate,
window_size=window_size,
overlap=overlap,
mel_bins=mel_bins)
# Calculate log mel feature of audio clips
(audio_names, event_labels) = read_meta(meta_csv)
selected_features_list = []
selected_audio_names = []
selected_labels = []
# Select one audio per class and extract feature
for label in labels:
for (n, audio_name) in enumerate(audio_names):
if label in event_labels[n] and audio_name not in selected_audio_names:
audio_path = os.path.join(audios_dir, audio_name)
feature = calculate_logmel(audio_path=audio_path,
sample_rate=sample_rate,
feature_extractor=feature_extractor,
seq_len=seq_len)
selected_features_list.append(feature)
selected_audio_names.append(audio_name)
selected_labels.append(event_labels[n])
break
# Plot
rows_num = 3
cols_num = 4
n = 0
fig, axs = plt.subplots(rows_num, cols_num, figsize=(10, 5))
for n in range(classes_num):
row = n // cols_num
col = n % cols_num
axs[row, col].matshow(selected_features_list[n].T, origin='lower', aspect='auto', cmap='jet')
axs[row, col].set_title('No. {}, {}'.format(n, selected_labels[n]), fontsize='small')
axs[row, col].set_ylabel('log mel')
axs[row, col].yaxis.set_ticks([])
axs[row, col].xaxis.set_ticks([0, seq_len])
axs[row, col].xaxis.set_ticklabels(['0', '10 s'], fontsize='small')
axs[row, col].xaxis.tick_bottom()
for n in range(classes_num, rows_num * cols_num):
row = n // cols_num
col = n % cols_num
axs[row, col].set_visible(False)
for n in range(classes_num):
print('No. {}, {}'.format(n, selected_audio_names[n]))
fig.tight_layout()
plt.show()
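# Typical invocation (the dataset path is illustrative):
#   python utils/plot_figures.py plot_logmel --dataset_dir=/path/to/dcase2018_task4_dataset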
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='')
subparsers = parser.add_subparsers(dest='mode')
parser_plot_logmel = subparsers.add_parser('plot_logmel')
parser_plot_logmel.add_argument('--dataset_dir', type=str)
args = parser.parse_args()
if args.mode == 'plot_logmel':
plot_logmel(args)
else:
raise Exception("Incorrect arguments!") | 32.485981 | 101 | 0.57336 |
acf1f8c98d2a9c692d3cc5bf84bf02adb386c052 | 7,937 | py | Python | grr/client/grr_response_client/vfs.py | xx-sec/grr | 1ed5b821fcdeec97483c75406e09ea3898b12e82 | [
"Apache-2.0"
] | null | null | null | grr/client/grr_response_client/vfs.py | xx-sec/grr | 1ed5b821fcdeec97483c75406e09ea3898b12e82 | [
"Apache-2.0"
] | null | null | null | grr/client/grr_response_client/vfs.py | xx-sec/grr | 1ed5b821fcdeec97483c75406e09ea3898b12e82 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
"""This file implements a VFS abstraction on the client."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import functools
import platform
from typing import Any, Optional, Callable, Dict, Type
from grr_response_client.vfs_handlers import base as vfs_base
from grr_response_client.vfs_handlers import files # pylint: disable=unused-import
from grr_response_client.vfs_handlers import sleuthkit # pylint: disable=unused-import
# pylint: disable=g-import-not-at-top
if platform.system() == "Windows":
from grr_response_client.vfs_handlers import registry as vfs_registry # pylint: disable=unused-import
else:
vfs_registry = None
from grr_response_core import config
from grr_response_core.lib.rdfvalues import paths as rdf_paths
from grr_response_core.lib.util import context
from grr_response_core.lib.util import precondition
# pylint: enable=g-import-not-at-top
VFSHandler = vfs_base.VFSHandler
UnsupportedHandlerError = vfs_base.UnsupportedHandlerError
# A registry of all VFSHandler registered
# TODO: Dictionary keys are of type rdf_paths.PathSpec.PathType,
# but this is currently not representable as type information in Python.
VFS_HANDLERS = {} # type: Dict[Any, Type[vfs_base.VFSHandler]]
# The paths we should use as virtual root for VFS operations.
_VFS_VIRTUALROOTS = {}
def Init():
"""Register all known vfs handlers to open a pathspec types."""
VFS_HANDLERS.clear()
_VFS_VIRTUALROOTS.clear()
vfs_virtualroots = config.CONFIG["Client.vfs_virtualroots"]
VFS_HANDLERS[files.File.supported_pathtype] = files.File
VFS_HANDLERS[files.TempFile.supported_pathtype] = files.TempFile
VFS_HANDLERS[sleuthkit.TSKFile.supported_pathtype] = sleuthkit.TSKFile
if vfs_registry is not None:
VFS_HANDLERS[vfs_registry.RegistryFile
.supported_pathtype] = vfs_registry.RegistryFile
for vfs_virtualroot in vfs_virtualroots:
try:
handler_string, root = vfs_virtualroot.split(":", 1)
except ValueError:
raise ValueError(
"Badly formatted vfs virtual root: %s. Correct format is "
"os:/path/to/virtual_root" % vfs_virtualroot)
handler_string = handler_string.upper()
handler = rdf_paths.PathSpec.PathType.enum_dict.get(handler_string)
if handler is None:
raise ValueError(
"VFSHandler {} could not be registered, because it was not found in"
" PathSpec.PathType {}".format(handler_string,
rdf_paths.PathSpec.PathType.enum_dict))
# We need some translation here, TSK needs an OS virtual root base. For
# every other handler we can just keep the type the same.
if handler == rdf_paths.PathSpec.PathType.TSK:
base_type = rdf_paths.PathSpec.PathType.OS
else:
base_type = handler
_VFS_VIRTUALROOTS[handler] = rdf_paths.PathSpec(
path=root, pathtype=base_type, is_virtualroot=True)
def VFSOpen(pathspec,
progress_callback = None
):
"""Expands pathspec to return an expanded Path.
A pathspec is a specification of how to access the file by recursively opening
each part of the path by different drivers. For example the following
pathspec:
pathtype: OS
path: "/dev/sda1"
nested_path {
pathtype: TSK
path: "/home/image2.img"
nested_path {
pathtype: TSK
path: "/home/a.txt"
}
}
Instructs the system to:
1) open /dev/sda1 using the OS driver.
2) Pass the obtained filelike object to the TSK driver to open
"/home/image2.img".
3) The obtained filelike object should be passed to the TSK driver to open
"/home/a.txt".
The problem remains how to get to this expanded path specification. Since the
server is not aware of all the files on the client, the server may request
this:
pathtype: OS
path: "/dev/sda1"
nested_path {
pathtype: TSK
path: "/home/image2.img/home/a.txt"
}
Or even this:
pathtype: OS
path: "/dev/sda1/home/image2.img/home/a.txt"
This function converts the pathspec requested by the server into an expanded
pathspec required to actually open the file. This is done by expanding each
component of the pathspec in turn.
Expanding the component is done by opening each leading directory in turn and
checking if it is a directory of a file. If its a file, we examine the file
headers to determine the next appropriate driver to use, and create a nested
pathspec.
Note that for some clients there might be a virtual root specified. This
is a directory that gets prepended to all pathspecs of a given
pathtype. For example if there is a virtual root defined as
["os:/virtualroot"], a path specification like
pathtype: OS
path: "/home/user/*"
will get translated into
pathtype: OS
path: "/virtualroot"
is_virtualroot: True
nested_path {
pathtype: OS
path: "/dev/sda1"
}
Args:
pathspec: A Path() protobuf to normalize.
progress_callback: A callback to indicate that the open call is still
working but needs more time.
Returns:
The open filelike object. This will contain the expanded Path() protobuf as
the member fd.pathspec.
Raises:
IOError: if one of the path components can not be opened.
"""
# Initialize the dictionary of VFS handlers lazily, if not yet done.
if not VFS_HANDLERS:
Init()
fd = None
# Adjust the pathspec in case we are using a vfs_virtualroot.
vroot = _VFS_VIRTUALROOTS.get(pathspec.pathtype)
# If we have a virtual root for this vfs handler, we need to prepend
# it to the incoming pathspec except if the pathspec is explicitly
# marked as containing a virtual root already or if it isn't marked but
# the path already contains the virtual root.
if (not vroot or pathspec.is_virtualroot or
pathspec.CollapsePath().startswith(vroot.CollapsePath())):
# No virtual root but opening changes the pathspec so we always work on a
# copy.
working_pathspec = pathspec.Copy()
else:
# We're in a virtual root, put the target pathspec inside the virtual root
# as a nested path.
working_pathspec = vroot.Copy()
working_pathspec.last.nested_path = pathspec.Copy()
# For each pathspec step, we get the handler for it and instantiate it with
# the old object, and the current step.
while working_pathspec:
component = working_pathspec.Pop()
try:
handler = VFS_HANDLERS[component.pathtype]
except KeyError:
raise UnsupportedHandlerError(component.pathtype)
# Open the component.
fd = handler.Open(
fd=fd,
component=component,
handlers=dict(VFS_HANDLERS),
pathspec=working_pathspec,
progress_callback=progress_callback)
if fd is None:
raise ValueError("VFSOpen cannot be called with empty PathSpec.")
return fd
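# Illustrative sketch (not part of the original module): opening the collapsed, server-style
# pathspec from the VFSOpen docstring above. The literal paths are hypothetical examples and
# this assumes PathSpec accepts `path`, `pathtype` and `nested_path` as keyword fields.
def _ExampleVFSOpenUsage(progress_callback=None):
  """Sketch only: expands the collapsed pathspec shown in the VFSOpen docstring."""
  server_request = rdf_paths.PathSpec(
      path="/dev/sda1",
      pathtype=rdf_paths.PathSpec.PathType.OS,
      nested_path=rdf_paths.PathSpec(
          path="/home/image2.img/home/a.txt",
          pathtype=rdf_paths.PathSpec.PathType.TSK))
  # VFSOpen probes each component (directory vs. file) to build the fully expanded
  # pathspec; the expanded form is available as fd.pathspec on the returned object.
  fd = VFSOpen(server_request, progress_callback=progress_callback)
  return fd.pathspec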
def VFSMultiOpen(pathspecs, progress_callback=None):
"""Opens multiple files specified by given path-specs.
See documentation for `VFSOpen` for more information.
Args:
pathspecs: A list of pathspec instances of files to open.
progress_callback: A callback function to call to notify about progress
Returns:
A context manager yielding file-like objects.
"""
precondition.AssertIterableType(pathspecs, rdf_paths.PathSpec)
vfs_open = functools.partial(VFSOpen, progress_callback=progress_callback)
return context.MultiContext(list(map(vfs_open, pathspecs)))
def ReadVFS(pathspec, offset, length, progress_callback=None):
"""Read from the VFS and return the contents.
Args:
pathspec: path to read from
offset: number of bytes to skip
length: number of bytes to read
progress_callback: A callback to indicate that the open call is still
working but needs more time.
Returns:
VFS file contents
"""
fd = VFSOpen(pathspec, progress_callback=progress_callback)
fd.Seek(offset)
return fd.Read(length)
| 32.797521 | 104 | 0.732393 |
acf1f9620677cee5e93f2ae9c26a551f610168fe | 8,302 | py | Python | kiwi/wallet/util/debug_spend_bundle.py | KiwiNetworkOrg/chia-blockchain | 90ec3a0fd7b50f5b98b5b0478e28ebbe6f8e8a71 | [
"Apache-2.0"
] | 8 | 2021-08-21T03:10:13.000Z | 2022-02-09T04:30:05.000Z | kiwi/wallet/util/debug_spend_bundle.py | KiwiNetworkOrg/chia-blockchain | 90ec3a0fd7b50f5b98b5b0478e28ebbe6f8e8a71 | [
"Apache-2.0"
] | null | null | null | kiwi/wallet/util/debug_spend_bundle.py | KiwiNetworkOrg/chia-blockchain | 90ec3a0fd7b50f5b98b5b0478e28ebbe6f8e8a71 | [
"Apache-2.0"
] | 1 | 2021-09-03T02:30:26.000Z | 2021-09-03T02:30:26.000Z | from typing import List, Tuple
from blspy import AugSchemeMPL
from clvm import KEYWORD_FROM_ATOM
from clvm_tools.binutils import disassemble as bu_disassemble
from kiwi.types.blockchain_format.coin import Coin
from kiwi.types.blockchain_format.program import Program, INFINITE_COST
from kiwi.types.blockchain_format.sized_bytes import bytes32
from kiwi.types.condition_opcodes import ConditionOpcode
from kiwi.util.condition_tools import conditions_dict_for_solution, pkm_pairs_for_conditions_dict
from kiwi.util.hash import std_hash
CONDITIONS = dict((k, bytes(v)[0]) for k, v in ConditionOpcode.__members__.items()) # pylint: disable=E1101
KFA = {v: k for k, v in CONDITIONS.items()}
# information needed to spend a cc
# if we ever support more genesis conditions, like a re-issuable coin,
# we may also need to save the `genesis_coin_mod` or its hash
def disassemble(sexp):
"""
This version of `disassemble` also disassembles condition opcodes like `ASSERT_ANNOUNCEMENT_CONSUMED`.
"""
kfa = dict(KEYWORD_FROM_ATOM)
kfa.update((Program.to(k).as_atom(), v) for k, v in KFA.items())
return bu_disassemble(sexp, kfa)
def coin_as_program(coin: Coin) -> Program:
"""
Convenience function for when putting `coin_info` into a solution.
"""
return Program.to([coin.parent_coin_info, coin.puzzle_hash, coin.amount])
def dump_coin(coin: Coin) -> str:
return disassemble(coin_as_program(coin))
def debug_spend_bundle(spend_bundle, agg_sig_additional_data=bytes([3] * 32)) -> None:
"""
Print a lot of useful information about a `SpendBundle` that might help with debugging
its clvm.
"""
pks = []
msgs = []
created_coin_announcements: List[List[bytes]] = []
asserted_coin_announcements = []
created_puzzle_announcements: List[List[bytes]] = []
asserted_puzzle_announcements = []
print("=" * 80)
for coin_spend in spend_bundle.coin_spends:
coin = coin_spend.coin
puzzle_reveal = Program.from_bytes(bytes(coin_spend.puzzle_reveal))
solution = Program.from_bytes(bytes(coin_spend.solution))
coin_name = coin.name()
if puzzle_reveal.get_tree_hash() != coin_spend.coin.puzzle_hash:
print("*** BAD PUZZLE REVEAL")
print(f"{puzzle_reveal.get_tree_hash().hex()} vs {coin_spend.coin.puzzle_hash.hex()}")
print("*" * 80)
breakpoint()
continue
print(f"consuming coin {dump_coin(coin)}")
print(f" with id {coin_name}")
print()
print(f"\nbrun -y main.sym '{bu_disassemble(puzzle_reveal)}' '{bu_disassemble(solution)}'")
error, conditions, cost = conditions_dict_for_solution(puzzle_reveal, solution, INFINITE_COST)
if error:
print(f"*** error {error}")
elif conditions is not None:
for pk, m in pkm_pairs_for_conditions_dict(conditions, coin_name, agg_sig_additional_data):
pks.append(pk)
msgs.append(m)
print()
cost, r = puzzle_reveal.run_with_cost(INFINITE_COST, solution) # type: ignore
print(disassemble(r))
print()
if conditions and len(conditions) > 0:
print("grouped conditions:")
for condition_programs in conditions.values():
print()
for c in condition_programs:
if len(c.vars) == 1:
as_prog = Program.to([c.opcode, c.vars[0]])
if len(c.vars) == 2:
as_prog = Program.to([c.opcode, c.vars[0], c.vars[1]])
print(f" {disassemble(as_prog)}")
created_coin_announcements.extend(
[coin_name] + _.vars for _ in conditions.get(ConditionOpcode.CREATE_COIN_ANNOUNCEMENT, [])
)
asserted_coin_announcements.extend(
[_.vars[0].hex() for _ in conditions.get(ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT, [])]
)
created_puzzle_announcements.extend(
[puzzle_reveal.get_tree_hash()] + _.vars
for _ in conditions.get(ConditionOpcode.CREATE_PUZZLE_ANNOUNCEMENT, [])
)
asserted_puzzle_announcements.extend(
[_.vars[0].hex() for _ in conditions.get(ConditionOpcode.ASSERT_PUZZLE_ANNOUNCEMENT, [])]
)
print()
else:
print("(no output conditions generated)")
print()
print("-------")
created = set(spend_bundle.additions())
spent = set(spend_bundle.removals())
zero_coin_set = set(coin.name() for coin in created if coin.amount == 0)
ephemeral = created.intersection(spent)
created.difference_update(ephemeral)
spent.difference_update(ephemeral)
print()
print("spent coins")
for coin in sorted(spent, key=lambda _: _.name()):
print(f" {dump_coin(coin)}")
print(f" => spent coin id {coin.name()}")
print()
print("created coins")
for coin in sorted(created, key=lambda _: _.name()):
print(f" {dump_coin(coin)}")
print(f" => created coin id {coin.name()}")
if ephemeral:
print()
print("ephemeral coins")
for coin in sorted(ephemeral, key=lambda _: _.name()):
print(f" {dump_coin(coin)}")
print(f" => created coin id {coin.name()}")
created_coin_announcement_pairs = [(_, std_hash(b"".join(_)).hex()) for _ in created_coin_announcements]
if created_coin_announcement_pairs:
print("created coin announcements")
for announcement, hashed in sorted(created_coin_announcement_pairs, key=lambda _: _[-1]):
as_hex = [f"0x{_.hex()}" for _ in announcement]
print(f" {as_hex} =>\n {hashed}")
eor_coin_announcements = sorted(
set(_[-1] for _ in created_coin_announcement_pairs) ^ set(asserted_coin_announcements)
)
created_puzzle_announcement_pairs = [(_, std_hash(b"".join(_)).hex()) for _ in created_puzzle_announcements]
if created_puzzle_announcements:
print("created puzzle announcements")
for announcement, hashed in sorted(created_puzzle_announcement_pairs, key=lambda _: _[-1]):
as_hex = [f"0x{_.hex()}" for _ in announcement]
print(f" {as_hex} =>\n {hashed}")
eor_puzzle_announcements = sorted(
set(_[-1] for _ in created_puzzle_announcement_pairs) ^ set(asserted_puzzle_announcements)
)
print()
print()
print(f"zero_coin_set = {sorted(zero_coin_set)}")
print()
if created_coin_announcement_pairs or asserted_coin_announcements:
print(f"created coin announcements = {sorted([_[-1] for _ in created_coin_announcement_pairs])}")
print()
print(f"asserted coin announcements = {sorted(asserted_coin_announcements)}")
print()
print(f"symdiff of coin announcements = {sorted(eor_coin_announcements)}")
print()
if created_puzzle_announcement_pairs or asserted_puzzle_announcements:
print(f"created puzzle announcements = {sorted([_[-1] for _ in created_puzzle_announcement_pairs])}")
print()
print(f"asserted puzzle announcements = {sorted(asserted_puzzle_announcements)}")
print()
print(f"symdiff of puzzle announcements = {sorted(eor_puzzle_announcements)}")
print()
print()
print("=" * 80)
print()
validates = AugSchemeMPL.aggregate_verify(pks, msgs, spend_bundle.aggregated_signature)
print(f"aggregated signature check pass: {validates}")
print(f"pks: {pks}")
print(f"msgs: {[msg.hex() for msg in msgs]}")
print(f" msg_data: {[msg.hex()[:-128] for msg in msgs]}")
print(f" coin_ids: {[msg.hex()[-128:-64] for msg in msgs]}")
print(f" add_data: {[msg.hex()[-64:] for msg in msgs]}")
print(f"signature: {spend_bundle.aggregated_signature}")
def solution_for_pay_to_any(puzzle_hash_amount_pairs: Tuple[bytes32, int]) -> Program:
output_conditions = [
[ConditionOpcode.CREATE_COIN, puzzle_hash, amount] for puzzle_hash, amount in puzzle_hash_amount_pairs
]
return Program.to(output_conditions)
| 41.718593 | 112 | 0.642978 |
acf1fbd8ca4ebe511b3560850ca55bf7fe66fc3b | 3,374 | py | Python | venv/lib/python3.8/site-packages/ansible_collections/cisco/ise/plugins/modules/sxp_connections.py | saeedya/docker-ansible | 6fb0cfc6bc4a5925b21380952a5a4502ec02119a | [
"Apache-2.0"
] | null | null | null | venv/lib/python3.8/site-packages/ansible_collections/cisco/ise/plugins/modules/sxp_connections.py | saeedya/docker-ansible | 6fb0cfc6bc4a5925b21380952a5a4502ec02119a | [
"Apache-2.0"
] | null | null | null | venv/lib/python3.8/site-packages/ansible_collections/cisco/ise/plugins/modules/sxp_connections.py | saeedya/docker-ansible | 6fb0cfc6bc4a5925b21380952a5a4502ec02119a | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2021, Cisco Systems
# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
DOCUMENTATION = r"""
---
module: sxp_connections
short_description: Resource module for Sxp Connections
description:
- Manage operations create, update and delete of the resource Sxp Connections.
version_added: '1.0.0'
extends_documentation_fragment:
- cisco.ise.module
author: Rafael Campos (@racampos)
options:
description:
description: Sxp Connections's description.
type: str
enabled:
description: Enabled flag.
type: bool
id:
description: Sxp Connections's id.
type: str
ipAddress:
description: Sxp Connections's ipAddress.
type: str
sxpMode:
description: Sxp Connections's sxpMode.
type: str
sxpNode:
description: Sxp Connections's sxpNode.
type: str
sxpPeer:
description: Sxp Connections's sxpPeer.
type: str
sxpVersion:
description: Sxp Connections's sxpVersion.
type: str
sxpVpn:
description: Sxp Connections's sxpVpn.
type: str
requirements:
- ciscoisesdk >= 1.1.0
- python >= 3.5
seealso:
# Reference by Internet resource
- name: Sxp Connections reference
description: Complete reference of the Sxp Connections object model.
link: https://ciscoisesdk.readthedocs.io/en/latest/api/api.html#v3-0-0-summary
"""
EXAMPLES = r"""
- name: Update by id
cisco.ise.sxp_connections:
ise_hostname: "{{ise_hostname}}"
ise_username: "{{ise_username}}"
ise_password: "{{ise_password}}"
ise_verify: "{{ise_verify}}"
state: present
description: string
enabled: true
id: string
ipAddress: string
sxpMode: string
sxpNode: string
sxpPeer: string
sxpVersion: string
sxpVpn: string
- name: Delete by id
cisco.ise.sxp_connections:
ise_hostname: "{{ise_hostname}}"
ise_username: "{{ise_username}}"
ise_password: "{{ise_password}}"
ise_verify: "{{ise_verify}}"
state: absent
id: string
- name: Create
cisco.ise.sxp_connections:
ise_hostname: "{{ise_hostname}}"
ise_username: "{{ise_username}}"
ise_password: "{{ise_password}}"
ise_verify: "{{ise_verify}}"
state: present
description: string
enabled: true
ipAddress: string
sxpMode: string
sxpNode: string
sxpPeer: string
sxpVersion: string
sxpVpn: string
"""
RETURN = r"""
ise_response:
description: A dictionary or list with the response returned by the Cisco ISE Python SDK
returned: always
type: dict
sample: >
{
"id": "string",
"description": "string",
"sxpPeer": "string",
"sxpVpn": "string",
"sxpNode": "string",
"ipAddress": "string",
"sxpMode": "string",
"sxpVersion": "string",
"enabled": true,
"link": {
"rel": "string",
"href": "string",
"type": "string"
}
}
ise_update_response:
description: A dictionary or list with the response returned by the Cisco ISE Python SDK
returned: always
version_added: "1.1.0"
type: dict
sample: >
{
"UpdatedFieldsList": {
"updatedField": {
"field": "string",
"oldValue": "string",
"newValue": "string"
},
"field": "string",
"oldValue": "string",
"newValue": "string"
}
}
"""
| 23.760563 | 92 | 0.645525 |
acf1fe50f54d6bb7184a53cf8c5504d3cf095a95 | 20 | py | Python | tests/mock/__init__.py | andersop91/core | 0e0ef0aa17073609eae7c974cf4c73306b7c414b | [
"Apache-2.0"
] | 22,481 | 2020-03-02T13:09:59.000Z | 2022-03-31T23:34:28.000Z | tests/mock/__init__.py | jagadeeshvenkatesh/core | 1bd982668449815fee2105478569f8e4b5670add | [
"Apache-2.0"
] | 31,101 | 2020-03-02T13:00:16.000Z | 2022-03-31T23:57:36.000Z | tests/mock/__init__.py | jagadeeshvenkatesh/core | 1bd982668449815fee2105478569f8e4b5670add | [
"Apache-2.0"
] | 11,411 | 2020-03-02T14:19:20.000Z | 2022-03-31T22:46:07.000Z | """Mock helpers."""
| 10 | 19 | 0.55 |
acf1ff4687954a2992193ffd6b021789dee4a50a | 20,286 | py | Python | gremlin-python/src/main/jython/gremlin_python/structure/io/graphsonV3d0.py | IsaacBoy/tinkerpop | f799335a29e5da98644b75e8d5d9aea810bc6262 | [
"Apache-2.0"
] | null | null | null | gremlin-python/src/main/jython/gremlin_python/structure/io/graphsonV3d0.py | IsaacBoy/tinkerpop | f799335a29e5da98644b75e8d5d9aea810bc6262 | [
"Apache-2.0"
] | null | null | null | gremlin-python/src/main/jython/gremlin_python/structure/io/graphsonV3d0.py | IsaacBoy/tinkerpop | f799335a29e5da98644b75e8d5d9aea810bc6262 | [
"Apache-2.0"
] | null | null | null | """
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
"""
import datetime
import json
import time
import uuid
import math
from collections import OrderedDict
import logging
import six
from aenum import Enum
from gremlin_python import statics
from gremlin_python.statics import FloatType, FunctionType, IntType, LongType, TypeType, DictType, ListType, SetType
from gremlin_python.process.traversal import Binding, Bytecode, P, TextP, Traversal, Traverser, TraversalStrategy, T
from gremlin_python.structure.graph import Edge, Property, Vertex, VertexProperty, Path
log = logging.getLogger(__name__)
# When we fall back to a superclass's serializer, we iterate over this map.
# We want that iteration order to be consistent, so we use an OrderedDict,
# not a dict.
_serializers = OrderedDict()
_deserializers = {}
class GraphSONTypeType(type):
def __new__(mcs, name, bases, dct):
cls = super(GraphSONTypeType, mcs).__new__(mcs, name, bases, dct)
if not name.startswith('_'):
if cls.python_type:
_serializers[cls.python_type] = cls
if cls.graphson_type:
_deserializers[cls.graphson_type] = cls
return cls
class GraphSONUtil(object):
TYPE_KEY = "@type"
VALUE_KEY = "@value"
@classmethod
def typedValue(cls, type_name, value, prefix="g"):
out = {cls.TYPE_KEY: cls.formatType(prefix, type_name)}
if value is not None:
out[cls.VALUE_KEY] = value
return out
@classmethod
def formatType(cls, prefix, type_name):
return "%s:%s" % (prefix, type_name)
# Read/Write classes split to follow precedence of the Java API
class GraphSONWriter(object):
def __init__(self, serializer_map=None):
"""
:param serializer_map: map from Python type to serializer instance implementing `dictify`
"""
self.serializers = _serializers.copy()
if serializer_map:
self.serializers.update(serializer_map)
def writeObject(self, objectData):
# to JSON
return json.dumps(self.toDict(objectData), separators=(',', ':'))
def toDict(self, obj):
"""
Encodes python objects in GraphSON type-tagged dict values
"""
try:
return self.serializers[type(obj)].dictify(obj, self)
except KeyError:
for key, serializer in self.serializers.items():
if isinstance(obj, key):
return serializer.dictify(obj, self)
if isinstance(obj, dict):
return dict((self.toDict(k), self.toDict(v)) for k, v in obj.items())
elif isinstance(obj, set):
return set([self.toDict(o) for o in obj])
elif isinstance(obj, list):
return [self.toDict(o) for o in obj]
else:
return obj
class GraphSONReader(object):
def __init__(self, deserializer_map=None):
"""
:param deserializer_map: map from GraphSON type tag to deserializer instance implementing `objectify`
"""
self.deserializers = _deserializers.copy()
if deserializer_map:
self.deserializers.update(deserializer_map)
def readObject(self, jsonData):
# from JSON
return self.toObject(json.loads(jsonData))
def toObject(self, obj):
"""
Unpacks GraphSON type-tagged dict values into objects mapped in self.deserializers
"""
if isinstance(obj, dict):
try:
return self.deserializers[obj[GraphSONUtil.TYPE_KEY]].objectify(obj[GraphSONUtil.VALUE_KEY], self)
except KeyError:
pass
return dict((self.toObject(k), self.toObject(v)) for k, v in obj.items())
elif isinstance(obj, set):
return set([self.toObject(o) for o in obj])
elif isinstance(obj, list):
return [self.toObject(o) for o in obj]
else:
return obj
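# Illustrative round trip (not part of the original module), using the UUIDIO class below:
# GraphSONWriter().writeObject(uuid.UUID("41d2e28a-20a4-4ab0-b379-d180aa8d08db")) produces
# '{"@type":"g:UUID","@value":"41d2e28a-20a4-4ab0-b379-d180aa8d08db"}', and passing that
# JSON string to GraphSONReader().readObject() yields a uuid.UUID instance again.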
@six.add_metaclass(GraphSONTypeType)
class _GraphSONTypeIO(object):
python_type = None
graphson_type = None
symbolMap = {"global_": "global", "as_": "as", "in_": "in", "and_": "and",
"or_": "or", "is_": "is", "not_": "not", "from_": "from",
"set_": "set", "list_": "list", "all_": "all", "with_": "with"}
@classmethod
def unmangleKeyword(cls, symbol):
return cls.symbolMap.get(symbol, symbol)
def dictify(self, obj, writer):
raise NotImplementedError()
def objectify(self, d, reader):
raise NotImplementedError()
class _BytecodeSerializer(_GraphSONTypeIO):
@classmethod
def _dictify_instructions(cls, instructions, writer):
out = []
for instruction in instructions:
inst = [instruction[0]]
inst.extend(writer.toDict(arg) for arg in instruction[1:])
out.append(inst)
return out
@classmethod
def dictify(cls, bytecode, writer):
if isinstance(bytecode, Traversal):
bytecode = bytecode.bytecode
out = {}
if bytecode.source_instructions:
out["source"] = cls._dictify_instructions(bytecode.source_instructions, writer)
if bytecode.step_instructions:
out["step"] = cls._dictify_instructions(bytecode.step_instructions, writer)
return GraphSONUtil.typedValue("Bytecode", out)
class TraversalSerializer(_BytecodeSerializer):
python_type = Traversal
class BytecodeSerializer(_BytecodeSerializer):
python_type = Bytecode
class VertexSerializer(_GraphSONTypeIO):
python_type = Vertex
graphson_type = "g:Vertex"
@classmethod
def dictify(cls, vertex, writer):
return GraphSONUtil.typedValue("Vertex", {"id": writer.toDict(vertex.id),
"label": writer.toDict(vertex.label)})
class EdgeSerializer(_GraphSONTypeIO):
python_type = Edge
graphson_type = "g:Edge"
@classmethod
def dictify(cls, edge, writer):
return GraphSONUtil.typedValue("Edge", {"id": writer.toDict(edge.id),
"outV": writer.toDict(edge.outV.id),
"outVLabel": writer.toDict(edge.outV.label),
"label": writer.toDict(edge.label),
"inV": writer.toDict(edge.inV.id),
"inVLabel": writer.toDict(edge.inV.label)})
class VertexPropertySerializer(_GraphSONTypeIO):
python_type = VertexProperty
graphson_type = "g:VertexProperty"
@classmethod
def dictify(cls, vertex_property, writer):
return GraphSONUtil.typedValue("VertexProperty", {"id": writer.toDict(vertex_property.id),
"label": writer.toDict(vertex_property.label),
"value": writer.toDict(vertex_property.value),
"vertex": writer.toDict(vertex_property.vertex.id)})
class PropertySerializer(_GraphSONTypeIO):
python_type = Property
graphson_type = "g:Property"
@classmethod
def dictify(cls, property, writer):
elementDict = writer.toDict(property.element)
if elementDict is not None:
valueDict = elementDict["@value"]
if "outVLabel" in valueDict:
del valueDict["outVLabel"]
if "inVLabel" in valueDict:
del valueDict["inVLabel"]
if "properties" in valueDict:
del valueDict["properties"]
if "value" in valueDict:
del valueDict["value"]
return GraphSONUtil.typedValue("Property", {"key": writer.toDict(property.key),
"value": writer.toDict(property.value),
"element": elementDict})
class TraversalStrategySerializer(_GraphSONTypeIO):
python_type = TraversalStrategy
@classmethod
def dictify(cls, strategy, writer):
configuration = {}
for key in strategy.configuration:
configuration[key] = writer.toDict(strategy.configuration[key])
return GraphSONUtil.typedValue(strategy.strategy_name, configuration)
class TraverserIO(_GraphSONTypeIO):
python_type = Traverser
graphson_type = "g:Traverser"
@classmethod
def dictify(cls, traverser, writer):
return GraphSONUtil.typedValue("Traverser", {"value": writer.toDict(traverser.object),
"bulk": writer.toDict(traverser.bulk)})
@classmethod
def objectify(cls, d, reader):
return Traverser(reader.toObject(d["value"]),
reader.toObject(d["bulk"]))
class EnumSerializer(_GraphSONTypeIO):
python_type = Enum
@classmethod
def dictify(cls, enum, _):
return GraphSONUtil.typedValue(cls.unmangleKeyword(type(enum).__name__),
cls.unmangleKeyword(str(enum.name)))
class PSerializer(_GraphSONTypeIO):
python_type = P
@classmethod
def dictify(cls, p, writer):
out = {"predicate": p.operator,
"value": [writer.toDict(p.value), writer.toDict(p.other)] if p.other is not None else
writer.toDict(p.value)}
return GraphSONUtil.typedValue("P", out)
class TextPSerializer(_GraphSONTypeIO):
python_type = TextP
@classmethod
def dictify(cls, p, writer):
out = {"predicate": p.operator,
"value": [writer.toDict(p.value), writer.toDict(p.other)] if p.other is not None else
writer.toDict(p.value)}
return GraphSONUtil.typedValue("TextP", out)
class BindingSerializer(_GraphSONTypeIO):
python_type = Binding
@classmethod
def dictify(cls, binding, writer):
out = {"key": binding.key,
"value": writer.toDict(binding.value)}
return GraphSONUtil.typedValue("Binding", out)
class LambdaSerializer(_GraphSONTypeIO):
python_type = FunctionType
@classmethod
def dictify(cls, lambda_object, writer):
lambda_result = lambda_object()
script = lambda_result if isinstance(lambda_result, str) else lambda_result[0]
language = statics.default_lambda_language if isinstance(lambda_result, str) else lambda_result[1]
out = {"script": script,
"language": language}
if language == "gremlin-jython" or language == "gremlin-python":
if not script.strip().startswith("lambda"):
script = "lambda " + script
out["script"] = script
out["arguments"] = six.get_function_code(eval(out["script"])).co_argcount
else:
out["arguments"] = -1
return GraphSONUtil.typedValue("Lambda", out)
class TypeSerializer(_GraphSONTypeIO):
python_type = TypeType
@classmethod
def dictify(cls, typ, writer):
return writer.toDict(typ())
class UUIDIO(_GraphSONTypeIO):
python_type = uuid.UUID
graphson_type = "g:UUID"
graphson_base_type = "UUID"
@classmethod
def dictify(cls, obj, writer):
return GraphSONUtil.typedValue(cls.graphson_base_type, str(obj))
@classmethod
def objectify(cls, d, reader):
return cls.python_type(d)
class DateIO(_GraphSONTypeIO):
python_type = datetime.datetime
graphson_type = "g:Date"
graphson_base_type = "Date"
@classmethod
def dictify(cls, obj, writer):
if six.PY3:
pts = obj.timestamp()
else:
# Hack for legacy Python
# timestamp() in Python 3.3
pts = time.mktime((obj.year, obj.month, obj.day,
obj.hour, obj.minute, obj.second,
-1, -1, -1)) + obj.microsecond / 1e6
        # Java timestamp expects milliseconds
# Have to use int because of legacy Python
ts = int(round(pts * 1000))
return GraphSONUtil.typedValue(cls.graphson_base_type, ts)
@classmethod
def objectify(cls, ts, reader):
# Python timestamp expects seconds
return datetime.datetime.fromtimestamp(ts / 1000.0)
# Based on current implementation, this class must always be declared before FloatIO.
# Seems pretty fragile for future maintainers. Maybe look into this.
class TimestampIO(_GraphSONTypeIO):
"""A timestamp in Python is type float"""
python_type = statics.timestamp
graphson_type = "g:Timestamp"
graphson_base_type = "Timestamp"
@classmethod
def dictify(cls, obj, writer):
# Java timestamp expects milliseconds integer
# Have to use int because of legacy Python
ts = int(round(obj * 1000))
return GraphSONUtil.typedValue(cls.graphson_base_type, ts)
@classmethod
def objectify(cls, ts, reader):
# Python timestamp expects seconds
return cls.python_type(ts / 1000.0)
class _NumberIO(_GraphSONTypeIO):
@classmethod
def dictify(cls, n, writer):
if isinstance(n, bool): # because isinstance(False, int) and isinstance(True, int)
return n
return GraphSONUtil.typedValue(cls.graphson_base_type, n)
@classmethod
def objectify(cls, v, _):
return cls.python_type(v)
class ListIO(_GraphSONTypeIO):
python_type = ListType
graphson_type = "g:List"
@classmethod
def dictify(cls, l, writer):
new_list = []
for obj in l:
new_list.append(writer.toDict(obj))
return GraphSONUtil.typedValue("List", new_list)
@classmethod
def objectify(cls, l, reader):
new_list = []
for obj in l:
new_list.append(reader.toObject(obj))
return new_list
class SetIO(_GraphSONTypeIO):
python_type = SetType
graphson_type = "g:Set"
@classmethod
def dictify(cls, s, writer):
new_list = []
for obj in s:
new_list.append(writer.toDict(obj))
return GraphSONUtil.typedValue("Set", new_list)
@classmethod
def objectify(cls, s, reader):
"""
        By default, returns a Python set.
        In case Java returns numeric values of different types which
        Python does not recognize as distinct, coerce them and return a list.
        See the comments of TINKERPOP-1844 for more details.
"""
new_list = [reader.toObject(obj) for obj in s]
new_set = set(new_list)
if len(new_list) != len(new_set):
log.warning("Coercing g:Set to list due to java numeric values. "
"See TINKERPOP-1844 for more details.")
return new_list
return new_set
class MapType(_GraphSONTypeIO):
python_type = DictType
graphson_type = "g:Map"
@classmethod
def dictify(cls, d, writer):
l = []
for key in d:
l.append(writer.toDict(key))
l.append(writer.toDict(d[key]))
return GraphSONUtil.typedValue("Map", l)
@classmethod
def objectify(cls, l, reader):
new_dict = {}
if len(l) > 0:
x = 0
while x < len(l):
new_dict[reader.toObject(l[x])] = reader.toObject(l[x + 1])
x = x + 2
return new_dict
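# Illustrative example (not part of the original module): the dict {"name": "marko"} is
# serialized as {"@type": "g:Map", "@value": ["name", "marko"]} -- keys and values are
# flattened into a single alternating list, and objectify() rebuilds the dict from it.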
class BulkSetIO(_GraphSONTypeIO):
graphson_type = "g:BulkSet"
@classmethod
def objectify(cls, l, reader):
new_list = []
        # This approach basically mimics what existed in 3.3.4 and prior versions,
        # where BulkSet is simply coerced to a list. The limitation is that if the
        # value of a bulk exceeds the maximum size of a list (into the long space),
        # the expansion will not work correctly.
if len(l) > 0:
x = 0
while x < len(l):
obj = reader.toObject(l[x])
bulk = reader.toObject(l[x + 1])
for y in range(bulk):
new_list.append(obj)
x = x + 2
return new_list
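# Illustrative example (not part of the original module): a g:BulkSet whose decoded
# "@value" flattens to the pairs ["marko", 2] is expanded by the loop above into the
# plain list ["marko", "marko"].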
class FloatIO(_NumberIO):
python_type = FloatType
graphson_type = "g:Float"
graphson_base_type = "Float"
@classmethod
def dictify(cls, n, writer):
if isinstance(n, bool): # because isinstance(False, int) and isinstance(True, int)
return n
elif math.isnan(n):
return GraphSONUtil.typedValue(cls.graphson_base_type, "NaN")
elif math.isinf(n) and n > 0:
return GraphSONUtil.typedValue(cls.graphson_base_type, "Infinity")
elif math.isinf(n) and n < 0:
return GraphSONUtil.typedValue(cls.graphson_base_type, "-Infinity")
else:
return GraphSONUtil.typedValue(cls.graphson_base_type, n)
@classmethod
def objectify(cls, v, _):
if isinstance(v, str):
if v == 'NaN':
return float('nan')
elif v == "Infinity":
return float('inf')
elif v == "-Infinity":
return float('-inf')
return cls.python_type(v)
class DoubleIO(FloatIO):
graphson_type = "g:Double"
graphson_base_type = "Double"
class Int64IO(_NumberIO):
python_type = LongType
graphson_type = "g:Int64"
graphson_base_type = "Int64"
class Int32IO(_NumberIO):
python_type = IntType
graphson_type = "g:Int32"
graphson_base_type = "Int32"
@classmethod
def dictify(cls, n, writer):
if isinstance(n, bool):
return n
return GraphSONUtil.typedValue(cls.graphson_base_type, n)
class VertexDeserializer(_GraphSONTypeIO):
graphson_type = "g:Vertex"
@classmethod
def objectify(cls, d, reader):
return Vertex(reader.toObject(d["id"]), d.get("label", "vertex"))
class EdgeDeserializer(_GraphSONTypeIO):
graphson_type = "g:Edge"
@classmethod
def objectify(cls, d, reader):
return Edge(reader.toObject(d["id"]),
Vertex(reader.toObject(d["outV"]), d.get("outVLabel", "vertex")),
d.get("label", "edge"),
Vertex(reader.toObject(d["inV"]), d.get("inVLabel", "vertex")))
class VertexPropertyDeserializer(_GraphSONTypeIO):
graphson_type = "g:VertexProperty"
@classmethod
def objectify(cls, d, reader):
vertex = Vertex(reader.toObject(d.get("vertex"))) if "vertex" in d else None
return VertexProperty(reader.toObject(d["id"]),
d["label"],
reader.toObject(d["value"]),
vertex)
class PropertyDeserializer(_GraphSONTypeIO):
graphson_type = "g:Property"
@classmethod
def objectify(cls, d, reader):
element = reader.toObject(d["element"]) if "element" in d else None
return Property(d["key"], reader.toObject(d["value"]), element)
class PathDeserializer(_GraphSONTypeIO):
graphson_type = "g:Path"
@classmethod
def objectify(cls, d, reader):
return Path(reader.toObject(d["labels"]), reader.toObject(d["objects"]))
class TDeserializer(_GraphSONTypeIO):
graphson_type = "g:T"
@classmethod
def objectify(cls, d, reader):
return T[d]
class TraversalMetricsDeserializer(_GraphSONTypeIO):
graphson_type = "g:TraversalMetrics"
@classmethod
def objectify(cls, d, reader):
return reader.toObject(d)
class MetricsDeserializer(_GraphSONTypeIO):
graphson_type = "g:Metrics"
@classmethod
def objectify(cls, d, reader):
return reader.toObject(d)
| 32.2 | 116 | 0.619097 |
acf1ffd16f1aa71ffec18efdf658a964d0a681d8 | 8,695 | py | Python | tests/components/harmony/test_config_flow.py | squirrel289/core | 6c5bcbfc3ee40927458e9188d6b79bf63933d3f9 | [
"Apache-2.0"
] | 5 | 2020-09-17T21:47:23.000Z | 2021-06-04T04:37:29.000Z | tests/components/harmony/test_config_flow.py | squirrel289/core | 6c5bcbfc3ee40927458e9188d6b79bf63933d3f9 | [
"Apache-2.0"
] | 47 | 2020-07-23T07:13:11.000Z | 2022-03-31T06:01:46.000Z | tests/components/harmony/test_config_flow.py | CrossEyeORG/homeassistant | 6c5bcbfc3ee40927458e9188d6b79bf63933d3f9 | [
"Apache-2.0"
] | 2 | 2017-09-03T16:06:02.000Z | 2021-01-12T15:07:52.000Z | """Test the Logitech Harmony Hub config flow."""
from homeassistant import config_entries, data_entry_flow, setup
from homeassistant.components.harmony.config_flow import CannotConnect
from homeassistant.components.harmony.const import DOMAIN, PREVIOUS_ACTIVE_ACTIVITY
from homeassistant.const import CONF_HOST, CONF_NAME
from tests.async_mock import AsyncMock, MagicMock, PropertyMock, patch
from tests.common import MockConfigEntry
def _get_mock_harmonyapi(connect=None, close=None):
harmonyapi_mock = MagicMock()
type(harmonyapi_mock).connect = AsyncMock(return_value=connect)
type(harmonyapi_mock).close = AsyncMock(return_value=close)
return harmonyapi_mock
def _get_mock_harmonyclient():
harmonyclient_mock = MagicMock()
type(harmonyclient_mock).connect = AsyncMock()
type(harmonyclient_mock).close = AsyncMock()
type(harmonyclient_mock).get_activity_name = MagicMock(return_value="Watch TV")
type(harmonyclient_mock.hub_config).activities = PropertyMock(
return_value=[{"name": "Watch TV", "id": 123}]
)
type(harmonyclient_mock.hub_config).devices = PropertyMock(
return_value=[{"name": "My TV", "id": 1234}]
)
type(harmonyclient_mock.hub_config).info = PropertyMock(return_value={})
type(harmonyclient_mock.hub_config).hub_state = PropertyMock(return_value={})
return harmonyclient_mock
async def test_user_form(hass):
"""Test we get the user form."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] == {}
harmonyapi = _get_mock_harmonyapi(connect=True)
with patch(
"homeassistant.components.harmony.util.HarmonyAPI", return_value=harmonyapi,
), patch(
"homeassistant.components.harmony.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.harmony.async_setup_entry", return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"], {"host": "1.2.3.4", "name": "friend"},
)
assert result2["type"] == "create_entry"
assert result2["title"] == "friend"
assert result2["data"] == {"host": "1.2.3.4", "name": "friend"}
await hass.async_block_till_done()
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_form_import(hass):
"""Test we get the form with import source."""
await setup.async_setup_component(hass, "persistent_notification", {})
harmonyapi = _get_mock_harmonyapi(connect=True)
with patch(
"homeassistant.components.harmony.util.HarmonyAPI", return_value=harmonyapi,
), patch(
"homeassistant.components.harmony.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.harmony.async_setup_entry", return_value=True,
) as mock_setup_entry:
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={
"host": "1.2.3.4",
"name": "friend",
"activity": "Watch TV",
"delay_secs": 0.9,
"unique_id": "555234534543",
},
)
assert result["result"].unique_id == "555234534543"
assert result["type"] == "create_entry"
assert result["title"] == "friend"
assert result["data"] == {
"host": "1.2.3.4",
"name": "friend",
"activity": "Watch TV",
"delay_secs": 0.9,
}
# It is not possible to import options at this time
# so they end up in the config entry data and are
    # used as a fallback when they are not in options
await hass.async_block_till_done()
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_form_ssdp(hass):
"""Test we get the form with ssdp source."""
await setup.async_setup_component(hass, "persistent_notification", {})
harmonyapi = _get_mock_harmonyapi(connect=True)
with patch(
"homeassistant.components.harmony.util.HarmonyAPI", return_value=harmonyapi,
):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_SSDP},
data={
"friendlyName": "Harmony Hub",
"ssdp_location": "http://192.168.1.12:8088/description",
},
)
assert result["type"] == "form"
assert result["step_id"] == "link"
assert result["errors"] == {}
assert result["description_placeholders"] == {
"host": "Harmony Hub",
"name": "192.168.1.12",
}
with patch(
"homeassistant.components.harmony.util.HarmonyAPI", return_value=harmonyapi,
), patch(
"homeassistant.components.harmony.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.harmony.async_setup_entry", return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {},)
assert result2["type"] == "create_entry"
assert result2["title"] == "Harmony Hub"
assert result2["data"] == {"host": "192.168.1.12", "name": "Harmony Hub"}
await hass.async_block_till_done()
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_form_ssdp_aborts_before_checking_remoteid_if_host_known(hass):
"""Test we abort without connecting if the host is already known."""
await setup.async_setup_component(hass, "persistent_notification", {})
config_entry = MockConfigEntry(
domain=DOMAIN, data={"host": "2.2.2.2", "name": "any"},
)
config_entry.add_to_hass(hass)
config_entry_without_host = MockConfigEntry(domain=DOMAIN, data={"name": "other"},)
config_entry_without_host.add_to_hass(hass)
harmonyapi = _get_mock_harmonyapi(connect=True)
with patch(
"homeassistant.components.harmony.util.HarmonyAPI", return_value=harmonyapi,
):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_SSDP},
data={
"friendlyName": "Harmony Hub",
"ssdp_location": "http://2.2.2.2:8088/description",
},
)
assert result["type"] == "abort"
async def test_form_cannot_connect(hass):
"""Test we handle cannot connect error."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"homeassistant.components.harmony.util.HarmonyAPI", side_effect=CannotConnect,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"host": "1.2.3.4",
"name": "friend",
"activity": "Watch TV",
"delay_secs": 0.2,
},
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "cannot_connect"}
async def test_options_flow(hass):
"""Test config flow options."""
config_entry = MockConfigEntry(
domain=DOMAIN,
unique_id="abcde12345",
data={CONF_HOST: "1.2.3.4", CONF_NAME: "Guest Room"},
options={"activity": "Watch TV", "delay_secs": 0.5},
)
harmony_client = _get_mock_harmonyclient()
with patch(
"aioharmony.harmonyapi.HarmonyClient", return_value=harmony_client,
), patch("homeassistant.components.harmony.remote.HarmonyRemote.write_config_file"):
config_entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
result = await hass.config_entries.options.async_init(config_entry.entry_id)
await hass.async_block_till_done()
assert await hass.config_entries.async_unload(config_entry.entry_id)
await hass.async_block_till_done()
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "init"
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={"activity": PREVIOUS_ACTIVE_ACTIVITY, "delay_secs": 0.4},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert config_entry.options == {
"activity": PREVIOUS_ACTIVE_ACTIVITY,
"delay_secs": 0.4,
}
| 37.15812 | 88 | 0.66383 |
acf2008c5994bf3dfca998733f92bf83385a1846 | 1,510 | py | Python | VLAD/train_vb.py | Reza666-cloud/Sea_and_Stars | f365f7e2e46a6efda5388a103ca7f9c5ecad711a | [
"MIT"
] | 1 | 2022-01-07T06:25:24.000Z | 2022-01-07T06:25:24.000Z | VLAD/train_vb.py | Reza666-cloud/Sea_and_Stars | f365f7e2e46a6efda5388a103ca7f9c5ecad711a | [
"MIT"
] | null | null | null | VLAD/train_vb.py | Reza666-cloud/Sea_and_Stars | f365f7e2e46a6efda5388a103ca7f9c5ecad711a | [
"MIT"
] | null | null | null | import os
import glob
import pickle
import numpy as np
from VLADlib.VLAD import getDescriptors, kMeansDictionary
from VLADlib import Descriptors
def get_visualBook(data_path, K, save_path):
data_list = glob.glob(os.path.join(data_path, "*"))
descriptors_list = None
# print(descriptors_list.shape)
for i in range(len(data_list)):
if i == 0:
descriptors_list = getDescriptors(data_list[i], Descriptors.describeSIFT)
else:
arr = getDescriptors(data_list[i], Descriptors.describeSIFT)
descriptors_list = np.concatenate((descriptors_list, arr), axis=0)
print(descriptors_list.shape)
print(descriptors_list.shape)
np.save("descriptors", descriptors_list)
visualDictionary = kMeansDictionary(descriptors_list, K)
print(visualDictionary)
# file_path = "./Data/satellite_visualDictionary"+".pickle"
with open(save_path, 'wb') as f:
pickle.dump(visualDictionary, f)
for Height in [150, 200, 250, 300]:
# Height = 150
K = 149-60
satellite_data_path = '/media/data1/Datasets/Training/' + str(Height) + '/satellite'
drone_data_path = '/media/data1/Datasets/Training/' + str(Height) + '/drone'
satellite_save_path = "./Data/satellite_%s_visualDictionary" % str(Height) + ".pickle"
drone_save_path = "./Data/drone_%s_visualDictionary" % str(Height) + ".pickle"
get_visualBook(drone_data_path, K, drone_save_path)
get_visualBook(satellite_data_path, K, satellite_save_path)
| 36.829268 | 90 | 0.70596 |
acf201487ba5bfae192906bff79ee8438bb8dde9 | 1,016 | py | Python | app/core/models.py | danielbbeleza/recipe-app-api | 79749035c754a762098efda4319dbbb65e5c95a1 | [
"MIT"
] | null | null | null | app/core/models.py | danielbbeleza/recipe-app-api | 79749035c754a762098efda4319dbbb65e5c95a1 | [
"MIT"
] | null | null | null | app/core/models.py | danielbbeleza/recipe-app-api | 79749035c754a762098efda4319dbbb65e5c95a1 | [
"MIT"
] | null | null | null | from django.db import models
from django.contrib.auth.models import AbstractBaseUser, BaseUserManager, PermissionsMixin
class UserManager(BaseUserManager):
def create_user(self, email, password=None, **extra_fields):
"""Create and saves a new user"""
if not email: # "if email is empty" - empty strings are "faulty" in python and considered false
raise ValueError("Users must have an email address")
user = self.model(email=self.normalize_email(email), **extra_fields)
user.set_password(password)
user.save(using=self.db)
return user
class User(AbstractBaseUser, PermissionsMixin):
"""Custom user model that supports using email instead of username"""
email = models.EmailField(max_length=255, unique=True) # email is a primary key
name = models.CharField(max_length=255)
is_active = models.BooleanField(default=True)
is_staff = models.BooleanField(default=False)
objects = UserManager()
USERNAME_FIELD = 'email'
| 35.034483 | 104 | 0.714567 |
acf201dc3b688843c9e7fc9bcb772dc22b157989 | 1,171 | py | Python | lib/rpn_msr/generate_anchors.py | czy779509408/text-detection-ctpn | e0cf757a33e83ead6dd6330ba47f5053de12c506 | [
"MIT"
] | 2,744 | 2018-04-13T09:51:22.000Z | 2022-03-29T03:07:19.000Z | lib/rpn_msr/generate_anchors.py | infinitisun/text-detection-ctpn | b94c3af3d5105b5a9ff4d4a00edf92b2d55ee4cf | [
"MIT"
] | 370 | 2018-04-17T05:36:53.000Z | 2022-02-22T02:54:10.000Z | lib/rpn_msr/generate_anchors.py | infinitisun/text-detection-ctpn | b94c3af3d5105b5a9ff4d4a00edf92b2d55ee4cf | [
"MIT"
] | 1,145 | 2018-04-13T09:52:49.000Z | 2022-03-29T02:21:13.000Z | import numpy as np
def generate_basic_anchors(sizes, base_size=16):
base_anchor = np.array([0, 0, base_size - 1, base_size - 1], np.int32)
anchors = np.zeros((len(sizes), 4), np.int32)
index = 0
for h, w in sizes:
anchors[index] = scale_anchor(base_anchor, h, w)
index += 1
return anchors
def scale_anchor(anchor, h, w):
x_ctr = (anchor[0] + anchor[2]) * 0.5
y_ctr = (anchor[1] + anchor[3]) * 0.5
scaled_anchor = anchor.copy()
scaled_anchor[0] = x_ctr - w / 2 # xmin
scaled_anchor[2] = x_ctr + w / 2 # xmax
scaled_anchor[1] = y_ctr - h / 2 # ymin
scaled_anchor[3] = y_ctr + h / 2 # ymax
return scaled_anchor
def generate_anchors(base_size=16, ratios=[0.5, 1, 2],
scales=2**np.arange(3, 6)):
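    # CTPN-style anchors: the width is fixed at 16 px and only the height varies across
    # ten values, so the base_size/ratios/scales parameters above are unused here (they
    # appear to be kept from the original Faster R-CNN-style helper signature).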
heights = [11, 16, 23, 33, 48, 68, 97, 139, 198, 283]
widths = [16]
sizes = []
for h in heights:
for w in widths:
sizes.append((h, w))
return generate_basic_anchors(sizes)
if __name__ == '__main__':
import time
t = time.time()
a = generate_anchors()
print(time.time() - t)
print(a)
from IPython import embed; embed()
| 28.560976 | 74 | 0.585824 |
acf2037cbec34d122e16cd633691be0d10f5dbbc | 3,781 | py | Python | dnn8level/swap_start/auto_playok_com/construct_dnn.py | yudongqiu/gomoku | 4a95f2a5008f31fed5cb92c6bd6d55f9669ddd06 | [
"MIT"
] | 3 | 2018-06-12T09:03:41.000Z | 2019-01-14T05:34:57.000Z | dnn8level/swap_start/auto_playok_com/construct_dnn.py | yudongqiu/gomoku | 4a95f2a5008f31fed5cb92c6bd6d55f9669ddd06 | [
"MIT"
] | null | null | null | dnn8level/swap_start/auto_playok_com/construct_dnn.py | yudongqiu/gomoku | 4a95f2a5008f31fed5cb92c6bd6d55f9669ddd06 | [
"MIT"
] | null | null | null | import tensorflow as tf
import tflearn
def construct_dnn():
tf.reset_default_graph()
tflearn.init_graph(num_cores=4, gpu_memory_fraction=0.3)
tflearn.config.init_training_mode()
img_aug = tflearn.ImageAugmentation()
img_aug.add_random_90degrees_rotation()
img_aug.add_random_flip_leftright()
img_aug.add_random_flip_updown()
input_layer = tflearn.input_data(shape=[None, 15, 15, 3], data_augmentation=img_aug)
# block 1
net = tflearn.conv_2d(input_layer, 256, 3, activation=None)
net = tflearn.batch_normalization(net)
net = tflearn.activation(net, activation='relu')
# res block 1
tmp = tflearn.conv_2d(net, 256, 3, activation=None)
tmp = tflearn.batch_normalization(tmp)
tmp = tflearn.activation(tmp, activation='relu')
tmp = tflearn.conv_2d(tmp, 256, 3, activation=None)
tmp = tflearn.batch_normalization(tmp)
net = tflearn.activation(net + tmp, activation='relu')
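    # Residual blocks 2-8 below repeat this same pattern: conv -> batch norm -> relu ->
    # conv -> batch norm, with the block input added back before the final relu.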
# res block 2
tmp = tflearn.conv_2d(net, 256, 3, activation=None)
tmp = tflearn.batch_normalization(tmp)
tmp = tflearn.activation(tmp, activation='relu')
tmp = tflearn.conv_2d(tmp, 256, 3, activation=None)
tmp = tflearn.batch_normalization(tmp)
net = tflearn.activation(net + tmp, activation='relu')
# res block 3
tmp = tflearn.conv_2d(net, 256, 3, activation=None)
tmp = tflearn.batch_normalization(tmp)
tmp = tflearn.activation(tmp, activation='relu')
tmp = tflearn.conv_2d(tmp, 256, 3, activation=None)
tmp = tflearn.batch_normalization(tmp)
net = tflearn.activation(net + tmp, activation='relu')
# res block 4
tmp = tflearn.conv_2d(net, 256, 3, activation=None)
tmp = tflearn.batch_normalization(tmp)
tmp = tflearn.activation(tmp, activation='relu')
tmp = tflearn.conv_2d(tmp, 256, 3, activation=None)
tmp = tflearn.batch_normalization(tmp)
net = tflearn.activation(net + tmp, activation='relu')
# res block 5
tmp = tflearn.conv_2d(net, 256, 3, activation=None)
tmp = tflearn.batch_normalization(tmp)
tmp = tflearn.activation(tmp, activation='relu')
tmp = tflearn.conv_2d(tmp, 256, 3, activation=None)
tmp = tflearn.batch_normalization(tmp)
net = tflearn.activation(net + tmp, activation='relu')
# res block 6
tmp = tflearn.conv_2d(net, 256, 3, activation=None)
tmp = tflearn.batch_normalization(tmp)
tmp = tflearn.activation(tmp, activation='relu')
tmp = tflearn.conv_2d(tmp, 256, 3, activation=None)
tmp = tflearn.batch_normalization(tmp)
net = tflearn.activation(net + tmp, activation='relu')
# res block 7
tmp = tflearn.conv_2d(net, 256, 3, activation=None)
tmp = tflearn.batch_normalization(tmp)
tmp = tflearn.activation(tmp, activation='relu')
tmp = tflearn.conv_2d(tmp, 256, 3, activation=None)
tmp = tflearn.batch_normalization(tmp)
net = tflearn.activation(net + tmp, activation='relu')
# res block 8
tmp = tflearn.conv_2d(net, 256, 3, activation=None)
tmp = tflearn.batch_normalization(tmp)
tmp = tflearn.activation(tmp, activation='relu')
tmp = tflearn.conv_2d(tmp, 256, 3, activation=None)
tmp = tflearn.batch_normalization(tmp)
net = tflearn.activation(net + tmp, activation='relu')
# value head
net = tflearn.conv_2d(net, 1, 1, activation=None)
net = tflearn.batch_normalization(net)
net = tflearn.activation(net, activation='relu')
net = tflearn.fully_connected(net, 256, activation='relu')
final = tflearn.fully_connected(net, 1, activation='tanh')
# optmizer
sgd = tflearn.optimizers.SGD(learning_rate=0.01, lr_decay=0.95, decay_step=300000)
regression = tflearn.regression(final, optimizer=sgd, loss='mean_square', metric='R2')
model = tflearn.DNN(regression)
return model
| 45.011905 | 91 | 0.701666 |
acf203d9523744117453d6df1e7878d8e6efb370 | 747 | py | Python | LeetCode/496_Next_Greater_Element_I.py | yatingupta10/Coding-Practice | dc8a8f6bb4d15ac34dfdf3eec1407361226510c9 | [
"MIT"
] | null | null | null | LeetCode/496_Next_Greater_Element_I.py | yatingupta10/Coding-Practice | dc8a8f6bb4d15ac34dfdf3eec1407361226510c9 | [
"MIT"
] | null | null | null | LeetCode/496_Next_Greater_Element_I.py | yatingupta10/Coding-Practice | dc8a8f6bb4d15ac34dfdf3eec1407361226510c9 | [
"MIT"
] | null | null | null | l = []
findNums = [4,1,2]
nums = [1,3,4,2]
for i in range(len(findNums)):
flag = 0
for j in range(len(nums)):
if nums[j] == findNums[i]:
temp = j
break
temp_list = nums[j+1:]
print temp_list
for k in range(len(temp_list)):
if temp_list[k] > findNums[i]:
l.append(temp_list[k])
flag = 1
break
if flag == 0:
l.append(-1)
# for j in range(findNums.index(findNums[i]), len(nums)):
# if nums[j] > findNums[i]:
# l.append(nums[j])
# flag = 1
# break
# if flag == 0:
# l.append(-1)
print l
# nums = [9,6,4,2,3,5,7,0,1]
# num1 = []
# for i in range(len(nums)+1):
# num1.append(i)
# # print num1
# print set(num1) - set(nums) | 22.636364 | 61 | 0.508701 |
acf204023a81cb5e81b2d23d7109a64cd4f6388a | 276 | py | Python | mysite/accounts/views.py | Patrick-Lam-Apps/iCanCook | 1285e704e8ee079783a8f85bc07b6f3742e0a7e9 | [
"Unlicense"
] | null | null | null | mysite/accounts/views.py | Patrick-Lam-Apps/iCanCook | 1285e704e8ee079783a8f85bc07b6f3742e0a7e9 | [
"Unlicense"
] | null | null | null | mysite/accounts/views.py | Patrick-Lam-Apps/iCanCook | 1285e704e8ee079783a8f85bc07b6f3742e0a7e9 | [
"Unlicense"
] | null | null | null | from django.shortcuts import render
from django.http import HttpResponse
# Create your views here.
def index(request):
return HttpResponse("Hello, world. You're at the accounts index.")
def test(request, id):
return HttpResponse("Argument is: " + id)
| 21.230769 | 71 | 0.702899 |
acf204add216b33953a4ffbc975fc9e487e12d54 | 1,176 | py | Python | py_solutions_91-100/Euler_100.py | tijko/Project-Euler | d953a2bf6932c2c4e1235409fedf760add65a0ba | [
"MIT"
] | null | null | null | py_solutions_91-100/Euler_100.py | tijko/Project-Euler | d953a2bf6932c2c4e1235409fedf760add65a0ba | [
"MIT"
] | 1 | 2022-03-15T02:49:09.000Z | 2022-03-15T02:49:09.000Z | py_solutions_91-100/Euler_100.py | tijko/Project-Euler | d953a2bf6932c2c4e1235409fedf760add65a0ba | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
If a box contains twenty-one coloured discs, composed of fifteen blue discs and
six red discs, and two discs were taken at random, it can be seen that the
probability of taking two blue discs, P(BB) = (15/21)×(14/20) = 1/2.
The next such arrangement, for which there is exactly 50% chance of taking two
blue discs at random, is a box containing eighty-five blue discs and thirty-five
red discs.
By finding the first arrangement to contain over 10^12 = 1,000,000,000,000 discs
in total, determine the number of blue discs that the box would contain.
'''
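# Quick check of the examples in the docstring: 15/21 * 14/20 = 210/420 = 1/2, and for the
# second arrangement 85/120 * 84/119 = 7140/14280 = 1/2 (85 blue + 35 red = 120 discs).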
from __future__ import print_function
import timeit
limit = 10**12
def arranged_probability(b1, b2, combined_discs):
discs = int(combined_discs**0.5)
if discs > limit: return b1
b2 = b1 * 2 + b2 + discs * 2
return arranged_probability(b2 + 1, b2, (b2 + 1) * b2 * 2)
def euler_100():
b1, b2 = 3, 2
return arranged_probability(b1, b2, b1 * b2 * 2)
if __name__ == '__main__':
start = timeit.default_timer()
print('Answer: {}'.format(euler_100()))
stop = timeit.default_timer()
print('Time: {0:9.5f}'.format(stop - start))
| 30.153846 | 81 | 0.688776 |
acf204eb96c170f3d3b9d8fb36db69778bdd5640 | 20,899 | py | Python | sahara-10.0.0/sahara/db/sqlalchemy/models.py | scottwedge/OpenStack-Stein | 7077d1f602031dace92916f14e36b124f474de15 | [
"Apache-2.0"
] | 161 | 2015-01-05T11:46:42.000Z | 2022-01-05T07:41:39.000Z | sahara-10.0.0/sahara/db/sqlalchemy/models.py | scottwedge/OpenStack-Stein | 7077d1f602031dace92916f14e36b124f474de15 | [
"Apache-2.0"
] | 5 | 2019-08-14T06:46:03.000Z | 2021-12-13T20:01:25.000Z | sahara-10.0.0/sahara/db/sqlalchemy/models.py | scottwedge/OpenStack-Stein | 7077d1f602031dace92916f14e36b124f474de15 | [
"Apache-2.0"
] | 118 | 2015-01-29T06:34:35.000Z | 2021-12-06T07:30:09.000Z | # Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_utils import uuidutils
import sqlalchemy as sa
from sqlalchemy.orm import relationship
from sahara.db.sqlalchemy import model_base as mb
from sahara.db.sqlalchemy import types as st
# Helpers
def _generate_unicode_uuid():
return uuidutils.generate_uuid()
def _id_column():
return sa.Column(sa.String(36),
primary_key=True,
default=_generate_unicode_uuid)
# Main objects: Cluster, NodeGroup, Instance
class Cluster(mb.SaharaBase):
"""Contains all info about cluster."""
__tablename__ = 'clusters'
__table_args__ = (
sa.UniqueConstraint('name', 'tenant_id'),
)
id = _id_column()
name = sa.Column(sa.String(80), nullable=False)
description = sa.Column(sa.Text)
tenant_id = sa.Column(sa.String(36))
trust_id = sa.Column(sa.String(36))
is_transient = sa.Column(sa.Boolean, default=False)
plugin_name = sa.Column(sa.String(80), nullable=False)
hadoop_version = sa.Column(sa.String(80), nullable=False)
cluster_configs = sa.Column(st.JsonDictType())
default_image_id = sa.Column(sa.String(36))
neutron_management_network = sa.Column(sa.String(36))
anti_affinity = sa.Column(st.JsonListType())
anti_affinity_ratio = sa.Column(sa.Integer, default=1)
management_private_key = sa.Column(sa.Text, nullable=False)
management_public_key = sa.Column(sa.Text, nullable=False)
user_keypair_id = sa.Column(sa.String(80))
status = sa.Column(sa.String(80))
status_description = sa.Column(st.LongText())
info = sa.Column(st.JsonDictType())
extra = sa.Column(st.JsonDictType())
rollback_info = sa.Column(st.JsonDictType())
sahara_info = sa.Column(st.JsonDictType())
use_autoconfig = sa.Column(sa.Boolean(), default=True)
provision_progress = relationship('ClusterProvisionStep',
cascade="all,delete",
backref='cluster',
lazy='subquery')
verification = relationship('ClusterVerification', cascade="all,delete",
backref="cluster", lazy='joined')
node_groups = relationship('NodeGroup', cascade="all,delete",
backref='cluster', lazy='subquery')
cluster_template_id = sa.Column(sa.String(36),
sa.ForeignKey('cluster_templates.id'))
cluster_template = relationship('ClusterTemplate',
backref="clusters")
shares = sa.Column(st.JsonListType())
is_public = sa.Column(sa.Boolean())
is_protected = sa.Column(sa.Boolean())
domain_name = sa.Column(sa.String(255))
def to_dict(self, show_progress=False):
d = super(Cluster, self).to_dict()
d['node_groups'] = [ng.to_dict() for ng in self.node_groups]
d['provision_progress'] = [pp.to_dict(show_progress) for pp in
self.provision_progress]
if self.verification:
d['verification'] = self.verification[0].to_dict()
return d
class NodeGroup(mb.SaharaBase):
"""Specifies group of nodes within a cluster."""
__tablename__ = 'node_groups'
__table_args__ = (
sa.UniqueConstraint('name', 'cluster_id'),
)
id = _id_column()
name = sa.Column(sa.String(80), nullable=False)
tenant_id = sa.Column(sa.String(36))
flavor_id = sa.Column(sa.String(36), nullable=False)
image_id = sa.Column(sa.String(36))
image_username = sa.Column(sa.String(36))
node_processes = sa.Column(st.JsonListType())
node_configs = sa.Column(st.JsonDictType())
volumes_per_node = sa.Column(sa.Integer)
volumes_size = sa.Column(sa.Integer)
volumes_availability_zone = sa.Column(sa.String(255))
volume_mount_prefix = sa.Column(sa.String(80))
volume_type = sa.Column(sa.String(255))
boot_from_volume = sa.Column(sa.Boolean(), default=False, nullable=False)
boot_volume_type = sa.Column(sa.String(255))
boot_volume_availability_zone = sa.Column(sa.String(255))
boot_volume_local_to_instance = sa.Column(sa.Boolean())
count = sa.Column(sa.Integer, nullable=False)
use_autoconfig = sa.Column(sa.Boolean(), default=True)
instances = relationship('Instance', cascade="all,delete",
backref='node_group',
order_by="Instance.instance_name",
lazy='subquery')
cluster_id = sa.Column(sa.String(36), sa.ForeignKey('clusters.id'))
node_group_template_id = sa.Column(sa.String(36),
sa.ForeignKey(
'node_group_templates.id'))
node_group_template = relationship('NodeGroupTemplate',
backref="node_groups")
floating_ip_pool = sa.Column(sa.String(36))
security_groups = sa.Column(st.JsonListType())
auto_security_group = sa.Column(sa.Boolean())
availability_zone = sa.Column(sa.String(255))
open_ports = sa.Column(st.JsonListType())
is_proxy_gateway = sa.Column(sa.Boolean())
volume_local_to_instance = sa.Column(sa.Boolean())
shares = sa.Column(st.JsonListType())
def to_dict(self):
d = super(NodeGroup, self).to_dict()
d['instances'] = [i.to_dict() for i in self.instances]
return d
class Instance(mb.SaharaBase):
"""An OpenStack instance created for the cluster."""
__tablename__ = 'instances'
__table_args__ = (
sa.UniqueConstraint('instance_id', 'node_group_id'),
)
id = _id_column()
tenant_id = sa.Column(sa.String(36))
node_group_id = sa.Column(sa.String(36), sa.ForeignKey('node_groups.id'))
instance_id = sa.Column(sa.String(36))
instance_name = sa.Column(sa.String(80), nullable=False)
internal_ip = sa.Column(sa.String(45))
management_ip = sa.Column(sa.String(45))
volumes = sa.Column(st.JsonListType())
storage_devices_number = sa.Column(sa.Integer)
dns_hostname = sa.Column(sa.String(255))
# Template objects: ClusterTemplate, NodeGroupTemplate, TemplatesRelation
class ClusterTemplate(mb.SaharaBase):
"""Template for Cluster."""
__tablename__ = 'cluster_templates'
__table_args__ = (
sa.UniqueConstraint('name', 'tenant_id'),
)
id = _id_column()
name = sa.Column(sa.String(80), nullable=False)
description = sa.Column(sa.Text)
cluster_configs = sa.Column(st.JsonDictType())
default_image_id = sa.Column(sa.String(36))
anti_affinity = sa.Column(st.JsonListType())
tenant_id = sa.Column(sa.String(36))
neutron_management_network = sa.Column(sa.String(36))
plugin_name = sa.Column(sa.String(80), nullable=False)
hadoop_version = sa.Column(sa.String(80), nullable=False)
node_groups = relationship('TemplatesRelation', cascade="all,delete",
backref='cluster_template', lazy='subquery')
is_default = sa.Column(sa.Boolean(), default=False)
use_autoconfig = sa.Column(sa.Boolean(), default=True)
shares = sa.Column(st.JsonListType())
is_public = sa.Column(sa.Boolean())
is_protected = sa.Column(sa.Boolean())
domain_name = sa.Column(sa.String(255))
def to_dict(self):
d = super(ClusterTemplate, self).to_dict()
d['node_groups'] = [tr.to_dict() for tr in
self.node_groups]
return d
class NodeGroupTemplate(mb.SaharaBase):
"""Template for NodeGroup."""
__tablename__ = 'node_group_templates'
__table_args__ = (
sa.UniqueConstraint('name', 'tenant_id'),
)
id = _id_column()
name = sa.Column(sa.String(80), nullable=False)
description = sa.Column(sa.Text)
tenant_id = sa.Column(sa.String(36))
flavor_id = sa.Column(sa.String(36), nullable=False)
image_id = sa.Column(sa.String(36))
plugin_name = sa.Column(sa.String(80), nullable=False)
hadoop_version = sa.Column(sa.String(80), nullable=False)
node_processes = sa.Column(st.JsonListType())
node_configs = sa.Column(st.JsonDictType())
volumes_per_node = sa.Column(sa.Integer, nullable=False)
volumes_size = sa.Column(sa.Integer)
volumes_availability_zone = sa.Column(sa.String(255))
volume_mount_prefix = sa.Column(sa.String(80))
volume_type = sa.Column(sa.String(255))
boot_from_volume = sa.Column(sa.Boolean(), default=False, nullable=False)
boot_volume_type = sa.Column(sa.String(255))
boot_volume_availability_zone = sa.Column(sa.String(255))
boot_volume_local_to_instance = sa.Column(sa.Boolean())
floating_ip_pool = sa.Column(sa.String(36))
security_groups = sa.Column(st.JsonListType())
auto_security_group = sa.Column(sa.Boolean())
availability_zone = sa.Column(sa.String(255))
is_proxy_gateway = sa.Column(sa.Boolean())
volume_local_to_instance = sa.Column(sa.Boolean())
is_default = sa.Column(sa.Boolean(), default=False)
use_autoconfig = sa.Column(sa.Boolean(), default=True)
shares = sa.Column(st.JsonListType())
is_public = sa.Column(sa.Boolean())
is_protected = sa.Column(sa.Boolean())
class TemplatesRelation(mb.SaharaBase):
"""NodeGroupTemplate - ClusterTemplate relationship.
    In fact, it is a template for a NodeGroup within a Cluster.
"""
__tablename__ = 'templates_relations'
id = _id_column()
tenant_id = sa.Column(sa.String(36))
name = sa.Column(sa.String(80), nullable=False)
flavor_id = sa.Column(sa.String(36), nullable=False)
image_id = sa.Column(sa.String(36))
node_processes = sa.Column(st.JsonListType())
node_configs = sa.Column(st.JsonDictType())
volumes_per_node = sa.Column(sa.Integer)
volumes_size = sa.Column(sa.Integer)
volumes_availability_zone = sa.Column(sa.String(255))
volume_mount_prefix = sa.Column(sa.String(80))
volume_type = sa.Column(sa.String(255))
boot_from_volume = sa.Column(sa.Boolean(), default=False, nullable=False)
boot_volume_type = sa.Column(sa.String(255))
boot_volume_availability_zone = sa.Column(sa.String(255))
boot_volume_local_to_instance = sa.Column(sa.Boolean())
count = sa.Column(sa.Integer, nullable=False)
use_autoconfig = sa.Column(sa.Boolean(), default=True)
cluster_template_id = sa.Column(sa.String(36),
sa.ForeignKey('cluster_templates.id'))
node_group_template_id = sa.Column(sa.String(36),
sa.ForeignKey(
'node_group_templates.id'))
node_group_template = relationship('NodeGroupTemplate',
backref="templates_relations")
floating_ip_pool = sa.Column(sa.String(36))
security_groups = sa.Column(st.JsonListType())
auto_security_group = sa.Column(sa.Boolean())
availability_zone = sa.Column(sa.String(255))
is_proxy_gateway = sa.Column(sa.Boolean())
volume_local_to_instance = sa.Column(sa.Boolean())
shares = sa.Column(st.JsonListType())
# EDP objects: DataSource, Job, Job Execution, JobBinary
class DataSource(mb.SaharaBase):
"""DataSource - represent a diffident types of data sources.
e.g. Swift, Cassandra etc.
"""
__tablename__ = 'data_sources'
__table_args__ = (
sa.UniqueConstraint('name', 'tenant_id'),
)
id = _id_column()
tenant_id = sa.Column(sa.String(36))
name = sa.Column(sa.String(80), nullable=False)
description = sa.Column(sa.Text())
type = sa.Column(sa.String(80), nullable=False)
url = sa.Column(sa.String(256), nullable=False)
credentials = sa.Column(st.JsonDictType())
is_public = sa.Column(sa.Boolean())
is_protected = sa.Column(sa.Boolean())
class JobExecution(mb.SaharaBase):
"""JobExecution - represent a job execution of specific cluster."""
__tablename__ = 'job_executions'
id = _id_column()
tenant_id = sa.Column(sa.String(36))
job_id = sa.Column(sa.String(36),
sa.ForeignKey('jobs.id'))
input_id = sa.Column(sa.String(36),
sa.ForeignKey('data_sources.id'))
output_id = sa.Column(sa.String(36),
sa.ForeignKey('data_sources.id'))
start_time = sa.Column(sa.DateTime())
end_time = sa.Column(sa.DateTime())
cluster_id = sa.Column(sa.String(36),
sa.ForeignKey('clusters.id'))
info = sa.Column(st.JsonDictType())
engine_job_id = sa.Column(sa.String(100))
return_code = sa.Column(sa.String(80))
job_configs = sa.Column(st.JsonDictType())
extra = sa.Column(st.JsonDictType())
data_source_urls = sa.Column(st.JsonDictType())
is_public = sa.Column(sa.Boolean())
is_protected = sa.Column(sa.Boolean())
def to_dict(self):
d = super(JobExecution, self).to_dict()
        # The oozie_job_id field is renamed to engine_job_id
# to make this field more universal. But, we need to
# carry both engine_job_id and oozie_job_id until we
# can deprecate "oozie_job_id".
d['oozie_job_id'] = self.engine_job_id
return d
mains_association = sa.Table("mains_association",
mb.SaharaBase.metadata,
sa.Column("Job_id",
sa.String(36),
sa.ForeignKey("jobs.id")),
sa.Column("JobBinary_id",
sa.String(36),
sa.ForeignKey("job_binaries.id"))
)
libs_association = sa.Table("libs_association",
mb.SaharaBase.metadata,
sa.Column("Job_id",
sa.String(36),
sa.ForeignKey("jobs.id")),
sa.Column("JobBinary_id",
sa.String(36),
sa.ForeignKey("job_binaries.id"))
)
class Job(mb.SaharaBase):
"""Job - description and location of a job binary."""
__tablename__ = 'jobs'
__table_args__ = (
sa.UniqueConstraint('name', 'tenant_id'),
)
id = _id_column()
tenant_id = sa.Column(sa.String(36))
name = sa.Column(sa.String(80), nullable=False)
description = sa.Column(sa.Text())
type = sa.Column(sa.String(80), nullable=False)
is_public = sa.Column(sa.Boolean())
is_protected = sa.Column(sa.Boolean())
mains = relationship("JobBinary",
secondary=mains_association, lazy="subquery")
libs = relationship("JobBinary",
secondary=libs_association, lazy="subquery")
interface = relationship('JobInterfaceArgument',
cascade="all,delete",
order_by="JobInterfaceArgument.order",
backref='job',
lazy='subquery')
def to_dict(self):
d = super(Job, self).to_dict()
d['mains'] = [jb.to_dict() for jb in self.mains]
d['libs'] = [jb.to_dict() for jb in self.libs]
d['interface'] = [arg.to_dict() for arg in self.interface]
return d
class JobInterfaceArgument(mb.SaharaBase):
"""JobInterfaceArgument - Configuration setting for a specific job."""
__tablename__ = 'job_interface_arguments'
__table_args__ = (
sa.UniqueConstraint('job_id', 'name'),
sa.UniqueConstraint('job_id', 'order')
)
id = _id_column()
job_id = sa.Column(sa.String(36), sa.ForeignKey('jobs.id'),
nullable=False)
tenant_id = sa.Column(sa.String(36))
name = sa.Column(sa.String(80), nullable=False)
description = sa.Column(sa.Text())
mapping_type = sa.Column(sa.String(80), nullable=False)
location = sa.Column(sa.Text(), nullable=False)
value_type = sa.Column(sa.String(80), nullable=False)
required = sa.Column(sa.Boolean(), nullable=False)
order = sa.Column(sa.SmallInteger(), nullable=False)
default = sa.Column(sa.Text())
class JobBinaryInternal(mb.SaharaBase):
"""JobBinaryInternal - raw binary storage for executable jobs."""
__tablename__ = 'job_binary_internal'
__table_args__ = (
sa.UniqueConstraint('name', 'tenant_id'),
)
id = _id_column()
tenant_id = sa.Column(sa.String(36))
name = sa.Column(sa.String(80), nullable=False)
data = sa.orm.deferred(sa.Column(st.LargeBinary()))
datasize = sa.Column(sa.BIGINT)
is_public = sa.Column(sa.Boolean())
is_protected = sa.Column(sa.Boolean())
class JobBinary(mb.SaharaBase):
"""JobBinary - raw binary storage for executable jobs."""
__tablename__ = 'job_binaries'
__table_args__ = (
sa.UniqueConstraint('name', 'tenant_id'),
)
id = _id_column()
tenant_id = sa.Column(sa.String(36))
name = sa.Column(sa.String(80), nullable=False)
description = sa.Column(sa.Text())
url = sa.Column(sa.String(256), nullable=False)
extra = sa.Column(st.JsonDictType())
is_public = sa.Column(sa.Boolean())
is_protected = sa.Column(sa.Boolean())
class ClusterEvent(mb.SaharaBase):
""""Event - represent a info about current provision step."""
__tablename__ = 'cluster_events'
__table_args__ = (
sa.UniqueConstraint('id', 'step_id'),
)
id = _id_column()
node_group_id = sa.Column(sa.String(36))
instance_id = sa.Column(sa.String(36))
instance_name = sa.Column(sa.String(80))
event_info = sa.Column(sa.Text)
successful = sa.Column(sa.Boolean, nullable=False)
step_id = sa.Column(sa.String(36), sa.ForeignKey(
'cluster_provision_steps.id'))
class ClusterProvisionStep(mb.SaharaBase):
"""ProvisionStep - represent a current provision step of cluster."""
__tablename__ = 'cluster_provision_steps'
__table_args__ = (
sa.UniqueConstraint('id', 'cluster_id'),
)
id = _id_column()
cluster_id = sa.Column(sa.String(36), sa.ForeignKey('clusters.id'))
tenant_id = sa.Column(sa.String(36))
step_name = sa.Column(sa.String(80))
step_type = sa.Column(sa.String(36))
total = sa.Column(sa.Integer)
successful = sa.Column(sa.Boolean, nullable=True)
events = relationship('ClusterEvent', cascade="all,delete",
backref='ClusterProvisionStep',
lazy='subquery')
def to_dict(self, show_progress):
d = super(ClusterProvisionStep, self).to_dict()
if show_progress:
d['events'] = [event.to_dict() for event in self.events]
return d
class ClusterVerification(mb.SaharaBase):
"""ClusterVerification represent results of cluster health checks."""
__tablename__ = 'cluster_verifications'
__table_args__ = (sa.UniqueConstraint('id', 'cluster_id'),)
id = _id_column()
cluster_id = sa.Column(
sa.String(36), sa.ForeignKey('clusters.id'))
status = sa.Column(sa.String(15))
checks = relationship(
'ClusterHealthCheck', cascade="all,delete",
backref='ClusterVerification', lazy='subquery')
def to_dict(self):
base = super(ClusterVerification, self).to_dict()
base['checks'] = [check.to_dict() for check in self.checks]
return base
class ClusterHealthCheck(mb.SaharaBase):
"""ClusterHealthCheck respresent cluster health check."""
__tablename__ = 'cluster_health_checks'
__table_args__ = (sa.UniqueConstraint('id', 'verification_id'),)
id = _id_column()
verification_id = sa.Column(
sa.String(36), sa.ForeignKey('cluster_verifications.id'))
status = sa.Column(sa.String(15))
description = sa.Column(sa.Text)
name = sa.Column(sa.String(80))
class PluginData(mb.SaharaBase):
"""Plugin Data represents Provisioning Plugin."""
__tablename__ = 'plugin_data'
__table_args__ = (
sa.UniqueConstraint('name', 'tenant_id'),
)
id = _id_column()
tenant_id = sa.Column(sa.String(36), nullable=False)
name = sa.Column(sa.String(15), nullable=False)
plugin_labels = sa.Column(st.JsonDictType())
version_labels = sa.Column(st.JsonDictType())
| 36.600701 | 77 | 0.638452 |
acf205d06e8a9c571069648006558c660c5467dc | 2,327 | py | Python | commitizen/cz/customize/customize.py | 12rambau/commitizen | 4309813974b6be72a246d47fc77f4c7f8ef64be1 | [
"MIT"
] | 866 | 2020-03-18T06:09:07.000Z | 2022-03-30T15:46:17.000Z | commitizen/cz/customize/customize.py | 12rambau/commitizen | 4309813974b6be72a246d47fc77f4c7f8ef64be1 | [
"MIT"
] | 364 | 2020-03-18T02:13:09.000Z | 2022-03-31T01:57:12.000Z | commitizen/cz/customize/customize.py | 12rambau/commitizen | 4309813974b6be72a246d47fc77f4c7f8ef64be1 | [
"MIT"
] | 136 | 2020-03-20T18:06:32.000Z | 2022-03-31T00:02:34.000Z | try:
from jinja2 import Template
except ImportError:
from string import Template # type: ignore
from typing import Any, Dict, List, Optional
from commitizen import defaults
from commitizen.config import BaseConfig
from commitizen.cz.base import BaseCommitizen
from commitizen.exceptions import MissingCzCustomizeConfigError
__all__ = ["CustomizeCommitsCz"]
class CustomizeCommitsCz(BaseCommitizen):
bump_pattern = defaults.bump_pattern
bump_map = defaults.bump_map
change_type_order = defaults.change_type_order
def __init__(self, config: BaseConfig):
super(CustomizeCommitsCz, self).__init__(config)
if "customize" not in self.config.settings:
raise MissingCzCustomizeConfigError()
self.custom_settings = self.config.settings["customize"]
custom_bump_pattern = self.custom_settings.get("bump_pattern")
if custom_bump_pattern:
self.bump_pattern = custom_bump_pattern
custom_bump_map = self.custom_settings.get("bump_map")
if custom_bump_map:
self.bump_map = custom_bump_map
custom_change_type_order = self.custom_settings.get("change_type_order")
if custom_change_type_order:
self.change_type_order = custom_change_type_order
def questions(self) -> List[Dict[str, Any]]:
return self.custom_settings.get("questions")
def message(self, answers: dict) -> str:
message_template = Template(self.custom_settings.get("message_template"))
if getattr(Template, "substitute", None):
return message_template.substitute(**answers) # type: ignore
else:
return message_template.render(**answers)
def example(self) -> Optional[str]:
return self.custom_settings.get("example")
def schema_pattern(self) -> Optional[str]:
return self.custom_settings.get("schema_pattern")
def schema(self) -> Optional[str]:
return self.custom_settings.get("schema")
def info(self) -> Optional[str]:
info_path = self.custom_settings.get("info_path")
info = self.custom_settings.get("info")
if info_path:
with open(info_path, "r") as f:
content = f.read()
return content
elif info:
return info
return None
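# Illustrative sketch (not part of the original module). Assuming a customize
# configuration along the lines of
#   message_template = "{{change_type}}: {{message}}"
# and questions whose answers come back as
#   {"change_type": "feat", "message": "support custom templates"}
# message() renders "feat: support custom templates" via Jinja2, or via
# string.Template (e.g. "$change_type: $message") when Jinja2 is not installed.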
| 33.724638 | 81 | 0.687151 |
acf205d1f5f2eea6da62e23d61d5c6ff7c316a13 | 997 | py | Python | backend/api/views/download/factory.py | hsnprsd/doccano | 2fce22b7073bd38188c3df6732c273b1967966a8 | [
"MIT"
] | null | null | null | backend/api/views/download/factory.py | hsnprsd/doccano | 2fce22b7073bd38188c3df6732c273b1967966a8 | [
"MIT"
] | null | null | null | backend/api/views/download/factory.py | hsnprsd/doccano | 2fce22b7073bd38188c3df6732c273b1967966a8 | [
"MIT"
] | null | null | null | from typing import Type
from ...models import DOCUMENT_CLASSIFICATION, SEQ2SEQ, SEQUENCE_LABELING
from . import catalog, repositories, writer
def create_repository(project) -> repositories.BaseRepository:
mapping = {
DOCUMENT_CLASSIFICATION: repositories.TextClassificationRepository,
SEQUENCE_LABELING: repositories.SequenceLabelingRepository,
SEQ2SEQ: repositories.Seq2seqRepository,
}
if project.project_type not in mapping:
        raise ValueError(f'Invalid project type: {project.project_type}')
repository = mapping.get(project.project_type)(project)
return repository
def create_writer(format: str) -> Type[writer.BaseWriter]:
mapping = {
catalog.CSV.name: writer.CsvWriter,
catalog.JSON.name: writer.JSONWriter,
catalog.JSONL.name: writer.JSONLWriter,
catalog.FastText.name: writer.FastTextWriter,
}
if format not in mapping:
        raise ValueError(f'Invalid format: {format}')
return mapping[format]
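# Illustrative usage sketch (not part of the original module); the project object
# and format name are assumptions for illustration:
#   repository = create_repository(project)           # e.g. a SEQ2SEQ project
#   writer_class = create_writer(catalog.JSONL.name)  # -> writer.JSONLWriter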
| 34.37931 | 75 | 0.730191 |
acf205e3fbaac3e15b3be88de3da7aa8ec4dbc4b | 262 | py | Python | greentest/test__httplib.py | newbrough/gevent-0.13.7 | 5c4ebef04f280e4ee1501a6e697e3b4681b1568d | [
"BSD-3-Clause",
"MIT"
] | 1 | 2021-03-08T14:08:52.000Z | 2021-03-08T14:08:52.000Z | greentest/test__httplib.py | newbrough/gevent-0.13.7 | 5c4ebef04f280e4ee1501a6e697e3b4681b1568d | [
"BSD-3-Clause",
"MIT"
] | null | null | null | greentest/test__httplib.py | newbrough/gevent-0.13.7 | 5c4ebef04f280e4ee1501a6e697e3b4681b1568d | [
"BSD-3-Clause",
"MIT"
] | null | null | null | import unittest
from gevent import httplib
class Test(unittest.TestCase):
def test(self):
conn = httplib.HTTPConnection('www.google.com')
conn.request('GET', '/')
conn.getresponse()
if __name__ == "__main__":
unittest.main()
| 17.466667 | 55 | 0.645038 |
acf2073a0fd9f26698302b4126d0c0f3cc14b386 | 252 | py | Python | pipe_generator.py | SteveDexter24/Flappy-Bird | 34846a30bf10e4dc06fc6f0166cd40babbea2429 | [
"MIT"
] | 1 | 2022-01-11T13:26:19.000Z | 2022-01-11T13:26:19.000Z | pipe_generator.py | SteveDexter24/Flappy-Bird | 34846a30bf10e4dc06fc6f0166cd40babbea2429 | [
"MIT"
] | null | null | null | pipe_generator.py | SteveDexter24/Flappy-Bird | 34846a30bf10e4dc06fc6f0166cd40babbea2429 | [
"MIT"
] | null | null | null | import random
def pip_gen():
f = open("pipe.txt", "a")
write = ""
for i in range(100):
write += ('pipe_ran(' + str(i) + ') <= ' + str(random.randint(100, 400)) + '; ')
if i % 7 == 0:
write += '\n'
f.write(write)
f.close()
| 14.823529 | 84 | 0.480159 |
acf207916110d087ad8e3209040d9c29782c51ed | 3,771 | py | Python | bashplotlib/scatterplot.py | anshalshukla/bashplotlib | c94498d40f674fadba86ff55fb3ca7aad99a47f7 | [
"MIT"
] | null | null | null | bashplotlib/scatterplot.py | anshalshukla/bashplotlib | c94498d40f674fadba86ff55fb3ca7aad99a47f7 | [
"MIT"
] | null | null | null | bashplotlib/scatterplot.py | anshalshukla/bashplotlib | c94498d40f674fadba86ff55fb3ca7aad99a47f7 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Plotting terminal based scatterplots
"""
from __future__ import print_function
import csv
import sys
import optparse
from .utils.helpers import *
from .utils.commandhelp import scatter
def get_scale(series, is_y=False, steps=20):
min_val = min(series)
max_val = max(series)
scaled_series = []
for x in drange(min_val, max_val, (max_val - min_val) / steps,
include_stop=True):
if x > 0 and scaled_series and max(scaled_series) < 0:
scaled_series.append(0.0)
scaled_series.append(x)
if is_y:
scaled_series.reverse()
return scaled_series
def _plot_scatter(xs, ys, size, pch, colour, title, cs):
plotted = set()
if title:
print(box_text(title, 2 * (len(get_scale(xs, False, size)) + 1)))
print("+",end="")
print("-" * (2 * (len(get_scale(xs, False, size)) + 1)),end="")
print("+")
for y in get_scale(ys, True, size):
print("|", end=' ')
for x in get_scale(xs, False, size):
point = " "
for (i, (xp, yp)) in enumerate(zip(xs, ys)):
if xp <= x and yp >= y and (xp, yp) not in plotted:
point = pch
plotted.add((xp, yp))
if cs:
colour = cs[i]
printcolour(point + " ", True, colour)
print(" |")
print("+",end="")
print("-" * (2 * (len(get_scale(xs, False, size)) + 1)),end="")
print("+")
def plot_scatter(f, xs, ys, size, pch, colour, title):
"""
    Plot a scatterplot from a comma-delimited file or from x/y coordinate files/lists.
Arguments:
f -- comma delimited file w/ x,y coordinates
xs -- if f not specified this is a file w/ x coordinates
ys -- if f not specified this is a filew / y coordinates
size -- size of the plot
pch -- shape of the points (any character)
colour -- colour of the points
title -- title of the plot
"""
cs = None
if f:
if isinstance(f, str):
with open(f) as fh:
data = [tuple(line.strip().split(',')) for line in fh]
else:
data = [tuple(line.strip().split(',')) for line in f]
xs = [float(i[0]) for i in data]
ys = [float(i[1]) for i in data]
if len(data[0]) > 2:
cs = [i[2].strip() for i in data]
elif isinstance(xs, list) and isinstance(ys, list):
pass
else:
with open(xs) as fh:
xs = [float(str(row).strip()) for row in fh]
with open(ys) as fh:
ys = [float(str(row).strip()) for row in fh]
_plot_scatter(xs, ys, size, pch, colour, title, cs)
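# Illustrative usage sketch (not part of the original module); the data below is
# made up for illustration:
#   plot_scatter(None, xs=[1, 2, 3, 4], ys=[1, 4, 9, 16], size=20, pch='x',
#                colour='default', title='y = x^2')
# Alternatively, pass f='points.csv' (lines of "x,y" or "x,y,colour") and leave
# xs/ys as None.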
def main():
parser = optparse.OptionParser(usage=scatter['usage'])
parser.add_option('-f', '--file', help='a csv w/ x and y coordinates', default=None, dest='f')
parser.add_option('-t', '--title', help='title for the chart', default="", dest='t')
parser.add_option('-x', help='x coordinates', default=None, dest='x')
parser.add_option('-y', help='y coordinates', default=None, dest='y')
parser.add_option('-s', '--size', help='y coordinates', default=20, dest='size', type='int')
parser.add_option('-p', '--pch', help='shape of point', default="x", dest='pch')
parser.add_option('-c', '--colour', help='colour of the plot (%s)' %
colour_help, default='default', dest='colour')
opts, args = parser.parse_args()
if opts.f is None and (opts.x is None or opts.y is None):
opts.f = sys.stdin.readlines()
if opts.f or (opts.x and opts.y):
plot_scatter(opts.f, opts.x, opts.y, opts.size, opts.pch, opts.colour, opts.t)
else:
print("nothing to plot!")
if __name__ == "__main__":
main()
| 31.957627 | 98 | 0.557147 |
acf207eaae99ec8e19c081c7d94e8c718e46ce9e | 7,055 | py | Python | ixnetwork_restpy/testplatform/sessions/ixnetwork/globals/topology/vxlanv6/vxlanv6_342acd1ce35073333be5a63be5b86440.py | Vibaswan/ixnetwork_restpy | 239fedc7050890746cbabd71ea1e91c68d9e5cad | [
"MIT"
] | null | null | null | ixnetwork_restpy/testplatform/sessions/ixnetwork/globals/topology/vxlanv6/vxlanv6_342acd1ce35073333be5a63be5b86440.py | Vibaswan/ixnetwork_restpy | 239fedc7050890746cbabd71ea1e91c68d9e5cad | [
"MIT"
] | null | null | null | ixnetwork_restpy/testplatform/sessions/ixnetwork/globals/topology/vxlanv6/vxlanv6_342acd1ce35073333be5a63be5b86440.py | Vibaswan/ixnetwork_restpy | 239fedc7050890746cbabd71ea1e91c68d9e5cad | [
"MIT"
] | null | null | null | # MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
class Vxlanv6(Base):
"""VXLAN global and per-port settings
The Vxlanv6 class encapsulates a required vxlanv6 resource which will be retrieved from the server every time the property is accessed.
"""
__slots__ = ()
_SDM_NAME = 'vxlanv6'
_SDM_ATT_MAP = {
'Count': 'count',
'DescriptiveName': 'descriptiveName',
'InnerFrameMinimumSize': 'innerFrameMinimumSize',
'Name': 'name',
'OuterIpDestMode': 'outerIpDestMode',
'RowNames': 'rowNames',
'Udp_dest': 'udp_dest',
}
def __init__(self, parent):
super(Vxlanv6, self).__init__(parent)
@property
def StartRate(self):
"""
Returns
-------
- obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.globals.topology.ipv6autoconfiguration.startrate.startrate_1bba90e9b5242a924a45ce8454358006.StartRate): An instance of the StartRate class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.globals.topology.ipv6autoconfiguration.startrate.startrate_1bba90e9b5242a924a45ce8454358006 import StartRate
if self._properties.get('StartRate', None) is None:
return StartRate(self)._select()
else:
return self._properties.get('StartRate')
@property
def StopRate(self):
"""
Returns
-------
- obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.globals.topology.ipv6autoconfiguration.stoprate.stoprate_e57c921a314c7c4a39ab432f5e2970a0.StopRate): An instance of the StopRate class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.globals.topology.ipv6autoconfiguration.stoprate.stoprate_e57c921a314c7c4a39ab432f5e2970a0 import StopRate
if self._properties.get('StopRate', None) is None:
return StopRate(self)._select()
else:
return self._properties.get('StopRate')
@property
def Count(self):
"""
Returns
-------
- number: Number of elements inside associated multiplier-scaled container object, e.g. number of devices inside a Device Group.
"""
return self._get_attribute(self._SDM_ATT_MAP['Count'])
@property
def DescriptiveName(self):
"""
Returns
-------
- str: Longer, more descriptive name for element. It's not guaranteed to be unique like -name-, but may offer more context.
"""
return self._get_attribute(self._SDM_ATT_MAP['DescriptiveName'])
@property
def InnerFrameMinimumSize(self):
"""
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): Pad inner frame with 0 in order to have inner frame of minumum specified size.
"""
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['InnerFrameMinimumSize']))
@property
def Name(self):
"""
Returns
-------
- str: Name of NGPF element, guaranteed to be unique in Scenario
"""
return self._get_attribute(self._SDM_ATT_MAP['Name'])
@Name.setter
def Name(self, value):
self._set_attribute(self._SDM_ATT_MAP['Name'], value)
@property
def OuterIpDestMode(self):
"""
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): Indicates what is the outer destination IP in the generated fpga traffic
"""
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['OuterIpDestMode']))
@property
def RowNames(self):
"""
Returns
-------
- list(str): Name of rows
"""
return self._get_attribute(self._SDM_ATT_MAP['RowNames'])
@property
def Udp_dest(self):
"""
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): UDP Destination Port.
"""
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['Udp_dest']))
def update(self, Name=None):
"""Updates vxlanv6 resource on the server.
This method has some named parameters with a type: obj (Multivalue).
The Multivalue class has documentation that details the possible values for those named parameters.
Args
----
- Name (str): Name of NGPF element, guaranteed to be unique in Scenario
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
def get_device_ids(self, PortNames=None, InnerFrameMinimumSize=None, OuterIpDestMode=None, Udp_dest=None):
"""Base class infrastructure that gets a list of vxlanv6 device ids encapsulated by this object.
Use the optional regex parameters in the method to refine the list of device ids encapsulated by this object.
Args
----
- PortNames (str): optional regex of port names
- InnerFrameMinimumSize (str): optional regex of innerFrameMinimumSize
- OuterIpDestMode (str): optional regex of outerIpDestMode
- Udp_dest (str): optional regex of udp_dest
Returns
-------
- list(int): A list of device ids that meets the regex criteria provided in the method parameters
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._get_ngpf_device_ids(locals())
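    # Illustrative usage sketch (not part of the original class). Assuming
    # `vxlanv6` is an instance obtained from an IxNetwork session, device ids can
    # be filtered with regexes over the attributes listed above, e.g.:
    #   device_ids = vxlanv6.get_device_ids(PortNames='Port 1', Udp_dest='4789')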
| 37.930108 | 201 | 0.671722 |
acf20830553fbca6268e876e8fdeee34e05f8439 | 3,391 | py | Python | localpackage/utils.py | chapmanwilliam/Ogden8 | e17b26609fc3cdd5650bfeba387bd7253513e00e | [
"Apache-2.0"
] | null | null | null | localpackage/utils.py | chapmanwilliam/Ogden8 | e17b26609fc3cdd5650bfeba387bd7253513e00e | [
"Apache-2.0"
] | null | null | null | localpackage/utils.py | chapmanwilliam/Ogden8 | e17b26609fc3cdd5650bfeba387bd7253513e00e | [
"Apache-2.0"
] | null | null | null | import math
from datetime import datetime
from dateutil.parser import parse
from localpackage.errorLogging import errors
sexes=['Male','Female']
regions=['UK','EW','EN','SC','WA','NI','GB']
years=[2008, 2018]
wordPoints=['TRIAL', 'LIFE', 'RETIREMENT', 'INJURY']
plusMinus=['+','-']
fr=['Y','M','W','D','A']
discountOptions=['A','M','I','C','D']
defaultdiscountRate=-0.25/100
defaultSwiftCarpenterDiscountRate=5/100
defaultOgden=8
Ogden=[7,8]
ContDetailsdefault={'employed':True,'qualification':'D','disabled':False} #default
Ogden7={'year':2008,'region':'UK','yrAttainedIn':2011}
Ogden8={'year':2018,'region':'UK','yrAttainedIn':2022}
def isfloat(value):
try:
float(value)
return True
except ValueError:
return False
def returnFreq(freq,fromAge=None, toAge=None):
    #freq is a string like '<3Y' meaning every 3 years starting at the first date
    #returns a tuple (starts_at_first_date, ends_at_last_date, frequency_factor_per_year, interval_in_years)
if len(freq)<1:
errors.add("Nil length freq")
return False, False, 1, None
st=False
en=False
if freq[0]=='<': st=True
if freq[-1]=='>': en=True
if st and en: st=en=False #if both True turn them False
f=freq.strip('<').strip('>') #remove arrows
p=f[-1] #get main period Y,M,W,D
if len(f)>1:
if isfloat(f[:-1]):
n=float(f[:-1])
else:
n=1
else:
n=1
factor=1.0
if p=='Y':
tinterval=n #in years
factor=1.0/n
elif p=='M':
tinterval=(n*1/12) #in years
factor=12.0/n
elif p=='W':
tinterval=(n*1/52) #in years
factor=52.0/n
elif p=='D':
tinterval=(n*1/365.25) #in years
factor=365.25/n
elif p=='A':
tinterval=n
if (not toAge==None and not fromAge==None):
factor=1.0/((toAge-fromAge)*n)
else:
print("toAge and fromAge need to be specified for 'A' in returnFreq")
errors.add("toAge and fromAge need to be specified for 'A' in returnFreq")
factor=1.0/n
else:
#Error wrong period passed
print('Wrong period passed to returnFreq')
errors.add("Wrong period passed to returnFreq")
return st, en, 1, None
return st, en, factor, tinterval
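# Illustrative worked examples (not part of the original module), following the
# parsing rules above:
#   returnFreq('<3Y')  # -> (True, False, 1/3, 3.0): every 3 years, from the first date
#   returnFreq('2W')   # -> (False, False, 26.0, 2/52): every 2 weeks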
def discountFactor(yrs,discountRate):
#returns the discountFactor after yrs with discountRate
if discountRate==-1:
errors.add('Discount rate is -1')
return None
if yrs<0: return 1
factor=1/(1+discountRate)
return factor**yrs
def termCertain(yrs,discountRate):
if discountRate==-1:
errors.add('Discount rate is -1')
return None
if yrs==0:
return 0
factor=1/(1+discountRate)
if factor==1:
return 1+yrs
else:
return ((factor**yrs)/(math.log(factor)))-(1/math.log(factor))
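# Illustrative worked example (not part of the original module). At a 5% discount
# rate over 10 years:
#   discountFactor(10, 0.05)  # ~= 1 / 1.05**10 ~= 0.614
#   termCertain(10, 0.05)     # ~= (0.614 - 1) / log(1/1.05) ~= 7.91, the multiplier
#                             #    for a continuous payment stream over those 10 years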
def is_date(string, fuzzy=False):
"""
Return whether the string can be interpreted as a date.
:param string: str, string to check for date
:param fuzzy: bool, ignore unknown tokens in string if True
"""
try:
parse(string, fuzzy=fuzzy)
return True
except ValueError:
return False
def parsedate(text):
return parse(text, dayfirst=True)
def parsedateString(text):
# text is of format d/m/y
parts = text.split('/')
d = int(parts[0])
m = int(parts[1])
y = int(parts[2])
return datetime(y, m, d)
| 26.492188 | 86 | 0.605131 |
acf2087deb7f449a87f582f794e7ea6f8544b521 | 6,741 | py | Python | geoadmin_api_call.py | nicolas-sitylb/tests | f992e173a35f73605bfd11f50fb46df85d4c3ed0 | [
"X11"
] | null | null | null | geoadmin_api_call.py | nicolas-sitylb/tests | f992e173a35f73605bfd11f50fb46df85d4c3ed0 | [
"X11"
] | null | null | null | geoadmin_api_call.py | nicolas-sitylb/tests | f992e173a35f73605bfd11f50fb46df85d4c3ed0 | [
"X11"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Compute Lidar tile coverage from a list of tile IDs
"""
# %%
import os, sys, re, math
import requests
import json
import numpy as np
import pandas as pd
import fiona
import geojson
import geopandas as gpd
import matplotlib.pyplot as plt
from shapely import geometry, wkt
from shapely.geometry import Polygon, shape
# %%
def jprint(obj):
# create a formatted string of the Python JSON object
text = json.dumps(obj, sort_keys=True, indent=4)
print(text)
def bbox_to_polygon(bbox):
xmin, ymin, xmax, ymax = bbox
polygon = Polygon([
[xmin,ymin],
[xmax,ymin],
[xmax,ymax],
[xmin,ymax],
[xmin,ymin]]
)
return polygon
def get_tile_meta():
layer = 'ch.swisstopo.swisssurface3d.metadata'
base_url = 'https://api3.geo.admin.ch/rest/services/api/MapServer'
find_url = base_url+'/find'
searchField = 'id'
searchText = ''
res0 = requests.get(url = base_url,
params = {'searchText': layer}
)
print(res0.status_code)
data0 = res0.json()
columns = set(data0.keys())
df = pd.DataFrame.from_dict(data0, orient='index').T
return(df)
def get_feature_info2(kw):
headers = {
'user_agent': 'Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.0.7113.93 Safari/537.36',
'content-type': 'application/json'
}
base_url = 'https://api3.geo.admin.ch/rest/services/api/MapServer'
find_url = base_url+'/find'
res = requests.get(url = find_url,
params = kw
)
data = res.json()['results']
gdf = None
if data:
G = gpd.GeoDataFrame(pd.json_normalize(data))
g = gpd.GeoDataFrame(data)['geometry']
gdf = pd.concat([G, g], axis=1)
gdf['geometry'] = gdf['geometry'].apply(lambda x: shape(x))
return gdf
def get_feature_info(**kw):
layer = kw.get('layer') if kw.get('layer') is not None else 'ch.swisstopo.images-swissimage-dop10.metadata'
searchField = kw.get('searchField')
searchText = kw.get('searchText')
headers = {
'user_agent': 'Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.0.7113.93 Safari/537.36',
'content-type': 'application/json'
}
base_url = 'https://api3.geo.admin.ch/rest/services/api/MapServer'
find_url = base_url+'/find'
res = requests.get(url = find_url,
params = {
'layer': layer,
'searchField': searchField,
'searchText': searchText,
'sr': 2056,
'geometryFormat': 'geojson'
}
)
if res.status_code == 200:
retval = res
if False: ## reset after tests
print("We got a record!")
data = res.json()['results']
data_keys = data[0].keys()
idx = data[0]['id']
layerName = data[0]['layerName']
bbox = bbox_to_polygon(data[0]['bbox'])
attributes = data[0]['attributes'] if data[0]['attributes'] else None
layerBodId = data[0]['layerBodId']
tile_geom = data[0]['geometry']['rings']
srs = int([v for k,v in data[0]['geometry']['spatialReference'].items()][0])
polygon = geometry.Polygon(tile_geom[0])
retval = {
'id': idx,
'layerName': layerName,
'bbox': bbox,
'attributes': attributes,
'layerBodId': layerBodId,
'srs': srs,
'geometry': polygon
}
else:
print(f"Error in response, status code is: {res.status_code}")
retval = None
return (res)
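# Illustrative usage sketch (not part of the original script); the layer and tile
# id below are assumptions for illustration:
#   res = get_feature_info(searchField='id', searchText='2501_1120',
#                          layer='ch.swisstopo.swisssurface3d.metadata')
#   data = res.json()['results']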
def convert_list_of_tiles_to_gdf(tiles_list):
results = []
for tile_id in tiles_list:
print("Processing tile {}".format(tile_id))
search_params = {
'searchField': 'id',
'searchText': tile_id,
'layer': 'ch.swisstopo.images-swissimage-dop10.metadata', #'ch.swisstopo.swisssurface3d.metadata'
'sr': 2056,
'geometryFormat': 'geojson'
}
results.append(get_feature_info2(search_params))
#df = pd.DataFrame(results)
gdf = pd.concat(results)
if 'data' in gdf.columns:
gdf.drop(columns=['data'], inplace=True)
gdf['const'] = 0
print("Processing finished successfully.")
return(gdf)
def get_all_tiles_district_nord_vaudois():
col_min = 2498
col_max = 2551
row_min = 1153
row_max = 1198
width = col_max - col_min
height = row_max - row_min
cells = width * height
blocks = int(math.ceil(cells/(4*height)))
data = []
for row in range(height):
for col in range(width):
st = str(col_min+col)+'_'+str(row_min+row)
print(f"Tile: {st}")
res = (get_feature_info(**{
'searchField': 'id',
'searchText': st,
'layer': 'ch.swisstopo.swisssurface3d.metadata',
'sr': 2056
})
)
if len(res.json()['results']) == 1:
data.append(res.json()['results'][0])
gdf = gpd.GeoDataFrame(data)
gdf['geometry'] = gdf.apply(lambda x: shape(x['geometry']), axis=1)
print("Processing finished successfully.")
return (gdf)
def get_district(inname='vaudois'):
res = get_feature_info(**{
'searchField': 'name',
'searchText': inname,
'layer': 'ch.swisstopo.swissboundaries3d-bezirk-flaeche.fill'
})
data = res.json()['results'][0]
gdf = gpd.GeoDataFrame(pd.DataFrame.from_dict(data, orient='index').T)
district = pd.json_normalize(data)
district['geometry'] = shape(data['geometry'])
district = gpd.GeoDataFrame(district)
return (gdf)
def get_district_tile_coverage(gdf_district):
return None
# %%
# %%
# %%
tiles_list_file = "./INPUT/LIDAR/list_of_lidar_tiles.txt"
stuff = ['lidar', 'swissalti3d', 'swissimage']
files = [ 'list_of_'+element+'_tiles_nv.txt' for element in stuff]
tiles_list_files = [os.path.join('./INPUT', f) for f in files]
for i, tiles_list_file in enumerate(tiles_list_files):
print(f"Processing file {tiles_list_file}...")
with open(tiles_list_file, 'r') as f:
tiles_list = f.readlines();
tiles_list = [re.sub('\r?\n', '', t).strip() for t in tiles_list]
gdf = convert_list_of_tiles_to_gdf(tiles_list)
if 'bbox' in gdf.columns and 'geometry.coordinates' in gdf.columns:
gdf.drop(columns=['bbox','geometry.coordinates'], inplace=True)
gdf.to_file(
filename = os.path.join('OUTPUT', stuff[i]+'_tiles_coverage_new.gpkg'),
driver ='GPKG'
)
# %%
| 30.364865 | 127 | 0.588488 |
acf209c8d5be924fb2d9d109633a1effcb8875c6 | 2,123 | py | Python | bb84/tests/common.py | brunorijsman/simulaqron-bb84-python | e67187373a941fb38bdc0cfe8cc82cb25399cd8b | [
"MIT"
] | 3 | 2020-05-13T06:09:47.000Z | 2021-12-21T21:11:42.000Z | bb84/tests/common.py | brunorijsman/simulaqron-bb84-python | e67187373a941fb38bdc0cfe8cc82cb25399cd8b | [
"MIT"
] | 1 | 2021-06-02T00:45:03.000Z | 2021-06-02T00:45:03.000Z | bb84/tests/common.py | brunorijsman/simulaqron-bb84-python | e67187373a941fb38bdc0cfe8cc82cb25399cd8b | [
"MIT"
] | 3 | 2020-02-15T01:04:20.000Z | 2021-07-22T16:19:02.000Z | import subprocess
import threading
import time
import bb84.server
def start_simulaqron(eve_present=False):
print("Stopping old simulaqron processes...")
subprocess.call(['simulaqron', 'stop'])
time.sleep(3)
print("Old simulaqron processes stopped")
print("Starting Simulaqron...")
nodes = 'Alice,Eve,Bob' if eve_present else 'Alice,Bob'
subprocess.call(['simulaqron', 'start', '--force', '--nodes', nodes, '--topology', 'path'])
time.sleep(3)
print("Simulaqron started")
def stop_simulaqron():
print("Stopping Simulaqron...")
subprocess.call(['simulaqron', 'stop'])
time.sleep(3)
print("Simulaqron stopped")
def run_nodes(key_size, block_size, window_size, eve_present=False, eve_observe_percentage=0):
print("Creating Alice...")
neighbor = "Eve" if eve_present else "Bob"
alice = bb84.Server("Alice", neighbor, trace=True, report=True)
print("Alice created")
print("Starting Alice...")
alice_thread = threading.Thread(target=alice.agree_key)
alice_thread.start()
print("Alice started")
print("Creating Bob...")
neighbor = "Eve" if eve_present else "Alice"
bob = bb84.Client("Bob", neighbor, key_size, window_size, block_size, trace=True, report=True)
print("Bob created")
print("Starting Bob...")
bob_thread = threading.Thread(target=bob.agree_key)
bob_thread.start()
print("Bob started")
if eve_present:
print("Creating Eve...")
eve = bb84.Middle("Eve", "Alice", "Bob", eve_observe_percentage, trace=True, report=True)
print("Eve created")
print("Starting Eve...")
eve_thread = threading.Thread(target=eve.pass_through)
eve_thread.start()
print("Eve started")
print("Waiting for Alice to finish...")
alice_thread.join()
print("Alice finished")
print("Waiting for Bob to finish...")
bob_thread.join()
print("Bob finished")
if eve_present:
print("Waiting for Eve to finish...")
eve_thread.join()
print("Eve finished")
return (alice, bob, eve)
return (alice, bob)
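# Illustrative usage sketch (not part of the original module); the key sizes below
# are assumptions for illustration:
#   start_simulaqron(eve_present=False)
#   alice, bob = run_nodes(key_size=32, block_size=8, window_size=4)
#   stop_simulaqron()
# With eve_present=True (and an eve_observe_percentage), run_nodes returns
# (alice, bob, eve) instead.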
| 27.571429 | 98 | 0.653792 |
acf209dd74067e76db527606d41a8764a3f82d53 | 1,922 | py | Python | Python/Logger_Rate_Limiter.py | treethree/LeetCode | 4c6d6e1ee92d87424fe5b9f20b8eef8d34e74761 | [
"Unlicense"
] | null | null | null | Python/Logger_Rate_Limiter.py | treethree/LeetCode | 4c6d6e1ee92d87424fe5b9f20b8eef8d34e74761 | [
"Unlicense"
] | null | null | null | Python/Logger_Rate_Limiter.py | treethree/LeetCode | 4c6d6e1ee92d87424fe5b9f20b8eef8d34e74761 | [
"Unlicense"
] | null | null | null | # Design a logger system that receive stream of messages along with its timestamps, each message should be printed if and only if it is not printed in the last 10 seconds.
#
# Given a message and a timestamp (in seconds granularity), return true if the message should be printed in the given timestamp, otherwise returns false.
#
# It is possible that several messages arrive roughly at the same time.
#
# Example:
#
# Logger logger = new Logger();
#
# // logging string "foo" at timestamp 1
# logger.shouldPrintMessage(1, "foo"); returns true;
#
# // logging string "bar" at timestamp 2
# logger.shouldPrintMessage(2,"bar"); returns true;
#
# // logging string "foo" at timestamp 3
# logger.shouldPrintMessage(3,"foo"); returns false;
#
# // logging string "bar" at timestamp 8
# logger.shouldPrintMessage(8,"bar"); returns false;
#
# // logging string "foo" at timestamp 10
# logger.shouldPrintMessage(10,"foo"); returns false;
#
# // logging string "foo" at timestamp 11
# logger.shouldPrintMessage(11,"foo"); returns true;
# Time: O(1), amortized
# Space: O(k), k is the max number of printed messages in last 10 seconds
import collections
class Logger(object):
def __init__(self):
"""
Initialize your data structure here.
"""
self.__dq = collections.deque()
self.__printed = set()
def shouldPrintMessage(self, timestamp, message):
"""
Returns true if the message should be printed in the given timestamp, otherwise returns false. The timestamp is in seconds granularity.
:type timestamp: int
:type message: str
:rtype: bool
"""
while self.__dq and self.__dq[0][0] <= timestamp - 10:
self.__printed.remove(self.__dq.popleft()[1])
if message in self.__printed:
return False
self.__dq.append((timestamp, message))
self.__printed.add(message)
return True
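# Illustrative usage sketch (not part of the original solution), mirroring the
# example from the problem statement above.
if __name__ == "__main__":
    logger = Logger()
    assert logger.shouldPrintMessage(1, "foo") is True
    assert logger.shouldPrintMessage(2, "bar") is True
    assert logger.shouldPrintMessage(3, "foo") is False
    assert logger.shouldPrintMessage(11, "foo") is True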
| 33.719298 | 171 | 0.67898 |
acf20b277d7614776af288bf4f7bc4fd6e9855bd | 3,442 | py | Python | pyalgotrade/tools/resample.py | cdyfng/pyalgotrade | c9e8d950c8d911d5f1bed7d821c4cf6fd37f3a3c | [
"Apache-2.0"
] | 1,000 | 2016-01-26T12:10:11.000Z | 2022-03-01T23:59:50.000Z | pyalgotrade/tools/resample.py | cdyfng/pyalgotrade | c9e8d950c8d911d5f1bed7d821c4cf6fd37f3a3c | [
"Apache-2.0"
] | 22 | 2016-01-26T15:14:09.000Z | 2019-01-30T02:36:38.000Z | pyalgotrade/tools/resample.py | cdyfng/pyalgotrade | c9e8d950c8d911d5f1bed7d821c4cf6fd37f3a3c | [
"Apache-2.0"
] | 613 | 2016-01-27T01:02:30.000Z | 2022-03-21T01:38:58.000Z | # PyAlgoTrade
#
# Copyright 2011-2015 Gabriel Martin Becedillas Ruiz
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
.. moduleauthor:: Gabriel Martin Becedillas Ruiz <gabriel.becedillas@gmail.com>
"""
import os
from pyalgotrade import dispatcher
from pyalgotrade.dataseries import resampled
datetime_format = "%Y-%m-%d %H:%M:%S"
class CSVFileWriter(object):
def __init__(self, csvFile):
self.__file = open(csvFile, "w")
self.__writeLine("Date Time", "Open", "High", "Low", "Close", "Volume", "Adj Close")
def __writeLine(self, *values):
line = ",".join([str(value) for value in values])
self.__file.write(line)
self.__file.write(os.linesep)
def writeBar(self, bar_):
adjClose = bar_.getAdjClose()
if adjClose is None:
adjClose = ""
dateTime = bar_.getDateTime().strftime(datetime_format)
self.__writeLine(
dateTime,
bar_.getOpen(),
bar_.getHigh(),
bar_.getLow(),
bar_.getClose(),
bar_.getVolume(),
adjClose
)
def close(self):
self.__file.close()
def resample_impl(barFeed, frequency, csvFile):
instruments = barFeed.getRegisteredInstruments()
if len(instruments) != 1:
raise Exception("Only barfeeds with 1 instrument can be resampled")
csvWriter = CSVFileWriter(csvFile)
def on_bar(ds, dateTime, value):
csvWriter.writeBar(value)
insrumentDS = barFeed[instruments[0]]
resampledDS = resampled.ResampledBarDataSeries(insrumentDS, frequency)
resampledDS.getNewValueEvent().subscribe(on_bar)
# Process all bars.
disp = dispatcher.Dispatcher()
disp.addSubject(barFeed)
disp.run()
resampledDS.pushLast()
def resample_to_csv(barFeed, frequency, csvFile):
"""Resample a BarFeed into a CSV file grouping bars by a certain frequency.
The resulting file can be loaded using :class:`pyalgotrade.barfeed.csvfeed.GenericBarFeed`.
The CSV file will have the following format:
::
Date Time,Open,High,Low,Close,Volume,Adj Close
2013-01-01 00:00:00,13.51001,13.56,13.51,13.56,273.88014126,13.51001
:param barFeed: The bar feed that will provide the bars. It should only hold bars from a single instrument.
:type barFeed: :class:`pyalgotrade.barfeed.BarFeed`
:param frequency: The grouping frequency in seconds. Must be > 0.
:param csvFile: The path to the CSV file to write.
:type csvFile: string.
.. note::
* Datetimes are stored without timezone information.
* **Adj Close** column may be empty if the input bar feed doesn't have that info.
* Supported resampling frequencies are:
* Less than bar.Frequency.DAY
* bar.Frequency.DAY
* bar.Frequency.MONTH
"""
assert frequency > 0, "Invalid frequency"
resample_impl(barFeed, frequency, csvFile)
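# Illustrative usage sketch (not part of the original module); the feed setup and
# file names are assumptions for illustration:
#   from pyalgotrade.barfeed import yahoofeed
#   from pyalgotrade import bar
#   feed = yahoofeed.Feed()
#   feed.addBarsFromCSV("orcl", "orcl-2000.csv")
#   resample_to_csv(feed, bar.Frequency.DAY, "orcl-daily.csv")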
| 32.168224 | 111 | 0.675189 |
acf20b7ae06fa8637d6b7126aab9e5559fabce42 | 175 | py | Python | web/frontend/__init__.py | nschrader/project_whiskey | a251cc73127e8d92e4295e67d5d2987341bc1dab | [
"MIT"
] | 2 | 2018-04-30T13:31:48.000Z | 2018-10-01T14:03:28.000Z | web/frontend/__init__.py | nschrader/project_whiskey | a251cc73127e8d92e4295e67d5d2987341bc1dab | [
"MIT"
] | null | null | null | web/frontend/__init__.py | nschrader/project_whiskey | a251cc73127e8d92e4295e67d5d2987341bc1dab | [
"MIT"
] | null | null | null | import sys, inspect
from .views import *
from dao import *
__daos__ = inspect.getmembers(sys.modules["dao"], inspect.isclass)
app.jinja_env.globals.update(**dict(__daos__))
| 21.875 | 66 | 0.76 |
acf20bb9a02869f75eab0e2e2756be13ebdf6f74 | 4,563 | py | Python | OSRGAN/basicsr/metrics/psnr_ssim.py | MehradAria/SR-DCMFD | 7a95ee8c9f29c98461987977180c1e84fb1b4919 | [
"MIT"
] | 3 | 2021-11-05T07:39:57.000Z | 2022-03-18T13:48:03.000Z | basicsr/metrics/psnr_ssim.py | hanlinwu/BasicSR | 0830e4645ac33f3a040d85cf1a363ac987b0437a | [
"Apache-2.0"
] | 1 | 2022-01-14T11:22:15.000Z | 2022-01-14T11:22:15.000Z | basicsr/metrics/psnr_ssim.py | hanlinwu/BasicSR | 0830e4645ac33f3a040d85cf1a363ac987b0437a | [
"Apache-2.0"
] | 1 | 2021-11-05T07:40:08.000Z | 2021-11-05T07:40:08.000Z | import cv2
import numpy as np
from basicsr.metrics.metric_util import reorder_image, to_y_channel
from basicsr.utils.registry import METRIC_REGISTRY
@METRIC_REGISTRY.register()
def calculate_psnr(img1, img2, crop_border, input_order='HWC', test_y_channel=False):
"""Calculate PSNR (Peak Signal-to-Noise Ratio).
Ref: https://en.wikipedia.org/wiki/Peak_signal-to-noise_ratio
Args:
img1 (ndarray): Images with range [0, 255].
img2 (ndarray): Images with range [0, 255].
crop_border (int): Cropped pixels in each edge of an image. These
pixels are not involved in the PSNR calculation.
input_order (str): Whether the input order is 'HWC' or 'CHW'.
Default: 'HWC'.
test_y_channel (bool): Test on Y channel of YCbCr. Default: False.
Returns:
float: psnr result.
"""
    assert img1.shape == img2.shape, (f'Image shapes are different: {img1.shape}, {img2.shape}.')
if input_order not in ['HWC', 'CHW']:
raise ValueError(f'Wrong input_order {input_order}. Supported input_orders are ' '"HWC" and "CHW"')
img1 = reorder_image(img1, input_order=input_order)
img2 = reorder_image(img2, input_order=input_order)
img1 = img1.astype(np.float64)
img2 = img2.astype(np.float64)
if crop_border != 0:
img1 = img1[crop_border:-crop_border, crop_border:-crop_border, ...]
img2 = img2[crop_border:-crop_border, crop_border:-crop_border, ...]
if test_y_channel:
img1 = to_y_channel(img1)
img2 = to_y_channel(img2)
mse = np.mean((img1 - img2)**2)
if mse == 0:
return float('inf')
return 20. * np.log10(255. / np.sqrt(mse))
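# Illustrative worked example (not part of the original module): for two images
# that differ by exactly 1 at every pixel, mse == 1, so
#   calculate_psnr(img1, img2, crop_border=0)  # ~= 20 * log10(255) ~= 48.13 dB
# while identical images return float('inf').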
def _ssim(img1, img2):
"""Calculate SSIM (structural similarity) for one channel images.
It is called by func:`calculate_ssim`.
Args:
img1 (ndarray): Images with range [0, 255] with order 'HWC'.
img2 (ndarray): Images with range [0, 255] with order 'HWC'.
Returns:
float: ssim result.
"""
c1 = (0.01 * 255)**2
c2 = (0.03 * 255)**2
img1 = img1.astype(np.float64)
img2 = img2.astype(np.float64)
kernel = cv2.getGaussianKernel(11, 1.5)
window = np.outer(kernel, kernel.transpose())
mu1 = cv2.filter2D(img1, -1, window)[5:-5, 5:-5]
mu2 = cv2.filter2D(img2, -1, window)[5:-5, 5:-5]
mu1_sq = mu1**2
mu2_sq = mu2**2
mu1_mu2 = mu1 * mu2
sigma1_sq = cv2.filter2D(img1**2, -1, window)[5:-5, 5:-5] - mu1_sq
sigma2_sq = cv2.filter2D(img2**2, -1, window)[5:-5, 5:-5] - mu2_sq
sigma12 = cv2.filter2D(img1 * img2, -1, window)[5:-5, 5:-5] - mu1_mu2
ssim_map = ((2 * mu1_mu2 + c1) * (2 * sigma12 + c2)) / ((mu1_sq + mu2_sq + c1) * (sigma1_sq + sigma2_sq + c2))
return ssim_map.mean()
@METRIC_REGISTRY.register()
def calculate_ssim(img1, img2, crop_border, input_order='HWC', test_y_channel=False):
"""Calculate SSIM (structural similarity).
Ref:
Image quality assessment: From error visibility to structural similarity
The results are the same as that of the official released MATLAB code in
https://ece.uwaterloo.ca/~z70wang/research/ssim/.
For three-channel images, SSIM is calculated for each channel and then
averaged.
Args:
img1 (ndarray): Images with range [0, 255].
img2 (ndarray): Images with range [0, 255].
crop_border (int): Cropped pixels in each edge of an image. These
pixels are not involved in the SSIM calculation.
input_order (str): Whether the input order is 'HWC' or 'CHW'.
Default: 'HWC'.
test_y_channel (bool): Test on Y channel of YCbCr. Default: False.
Returns:
float: ssim result.
"""
    assert img1.shape == img2.shape, (f'Image shapes are different: {img1.shape}, {img2.shape}.')
if input_order not in ['HWC', 'CHW']:
raise ValueError(f'Wrong input_order {input_order}. Supported input_orders are ' '"HWC" and "CHW"')
img1 = reorder_image(img1, input_order=input_order)
img2 = reorder_image(img2, input_order=input_order)
img1 = img1.astype(np.float64)
img2 = img2.astype(np.float64)
if crop_border != 0:
img1 = img1[crop_border:-crop_border, crop_border:-crop_border, ...]
img2 = img2[crop_border:-crop_border, crop_border:-crop_border, ...]
if test_y_channel:
img1 = to_y_channel(img1)
img2 = to_y_channel(img2)
ssims = []
for i in range(img1.shape[2]):
ssims.append(_ssim(img1[..., i], img2[..., i]))
return np.array(ssims).mean()
| 35.372093 | 114 | 0.644751 |
acf20c4c25ff12709d8a44988e1613299dcdc0d8 | 6,901 | py | Python | main.py | sk8erchoi/rltrader | 290156f1ed5632e703d56dcf36cd1ad3215fcf11 | [
"MIT"
] | null | null | null | main.py | sk8erchoi/rltrader | 290156f1ed5632e703d56dcf36cd1ad3215fcf11 | [
"MIT"
] | null | null | null | main.py | sk8erchoi/rltrader | 290156f1ed5632e703d56dcf36cd1ad3215fcf11 | [
"MIT"
] | null | null | null | import os
import sys
import logging
import argparse
import json
from quantylab.rltrader import settings
from quantylab.rltrader import utils
from quantylab.rltrader import data_manager
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--stock_code', nargs='+')
parser.add_argument('--ver', choices=['v1', 'v2', 'v3', 'v4'], default='v2')
parser.add_argument('--rl_method', choices=['dqn', 'pg', 'ac', 'a2c', 'a3c', 'monkey'])
parser.add_argument('--net', choices=['dnn', 'lstm', 'cnn', 'q3', 'monkey'], default='dnn')
parser.add_argument('--num_steps', type=int, default=1)
parser.add_argument('--lr', type=float, default=0.001)
parser.add_argument('--discount_factor', type=float, default=0.9)
parser.add_argument('--start_epsilon', type=float, default=0)
parser.add_argument('--balance', type=int, default=10000000)
parser.add_argument('--num_epoches', type=int, default=100)
parser.add_argument('--backend', choices=['pytorch', 'tensorflow', 'plaidml'], default='pytorch')
parser.add_argument('--output_name', default=utils.get_time_str())
parser.add_argument('--value_network_name')
parser.add_argument('--policy_network_name')
parser.add_argument('--reuse_models', action='store_true')
parser.add_argument('--learning', action='store_true')
parser.add_argument('--start_date', default='20200101')
parser.add_argument('--end_date', default='20201231')
args = parser.parse_args()
    # Set up the backend
os.environ['RLTRADER_BACKEND'] = args.backend
if args.backend == 'tensorflow':
os.environ['KERAS_BACKEND'] = 'tensorflow'
elif args.backend == 'plaidml':
os.environ['KERAS_BACKEND'] = 'plaidml.keras.backend'
    # Set up the output path
output_path = os.path.join(settings.BASE_DIR,
'output/{}_{}_{}'.format(args.output_name, args.rl_method, args.net))
if not os.path.isdir(output_path):
os.makedirs(output_path)
    # Record the run parameters
with open(os.path.join(output_path, 'params.json'), 'w') as f:
f.write(json.dumps(vars(args)))
    # Set up logging
log_path = os.path.join(output_path, '{}.log'.format(args.output_name))
if os.path.exists(log_path):
os.remove(log_path)
file_handler = logging.FileHandler(filename=log_path, encoding='utf-8')
file_handler.setLevel(logging.INFO)
stream_handler = logging.StreamHandler(sys.stdout)
stream_handler.setLevel(logging.WARNING)
logging.basicConfig(format="%(message)s", handlers=[file_handler, stream_handler], level=logging.INFO)
    # Logging and the Keras backend must be configured before importing the RLTrader modules
from quantylab.rltrader.learners import ReinforcementLearner, DQNLearner, \
PolicyGradientLearner, ActorCriticLearner, A2CLearner, A3CLearner
    # Prepare model paths
    # Model format: h5 for TensorFlow, pickle for PyTorch
value_network_path = ''
policy_network_path = ''
if args.value_network_name is not None:
value_network_path = os.path.join(settings.BASE_DIR, 'models/{}.mdl'.format(args.value_network_name))
else:
value_network_path = os.path.join(output_path, '{}_{}_{}_value.mdl'.format(args.output_name, args.rl_method, args.net))
if args.policy_network_name is not None:
policy_network_path = os.path.join(settings.BASE_DIR, 'models/{}.mdl'.format(args.policy_network_name))
else:
policy_network_path = os.path.join(output_path, '{}_{}_{}_policy.mdl'.format(args.output_name, args.rl_method, args.net))
common_params = {}
list_stock_code = []
list_chart_data = []
list_training_data = []
list_min_trading_unit = []
list_max_trading_unit = []
for stock_code in args.stock_code:
        # Prepare chart data and training data
chart_data, training_data = data_manager.load_data(
stock_code, args.start_date, args.end_date, ver=args.ver)
assert len(chart_data) >= args.num_steps
        # Set the minimum/maximum trading units
min_trading_unit = max(int(100000 / chart_data.iloc[-1]['close']), 1)
max_trading_unit = max(int(1000000 / chart_data.iloc[-1]['close']), 1)
        # Set common parameters
common_params = {'rl_method': args.rl_method,
'net': args.net, 'num_steps': args.num_steps, 'lr': args.lr,
'balance': args.balance, 'num_epoches': args.num_epoches,
'discount_factor': args.discount_factor, 'start_epsilon': args.start_epsilon,
'output_path': output_path, 'reuse_models': args.reuse_models}
        # Start reinforcement learning
learner = None
if args.rl_method != 'a3c':
common_params.update({'stock_code': stock_code,
'chart_data': chart_data,
'training_data': training_data,
'min_trading_unit': min_trading_unit,
'max_trading_unit': max_trading_unit})
if args.rl_method == 'dqn':
learner = DQNLearner(**{**common_params,
'value_network_path': value_network_path})
elif args.rl_method == 'pg':
learner = PolicyGradientLearner(**{**common_params,
'policy_network_path': policy_network_path})
elif args.rl_method == 'ac':
learner = ActorCriticLearner(**{**common_params,
'value_network_path': value_network_path,
'policy_network_path': policy_network_path})
elif args.rl_method == 'a2c':
learner = A2CLearner(**{**common_params,
'value_network_path': value_network_path,
'policy_network_path': policy_network_path})
elif args.rl_method == 'monkey':
args.net = args.rl_method
args.num_epoches = 1
args.discount_factor = None
args.start_epsilon = 1
args.learning = False
learner = ReinforcementLearner(**common_params)
if learner is not None:
learner.run(learning=args.learning)
learner.save_models()
else:
list_stock_code.append(stock_code)
list_chart_data.append(chart_data)
list_training_data.append(training_data)
list_min_trading_unit.append(min_trading_unit)
list_max_trading_unit.append(max_trading_unit)
if args.rl_method == 'a3c':
learner = A3CLearner(**{
**common_params,
'list_stock_code': list_stock_code,
'list_chart_data': list_chart_data,
'list_training_data': list_training_data,
'list_min_trading_unit': list_min_trading_unit,
'list_max_trading_unit': list_max_trading_unit,
'value_network_path': value_network_path,
'policy_network_path': policy_network_path})
learner.run(learning=args.learning)
learner.save_models()
| 44.811688 | 129 | 0.643385 |
acf20ca064ce1cb0f1e38d6a9d076bbd92ac3c82 | 2,609 | py | Python | src/utils/common/logging.py | paulwarkentin/pytorch-neural-doodle | 4b0c8da17351ef1662a0ce0bf5979027bafb130e | [
"MIT"
] | 15 | 2018-10-07T14:54:33.000Z | 2021-10-09T11:22:14.000Z | src/utils/common/logging.py | paulwarkentin/pytorch-neural-doodle | 4b0c8da17351ef1662a0ce0bf5979027bafb130e | [
"MIT"
] | null | null | null | src/utils/common/logging.py | paulwarkentin/pytorch-neural-doodle | 4b0c8da17351ef1662a0ce0bf5979027bafb130e | [
"MIT"
] | 2 | 2019-05-31T19:27:38.000Z | 2020-01-08T15:02:12.000Z | ##
## pytorch-neural-doodle/src/utils/common/logging.py
##
## Created by Paul Warkentin <paul@warkentin.email> on 05/08/2018.
## Updated by Paul Warkentin <paul@warkentin.email> on 05/08/2018.
##
import os
import sys
__exec_dir = sys.path[0]
while os.path.basename(__exec_dir) != "src":
__exec_dir = os.path.dirname(__exec_dir)
sys.path.insert(0, __exec_dir)
prefix_name = "torchdoodle"
def logging(*args, **kwargs):
	"""Prints a message to the console and flushes the output.
	Arguments:
		args: List of arguments passed to the print function.
		kwargs: Dictionary of arguments passed to the print function.
	"""
	print(*args, **kwargs)
	sys.stdout.flush()
def logging_info(*args, **kwargs):
"""Prints an information message to the console with an app prefix.
Arguments:
args: List of arguments passed to the print function.
kwargs: Dictionary of arguments passed to the print function.
"""
__logging("INFO", *args, **kwargs)
def logging_wait(*args, **kwargs):
"""Prints a waiting message to the console with an app prefix.
Arguments:
args: List of arguments passed to the print function.
kwargs: Dictionary of arguments passed to the print function.
"""
__logging("WAIT", *args, **kwargs)
def logging_warn(*args, should_exit = False, **kwargs):
"""Prints a warning message to the console with an app prefix.
Arguments:
args: List of arguments passed to the print function.
should_exit: Flag whether to exit the application after printing the message. Defaults to False.
kwargs: Dictionary of arguments passed to the print function.
"""
__logging("WARNING", *args, **kwargs)
if should_exit:
exit()
def logging_error(*args, should_exit = True, **kwargs):
"""Prints an error message to the console with an app prefix.
Arguments:
args: List of arguments passed to the print function.
should_exit: Flag whether to exit the application after printing the message. Defaults to True.
kwargs: Dictionary of arguments passed to the print function.
"""
__logging("ERROR", *args, **kwargs)
if should_exit:
exit()
def __logging(preprefix, *args, with_prefix = True, **kwargs):
	"""Prints a message to the console with a prefix and flushes the output.
	Arguments:
		preprefix: Pre-prefix of the message.
		args: List of arguments passed to the print function.
		with_prefix: Flag whether to print the message with a prefix. Defaults to True.
		kwargs: Dictionary of arguments passed to the print function.
	"""
	print("{}:".format(preprefix), end="")
	if with_prefix:
		print("{}:".format(prefix_name), end="")
	print(*args, **kwargs)
	sys.stdout.flush()
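# Usage sketch (added for illustration; the import path is an assumption based on
# the file location):
#
#   from utils.common.logging import logging_info, logging_error
#   logging_info("loaded", 5)     # prints "INFO:torchdoodle:loaded 5"
#   logging_error("bad input")    # prints "ERROR:torchdoodle:bad input" and exits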
| 28.67033 | 98 | 0.729398 |
acf20d569fdc6a5add9f727de015626d97e02c9b | 2,697 | py | Python | django/db/backends/sqlite3/features.py | indevgr/django | 0247c9b08f8da4a2d93b9cede6c615011552b55a | [
"PSF-2.0",
"BSD-3-Clause"
] | 1 | 2017-02-11T07:07:16.000Z | 2017-02-11T07:07:16.000Z | django/db/backends/sqlite3/features.py | indevgr/django | 0247c9b08f8da4a2d93b9cede6c615011552b55a | [
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null | django/db/backends/sqlite3/features.py | indevgr/django | 0247c9b08f8da4a2d93b9cede6c615011552b55a | [
"PSF-2.0",
"BSD-3-Clause"
] | 2 | 2018-03-30T04:24:48.000Z | 2021-05-09T12:39:09.000Z | from __future__ import unicode_literals
from django.db import utils
from django.db.backends.base.features import BaseDatabaseFeatures
from django.utils import six
from django.utils.functional import cached_property
from .base import Database
try:
import pytz
except ImportError:
pytz = None
class DatabaseFeatures(BaseDatabaseFeatures):
# SQLite cannot handle us only partially reading from a cursor's result set
# and then writing the same rows to the database in another cursor. This
# setting ensures we always read result sets fully into memory all in one
# go.
can_use_chunked_reads = False
test_db_allows_multiple_connections = False
supports_unspecified_pk = True
supports_timezones = False
supports_1000_query_parameters = False
supports_mixed_date_datetime_comparisons = False
has_bulk_insert = True
can_combine_inserts_with_and_without_auto_increment_pk = False
supports_foreign_keys = False
supports_column_check_constraints = False
autocommits_when_autocommit_is_off = True
can_introspect_decimal_field = False
can_introspect_positive_integer_field = True
can_introspect_small_integer_field = True
supports_transactions = True
atomic_transactions = False
can_rollback_ddl = True
supports_paramstyle_pyformat = False
supports_sequence_reset = False
can_clone_databases = True
supports_temporal_subtraction = True
@cached_property
def uses_savepoints(self):
return Database.sqlite_version_info >= (3, 6, 8)
@cached_property
def can_release_savepoints(self):
return self.uses_savepoints
@cached_property
def can_share_in_memory_db(self):
return (
six.PY3 and
Database.__name__ == 'sqlite3.dbapi2' and
Database.sqlite_version_info >= (3, 7, 13)
)
@cached_property
def supports_stddev(self):
"""Confirm support for STDDEV and related stats functions
SQLite supports STDDEV as an extension package; so
connection.ops.check_expression_support() can't unilaterally
rule out support for STDDEV. We need to manually check
whether the call works.
"""
with self.connection.cursor() as cursor:
cursor.execute('CREATE TABLE STDDEV_TEST (X INT)')
try:
cursor.execute('SELECT STDDEV(*) FROM STDDEV_TEST')
has_support = True
except utils.DatabaseError:
has_support = False
cursor.execute('DROP TABLE STDDEV_TEST')
return has_support
@cached_property
def has_zoneinfo_database(self):
return pytz is not None
| 33.296296 | 79 | 0.714868 |
acf20e6ca19c854910d7443d327117566e6b8f59 | 419 | py | Python | py/motor-3d/raycast-opt.py | dacanizares/IntroCS-ES | 1324b59a3bed86559117b01ad85384d593394d4a | [
"MIT"
] | 2 | 2020-03-21T19:12:10.000Z | 2020-03-27T03:59:41.000Z | py/motor-3d/raycast-opt.py | dacanizares/IntroCS-ES | 1324b59a3bed86559117b01ad85384d593394d4a | [
"MIT"
] | 13 | 2020-03-20T01:27:57.000Z | 2020-08-08T18:20:29.000Z | py/motor-3d/raycast-opt.py | dacanizares/IntroCS-ES | 1324b59a3bed86559117b01ad85384d593394d4a | [
"MIT"
] | null | null | null | def raycast(x, y, rot):
distancia = 0
	# Precompute the values...
direccion_x = math.sin(rot)
direccion_y = math.cos(rot)
while distancia < DISTANCIA_MAX_RAYO:
distancia += 0.1
		# ...and reuse them here
rayo_x = math.floor(x + direccion_x * distancia)
rayo_y = math.floor(y + direccion_y * distancia)
if toca_muro(rayo_x, rayo_y):
return distancia
return DISTANCIA_MAX_RAYO | 29.928571 | 52 | 0.684964 |
acf20ea72bb2b0ba749e0902daddd8a090a497c5 | 1,526 | py | Python | foodgram/users/models.py | sh2MAN/foodgram-project | 0b8b88a4066688318c5decf4869ed89374d0d0ac | [
"BSD-3-Clause"
] | null | null | null | foodgram/users/models.py | sh2MAN/foodgram-project | 0b8b88a4066688318c5decf4869ed89374d0d0ac | [
"BSD-3-Clause"
] | null | null | null | foodgram/users/models.py | sh2MAN/foodgram-project | 0b8b88a4066688318c5decf4869ed89374d0d0ac | [
"BSD-3-Clause"
] | null | null | null | from django.contrib.auth import get_user_model
from django.db import models
from recipes.models import Recipe
User = get_user_model()
class Subscription(models.Model):
"""Подписики на авторов"""
user = models.ForeignKey(
User,
on_delete=models.CASCADE,
related_name='subscriber'
)
author = models.ForeignKey(
User,
on_delete=models.CASCADE,
related_name='subscribed'
)
class Meta:
constraints = [
models.UniqueConstraint(
fields=['user', 'author'],
name='unique_subscriptions'
)
]
verbose_name = 'Подписка'
verbose_name_plural = 'Подписки'
def __str__(self):
return f'{self.user.username} -> {self.author.username}'
class Favorite(models.Model):
"""Избранные рецепты пользователя"""
user = models.ForeignKey(
User,
on_delete=models.CASCADE,
related_name='favorites',
verbose_name='Пользователь'
)
recipe = models.ForeignKey(
Recipe,
on_delete=models.CASCADE,
related_name='favorite_user',
verbose_name='Рецепт'
)
class Meta:
constraints = [
models.UniqueConstraint(
fields=['user', 'recipe'],
name='unique_favorites'
)
]
verbose_name = 'Избранное'
verbose_name_plural = 'Избранное'
def __str__(self):
return f'{self.user.username} -*** {self.recipe.title}'
| 24.612903 | 64 | 0.586501 |
acf20fedaa08107fa1d8546941db036a85ea70f7 | 373 | py | Python | store/account_admin/urls.py | Masao987/isi-on-python- | 650a3af1b85bba3a6160dd3718f045532779bdbc | [
"MIT"
] | null | null | null | store/account_admin/urls.py | Masao987/isi-on-python- | 650a3af1b85bba3a6160dd3718f045532779bdbc | [
"MIT"
] | null | null | null | store/account_admin/urls.py | Masao987/isi-on-python- | 650a3af1b85bba3a6160dd3718f045532779bdbc | [
"MIT"
] | null | null | null | from django.conf.urls import patterns, url
from account_admin import views
from .forms import VendorLoginForm
urlpatterns = patterns(
'',
url(r'^login/$', views.login, name='login',
kwargs={'template_name': 'account_admin/login.html',
'authentication_form': VendorLoginForm}),
url(r'^register/$', views.register, name='register'),
)
| 24.866667 | 60 | 0.675603 |
acf210d44045ffb8a76b04cd095640e9b0d114cf | 499 | py | Python | src/tests/part2/q152_test_max_product_sub_array.py | hychrisli/PyAlgorithms | 71e537180f3b371d0d2cc47b11cb68ec13a8ac68 | [
"Apache-2.0"
] | null | null | null | src/tests/part2/q152_test_max_product_sub_array.py | hychrisli/PyAlgorithms | 71e537180f3b371d0d2cc47b11cb68ec13a8ac68 | [
"Apache-2.0"
] | null | null | null | src/tests/part2/q152_test_max_product_sub_array.py | hychrisli/PyAlgorithms | 71e537180f3b371d0d2cc47b11cb68ec13a8ac68 | [
"Apache-2.0"
] | null | null | null | from src.base.test_cases import TestCases
class MaxProductSubArrayTestCases(TestCases):
def __init__(self):
super(MaxProductSubArrayTestCases,self).__init__()
self.__add_test_case__('Test 1', [2, 3, -2, 4], 6)
self.__add_test_case__('Test 2', [2, -3, -2, 4], 48)
self.__add_test_case__('Test 3', [-2], -2)
self.__add_test_case__('Test 4', [], 0)
self.__add_test_case__('Test 5', [2, 0, 3], 3)
self.__add_test_case__('Test 6', [0, 2], 2) | 38.384615 | 60 | 0.627255 |
acf2122e0d01a4c10f0d328d8fc0ab56819a735b | 9,866 | py | Python | experiments/plot-scripts/plot_templates.py | VIDA-NYU/raster-join | c045d159419dc9c30579b9d3f9a88758fabe7937 | [
"BSD-3-Clause"
] | 5 | 2019-04-17T03:32:38.000Z | 2021-12-29T15:45:05.000Z | experiments/plot-scripts/plot_templates.py | VIDA-NYU/raster-join | c045d159419dc9c30579b9d3f9a88758fabe7937 | [
"BSD-3-Clause"
] | 3 | 2018-06-23T08:44:49.000Z | 2018-11-30T23:44:20.000Z | experiments/plot-scripts/plot_templates.py | ViDA-NYU/raster-join | c045d159419dc9c30579b9d3f9a88758fabe7937 | [
"BSD-3-Clause"
] | 2 | 2019-10-28T16:11:04.000Z | 2020-05-05T21:21:48.000Z | import matplotlib, itertools, pylab
import matplotlib.pyplot as mplplot
import matplotlib.ticker
from matplotlib.dates import MonthLocator, DateFormatter
from matplotlib.ticker import MaxNLocator
import numpy as np
import pandas as pd
import seaborn as sns
def bar_plot(
approaches,
inputUnit,
approachLabels,
axisLabels,
colors,
show,
figName,
stacked,
):
fig = mplplot.figure(figsize=(12,9), dpi=200)
with sns.axes_style("white"):
sns.set_style("ticks")
sns.set_context("talk")
BAR_GROUP_WIDTH = 1.0
BAR_GROUP_MARGIN = 0.5
SEPERATE_BARS_PER_GROUP = 3
SINGLE_BAR_MARGIN = 0.1
SINGLE_BAR_WIDTH = (
(BAR_GROUP_WIDTH - (SEPERATE_BARS_PER_GROUP - 1) * SINGLE_BAR_MARGIN)
/ SEPERATE_BARS_PER_GROUP)
assert SINGLE_BAR_WIDTH > 0, \
'Not enough space for the bars, adjust the relevant constants'
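        # Worked example (added note): with the defaults above,
        # SINGLE_BAR_WIDTH = (1.0 - (3 - 1) * 0.1) / 3 = 0.8 / 3 ~= 0.267 axis units,
        # so three bars plus two 0.1 margins exactly fill the 1.0-wide bar group.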
# plot details
line_width = 1
opacity = 0.7
fontsize = 37
group_step = BAR_GROUP_WIDTH + BAR_GROUP_MARGIN
pos = []
#positions of the first bar in the group
pos.append(np.arange(start=0, stop=len(inputUnit) * group_step, step=group_step))
for _ in itertools.repeat(None, len(approaches)):
pos.append(pos[-1] + SINGLE_BAR_MARGIN + SINGLE_BAR_WIDTH)
if SEPERATE_BARS_PER_GROUP % 2:
middle_bar_idx = int(SEPERATE_BARS_PER_GROUP / 2)
bar_group_middle = pos[0] + (
middle_bar_idx * (SINGLE_BAR_MARGIN + SINGLE_BAR_WIDTH))
else:
bar_group_middle = pos[0] + BAR_GROUP_WIDTH / 2
# make bar plots
for i in xrange(len(approaches)):
if(stacked):
mplplot.bar(pos[i], approaches[i][0], SINGLE_BAR_WIDTH,
alpha=opacity,
color='white',
edgecolor=colors[i],
linewidth=line_width,
hatch='//',
label=approachLabels[i][0])
mplplot.bar(pos[i], approaches[i][1], SINGLE_BAR_WIDTH,
bottom=approaches[i][0],
color=colors[i],
edgecolor=colors[i],
linewidth=line_width,
label=approachLabels[i][1])
else:
mplplot.bar(pos[i], approaches[i], SINGLE_BAR_WIDTH,
color=colors[i],
edgecolor=colors[i],
linewidth=line_width,
label=approachLabels[i])
mplplot.xticks(bar_group_middle, inputUnit, fontsize=fontsize)
for tick_line in fig.get_axes()[0].get_xticklines():
tick_line.set_visible(False)
mplplot.xlabel(axisLabels[0], fontsize=fontsize, fontweight ='heavy')
mplplot.yticks(fontsize=fontsize)
#pylab.ylim(ymax=0.5)
mplplot.tick_params(axis='y', which='major', pad=15)
mplplot.ylabel(axisLabels[1], fontsize=fontsize, fontweight ='heavy')
sns.despine()
ax = fig.gca()
# Make thicker the spines
mplplot.setp(ax.spines.values(), linewidth=3)
# Make thicker the ticks
ax.xaxis.set_tick_params(width=3)
ax.yaxis.set_tick_params(width=3)
mplplot.legend(loc='best', fontsize=24)
if show:
pylab.legend(loc='center', bbox_to_anchor=(0.5, 1.1), ncol=3, fontsize=fontsize)
#mplplot.show()
mplplot.tight_layout()
fig.savefig(figName, facecolor=fig.get_facecolor(),
edgecolor='none', dpi=200) #bbox_extra_artists=(lgd,) bbox_inches='tight'
def line_plot(
approaches,
inputUnit,
approachLabels,
axisLabels,
colors,
save,
figName,
specificSettings,
):
fig = mplplot.figure(figsize=(12, 9), dpi=200)
fontsize = 37
with sns.axes_style("white"):
sns.set_style("ticks")
sns.set_context("talk")
plot = fig.add_subplot(111)
handles = []
legends = []
markers = [ # defining some markers
'o',
'^',
's',
'v',
'D',
'*',
]
if specificSettings == "figure_10_1" or specificSettings == "figure_10_2":
for i in xrange(len(approaches)):
(h, ) = plot.semilogy (
inputUnit,
approaches[i],
marker=markers[i],
markersize=18,
linewidth=7,
color=colors[i],
)
handles.append(h)
legends.append(approachLabels[i])
else:
for i in xrange(len(approaches)):
(h, ) = plot.plot(
inputUnit,
approaches[i],
marker=markers[i],
markersize=18,
linewidth=7,
color=colors[i],
)
handles.append(h)
legends.append(approachLabels[i])
mplplot.legend(handles, legends, loc='best', fontsize=fontsize)
sns.despine()
ax = fig.gca()
# Make thicker the spines
mplplot.setp(ax.spines.values(), linewidth=3)
# Make thicker the ticks
ax.xaxis.set_tick_params(width=3)
ax.yaxis.set_tick_params(width=3)
xmin, xmax = ax.get_xlim()
ymin, ymax = ax.get_ylim()
if specificSettings == "figure_8_1":
#customize axes ranges
ax.set(xlim=(100, xmax), ylim=(-10, ymax)) #0.8
if specificSettings == "figure_8_2":
mplplot.yticks([0,0.25,0.50,0.75,1,1.25,1.5,1.75,2])
if specificSettings == "figure_9_1":
ax.set(xlim=(400, 900), ylim=(-10, ymax))
mplplot.yticks([0,50,100,150,200,250,300])
mplplot.xticks([400,500,600,700,800])
if specificSettings == "figure_10_1":
ax.set(xlim=(xmin, xmax), ylim=(0.005, ymax))
if specificSettings == "figure_12_1":
ax.set(xlim=(0, 21), ylim=(0, 11))
#custom x ticks
mplplot.xticks([1,2,3,5,10,20])
if specificSettings == "figure_13_1":
ax.set(xlim=(xmin, 2.3), ylim=(0, 58))
if specificSettings == "figure_14_1":
ax.set(xlim=(0, 2050), ylim=(0, 25))
mplplot.xticks([0,250,500,1000,2000])
plot.set_xlabel(axisLabels[0], fontsize=fontsize, fontweight='heavy')
mplplot.xticks(fontsize=fontsize)
plot.set_ylabel(axisLabels[1], fontsize=fontsize, fontweight='heavy')
mplplot.yticks(fontsize=fontsize)
mplplot.tight_layout()
# mplplot.show()
if save:
fig.savefig(figName, facecolor=fig.get_facecolor(),
edgecolor='none', dpi=200) #bbox_inches='tight'
def box_plot(data_to_plot, xlabels, figName, specificSettings):
# Create a figure instance
fig = mplplot.figure(figsize=(12, 9), dpi=200)
fontsize = 35
with sns.axes_style("white"):
sns.set_style("ticks")
sns.set_context("talk")
# Create an axes instance
plot = fig.add_subplot(111)
# Create the boxplot
bp = plot.boxplot(data_to_plot,0, '') #whis = 'range'
## change color and linewidth of the medians
for median in bp['medians']:
median.set(color='#e7298a', linewidth=3)
for box in bp['boxes']:
box.set(linewidth=3)
for whisker in bp['whiskers']:
whisker.set(linewidth=3)
for cap in bp['caps']:
cap.set(linewidth=3)
sns.despine()
ax = fig.gca()
# The spines
mplplot.setp(ax.spines.values(), linewidth=3)
# The ticks
ax.xaxis.set_tick_params(width=3)
ax.yaxis.set_tick_params(width=3)
# custom x-axis labels
ax.set_xticklabels(xlabels)
pylab.ylim(ymin=0)
if specificSettings == "twitter":
pylab.ylim(ymax=5)
mplplot.xlabel("Error bound (in meters)", fontsize=fontsize, fontweight ='heavy')
mplplot.xticks(fontsize=fontsize)
mplplot.ylabel("Percent Error ($\%$)", fontsize=fontsize, fontweight ='heavy')
mplplot.yticks(fontsize=fontsize)
# Save the figure
fig.savefig(figName, facecolor=fig.get_facecolor(),
edgecolor='none', bbox_inches='tight', dpi=200)
mplplot.close()
def scatter_error_plot(figName, zoom, y_err_low, y_err_high, accurate_div, approximate_div):
fontsize = 35
with sns.axes_style("white"):
sns.set_style("ticks")
sns.set_context("talk")
# Create an axes instance
fig, ax = mplplot.subplots(figsize=(12, 9), dpi=200)
ax.errorbar(accurate_div, approximate_div, yerr =[y_err_low, y_err_high], fmt='o', markersize=8, linewidth = 3, ecolor='r', color = 'b')
#xmin, xmax = ax.get_xlim()
ymin, ymax = ax.get_ylim()
if zoom == 0:
ax.set(xlim=(0, ymax), ylim=(0, ymax))
if zoom == 1:
ax.set(xlim=(0.07, 0.28), ylim=(0.07, 0.28))
diag_line, = ax.plot(ax.get_xlim(), ax.get_ylim(), ls="--", c=".3", linewidth = 3)
sns.despine()
# The spines
mplplot.setp(ax.spines.values(), linewidth=3)
# The ticks
ax.xaxis.set_tick_params(width=3)
ax.yaxis.set_tick_params(width=3)
if zoom == 0:
mplplot.xlabel("Accurate Aggregates ($x10^8$)", fontsize=fontsize, fontweight ='bold')
mplplot.ylabel("Approximate Aggregates ($x10^8$)", fontsize=fontsize, fontweight ='bold')
if zoom == 1:
mplplot.xticks(np.arange(0.07, 0.3, 0.05))
mplplot.yticks(np.arange(0.07, 0.3, 0.05))
mplplot.xticks(fontsize=fontsize)
mplplot.yticks(fontsize=fontsize)
fig.savefig(figName, facecolor='white',
edgecolor='none', bbox_inches='tight', dpi=200)
mplplot.close()
| 31.123028 | 144 | 0.573485 |
acf2137022fd0ecce45f0666fc7cb5018d5ccf82 | 458 | py | Python | tools/Sikuli/MoveMouseToPicture.sikuli/MoveMouseToPicture.py | APrudnikovaCenter2m/VA | 1754ef466cdf7322706f0bcb9d456b8cafe85c45 | [
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1 | 2021-03-17T13:53:07.000Z | 2021-03-17T13:53:07.000Z | tools/Sikuli/MoveMouseToPicture.sikuli/MoveMouseToPicture.py | APrudnikovaCenter2m/VA | 1754ef466cdf7322706f0bcb9d456b8cafe85c45 | [
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null | tools/Sikuli/MoveMouseToPicture.sikuli/MoveMouseToPicture.py | APrudnikovaCenter2m/VA | 1754ef466cdf7322706f0bcb9d456b8cafe85c45 | [
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1 | 2019-10-16T07:44:22.000Z | 2019-10-16T07:44:22.000Z | PictureToClick = sys.argv[1]
Find = 0
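# Try the match at progressively lower similarity (0.70, then 0.60, then 0.50)
# and hover the mouse over the first match found.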
if Find == 0:
if exists(Pattern(PictureToClick).similar(0.70)):
hover(Pattern(PictureToClick).similar(0.70))
Find = 1
if Find == 0:
if exists(Pattern(PictureToClick).similar(0.60)):
hover(Pattern(PictureToClick).similar(0.60))
Find = 1
if Find == 0:
if exists(Pattern(PictureToClick).similar(0.50)):
hover(Pattern(PictureToClick).similar(0.50))
Find = 1
exit(0)
| 19.913043 | 53 | 0.639738 |
acf21406c14d027b4fab88d1194f1819f853113b | 2,560 | py | Python | flake8_length/_parser.py | joelschutz/flake8-length | 6d934fad85ce1047f057016659edb64159c03046 | [
"MIT"
] | null | null | null | flake8_length/_parser.py | joelschutz/flake8-length | 6d934fad85ce1047f057016659edb64159c03046 | [
"MIT"
] | null | null | null | flake8_length/_parser.py | joelschutz/flake8-length | 6d934fad85ce1047f057016659edb64159c03046 | [
"MIT"
] | null | null | null | # built-in
import tokenize
from typing import Iterator, NamedTuple
SKIP_PREFIXES = ('noqa', 'n:', 'w:', 'e:', 'r:', 'pragma:')
SQL_PREFIXES = ('SELECT ', 'UPDATE', 'DELETE ')
TRUNCATE_TO = 10
EXCLUDED_TOKENS = frozenset({
tokenize.NEWLINE,
tokenize.NL,
tokenize.ENCODING,
tokenize.ENDMARKER,
tokenize.ERRORTOKEN,
tokenize.COMMA,
tokenize.LBRACE,
tokenize.RBRACE,
tokenize.COLON,
})
EXCLUDED_PAIRS = frozenset({
(tokenize.OP, '('),
(tokenize.OP, ')'),
(tokenize.OP, '['),
(tokenize.OP, ']'),
(tokenize.OP, '{'),
(tokenize.OP, '}'),
(tokenize.OP, ','),
(tokenize.OP, ';'),
(tokenize.OP, ':'),
})
class LineInfo(NamedTuple):
row: int
length: int
line: str
def get_line_length(line: str) -> int:
chunks = line.split()
if not chunks:
return len(line)
last_chunk_size = len(chunks[-1])
if last_chunk_size < TRUNCATE_TO:
return len(line)
return len(line) - last_chunk_size + TRUNCATE_TO
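# Example of the truncation rule above (added illustration): for a line whose last
# whitespace-separated chunk is a long token, e.g. a URL like
#   "# see https://example.com/some/very/long/path"
# only TRUNCATE_TO (10) characters of that final chunk are counted, so the
# reported length is len(line) - len(last_chunk) + 10.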
def get_lines_info(token: tokenize.TokenInfo) -> Iterator[LineInfo]:
if token.type in EXCLUDED_TOKENS:
return
if (token.type, token.string) in EXCLUDED_PAIRS:
return
if token.type not in {tokenize.COMMENT, tokenize.STRING}:
if token.end[1] > token.start[1]:
yield LineInfo(row=token.end[0], length=token.end[1], line=token.line)
else:
yield LineInfo(row=token.start[0], length=token.start[1], line=token.line)
return
if token.type == tokenize.COMMENT:
# skip shebang
if token.string.startswith('#!'):
return
# skip noqa, pragma, and other special tokens
if token.string.lower()[1:].lstrip().startswith(SKIP_PREFIXES):
return
# skip single-line strings
if token.type == tokenize.STRING and '\n' not in token.string:
# do not skip SQL queries
if token.string.lstrip('brfu').lstrip('"\'').startswith(SQL_PREFIXES):
yield LineInfo(
row=token.start[0],
length=token.start[1] + get_line_length(token.string),
line=token.line,
)
return
# analyze every line of comments and multiline strings
lines = token.string.splitlines()
for offset, line in enumerate(lines):
line_length = get_line_length(line)
if offset == 0:
line_length += token.start[1]
yield LineInfo(
row=token.start[0] + offset,
length=line_length,
line=line,
)
| 27.826087 | 86 | 0.596875 |
acf21494823ae8d84a395706420bd7db889aa1f5 | 36,456 | py | Python | Lib/test/test_asyncio/test_proactor_events.py | fochoao/CPython | c92e0770af558fe3e440e44d3605c3acaf3c5b68 | [
"TCL",
"0BSD"
] | null | null | null | Lib/test/test_asyncio/test_proactor_events.py | fochoao/CPython | c92e0770af558fe3e440e44d3605c3acaf3c5b68 | [
"TCL",
"0BSD"
] | null | null | null | Lib/test/test_asyncio/test_proactor_events.py | fochoao/CPython | c92e0770af558fe3e440e44d3605c3acaf3c5b68 | [
"TCL",
"0BSD"
] | null | null | null | """Tests for proactor_events.py"""
import io
import socket
import unittest
import sys
from unittest import mock
import asyncio
from asyncio.proactor_events import BaseProactorEventLoop
from asyncio.proactor_events import _ProactorSocketTransport
from asyncio.proactor_events import _ProactorWritePipeTransport
from asyncio.proactor_events import _ProactorDuplexPipeTransport
from asyncio.proactor_events import _ProactorDatagramTransport
from test import support
from test.support import socket_helper
from test.test_asyncio import utils as test_utils
def tearDownModule():
asyncio.set_event_loop_policy(None)
def close_transport(transport):
# Don't call transport.close() because the event loop and the IOCP proactor
# are mocked
if transport._sock is None:
return
transport._sock.close()
transport._sock = None
class ProactorSocketTransportTests(test_utils.TestCase):
def setUp(self):
super().setUp()
self.loop = self.new_test_loop()
self.addCleanup(self.loop.close)
self.proactor = mock.Mock()
self.loop._proactor = self.proactor
self.protocol = test_utils.make_test_protocol(asyncio.Protocol)
self.sock = mock.Mock(socket.socket)
def socket_transport(self, waiter=None):
transport = _ProactorSocketTransport(self.loop, self.sock,
self.protocol, waiter=waiter)
self.addCleanup(close_transport, transport)
return transport
def test_ctor(self):
fut = self.loop.create_future()
tr = self.socket_transport(waiter=fut)
test_utils.run_briefly(self.loop)
self.assertIsNone(fut.result())
self.protocol.connection_made(tr)
self.proactor.recv.assert_called_with(self.sock, 32768)
def test_loop_reading(self):
tr = self.socket_transport()
tr._loop_reading()
self.loop._proactor.recv.assert_called_with(self.sock, 32768)
self.assertFalse(self.protocol.data_received.called)
self.assertFalse(self.protocol.eof_received.called)
def test_loop_reading_data(self):
res = self.loop.create_future()
res.set_result(b'data')
tr = self.socket_transport()
tr._read_fut = res
tr._loop_reading(res)
self.loop._proactor.recv.assert_called_with(self.sock, 32768)
self.protocol.data_received.assert_called_with(b'data')
@unittest.skipIf(sys.flags.optimize, "Assertions are disabled in optimized mode")
def test_loop_reading_no_data(self):
res = self.loop.create_future()
res.set_result(b'')
tr = self.socket_transport()
self.assertRaises(AssertionError, tr._loop_reading, res)
tr.close = mock.Mock()
tr._read_fut = res
tr._loop_reading(res)
self.assertFalse(self.loop._proactor.recv.called)
self.assertTrue(self.protocol.eof_received.called)
self.assertTrue(tr.close.called)
def test_loop_reading_aborted(self):
err = self.loop._proactor.recv.side_effect = ConnectionAbortedError()
tr = self.socket_transport()
tr._fatal_error = mock.Mock()
tr._loop_reading()
tr._fatal_error.assert_called_with(
err,
'Fatal read error on pipe transport')
def test_loop_reading_aborted_closing(self):
self.loop._proactor.recv.side_effect = ConnectionAbortedError()
tr = self.socket_transport()
tr._closing = True
tr._fatal_error = mock.Mock()
tr._loop_reading()
self.assertFalse(tr._fatal_error.called)
def test_loop_reading_aborted_is_fatal(self):
self.loop._proactor.recv.side_effect = ConnectionAbortedError()
tr = self.socket_transport()
tr._closing = False
tr._fatal_error = mock.Mock()
tr._loop_reading()
self.assertTrue(tr._fatal_error.called)
def test_loop_reading_conn_reset_lost(self):
err = self.loop._proactor.recv.side_effect = ConnectionResetError()
tr = self.socket_transport()
tr._closing = False
tr._fatal_error = mock.Mock()
tr._force_close = mock.Mock()
tr._loop_reading()
self.assertFalse(tr._fatal_error.called)
tr._force_close.assert_called_with(err)
def test_loop_reading_exception(self):
err = self.loop._proactor.recv.side_effect = (OSError())
tr = self.socket_transport()
tr._fatal_error = mock.Mock()
tr._loop_reading()
tr._fatal_error.assert_called_with(
err,
'Fatal read error on pipe transport')
def test_write(self):
tr = self.socket_transport()
tr._loop_writing = mock.Mock()
tr.write(b'data')
self.assertEqual(tr._buffer, None)
tr._loop_writing.assert_called_with(data=b'data')
def test_write_no_data(self):
tr = self.socket_transport()
tr.write(b'')
self.assertFalse(tr._buffer)
def test_write_more(self):
tr = self.socket_transport()
tr._write_fut = mock.Mock()
tr._loop_writing = mock.Mock()
tr.write(b'data')
self.assertEqual(tr._buffer, b'data')
self.assertFalse(tr._loop_writing.called)
def test_loop_writing(self):
tr = self.socket_transport()
tr._buffer = bytearray(b'data')
tr._loop_writing()
self.loop._proactor.send.assert_called_with(self.sock, b'data')
self.loop._proactor.send.return_value.add_done_callback.\
assert_called_with(tr._loop_writing)
@mock.patch('asyncio.proactor_events.logger')
def test_loop_writing_err(self, m_log):
err = self.loop._proactor.send.side_effect = OSError()
tr = self.socket_transport()
tr._fatal_error = mock.Mock()
tr._buffer = [b'da', b'ta']
tr._loop_writing()
tr._fatal_error.assert_called_with(
err,
'Fatal write error on pipe transport')
tr._conn_lost = 1
tr.write(b'data')
tr.write(b'data')
tr.write(b'data')
tr.write(b'data')
tr.write(b'data')
self.assertEqual(tr._buffer, None)
m_log.warning.assert_called_with('socket.send() raised exception.')
def test_loop_writing_stop(self):
fut = self.loop.create_future()
fut.set_result(b'data')
tr = self.socket_transport()
tr._write_fut = fut
tr._loop_writing(fut)
self.assertIsNone(tr._write_fut)
def test_loop_writing_closing(self):
fut = self.loop.create_future()
fut.set_result(1)
tr = self.socket_transport()
tr._write_fut = fut
tr.close()
tr._loop_writing(fut)
self.assertIsNone(tr._write_fut)
test_utils.run_briefly(self.loop)
self.protocol.connection_lost.assert_called_with(None)
def test_abort(self):
tr = self.socket_transport()
tr._force_close = mock.Mock()
tr.abort()
tr._force_close.assert_called_with(None)
def test_close(self):
tr = self.socket_transport()
tr.close()
test_utils.run_briefly(self.loop)
self.protocol.connection_lost.assert_called_with(None)
self.assertTrue(tr.is_closing())
self.assertEqual(tr._conn_lost, 1)
self.protocol.connection_lost.reset_mock()
tr.close()
test_utils.run_briefly(self.loop)
self.assertFalse(self.protocol.connection_lost.called)
def test_close_write_fut(self):
tr = self.socket_transport()
tr._write_fut = mock.Mock()
tr.close()
test_utils.run_briefly(self.loop)
self.assertFalse(self.protocol.connection_lost.called)
def test_close_buffer(self):
tr = self.socket_transport()
tr._buffer = [b'data']
tr.close()
test_utils.run_briefly(self.loop)
self.assertFalse(self.protocol.connection_lost.called)
def test_close_invalid_sockobj(self):
tr = self.socket_transport()
self.sock.fileno.return_value = -1
tr.close()
test_utils.run_briefly(self.loop)
self.protocol.connection_lost.assert_called_with(None)
self.assertFalse(self.sock.shutdown.called)
@mock.patch('asyncio.base_events.logger')
def test_fatal_error(self, m_logging):
tr = self.socket_transport()
tr._force_close = mock.Mock()
tr._fatal_error(None)
self.assertTrue(tr._force_close.called)
self.assertTrue(m_logging.error.called)
def test_force_close(self):
tr = self.socket_transport()
tr._buffer = [b'data']
read_fut = tr._read_fut = mock.Mock()
write_fut = tr._write_fut = mock.Mock()
tr._force_close(None)
read_fut.cancel.assert_called_with()
write_fut.cancel.assert_called_with()
test_utils.run_briefly(self.loop)
self.protocol.connection_lost.assert_called_with(None)
self.assertEqual(None, tr._buffer)
self.assertEqual(tr._conn_lost, 1)
def test_loop_writing_force_close(self):
exc_handler = mock.Mock()
self.loop.set_exception_handler(exc_handler)
fut = self.loop.create_future()
fut.set_result(1)
self.proactor.send.return_value = fut
tr = self.socket_transport()
tr.write(b'data')
tr._force_close(None)
test_utils.run_briefly(self.loop)
exc_handler.assert_not_called()
def test_force_close_idempotent(self):
tr = self.socket_transport()
tr._closing = True
tr._force_close(None)
test_utils.run_briefly(self.loop)
self.assertFalse(self.protocol.connection_lost.called)
def test_fatal_error_2(self):
tr = self.socket_transport()
tr._buffer = [b'data']
tr._force_close(None)
test_utils.run_briefly(self.loop)
self.protocol.connection_lost.assert_called_with(None)
self.assertEqual(None, tr._buffer)
def test_call_connection_lost(self):
tr = self.socket_transport()
tr._call_connection_lost(None)
self.assertTrue(self.protocol.connection_lost.called)
self.assertTrue(self.sock.close.called)
def test_write_eof(self):
tr = self.socket_transport()
self.assertTrue(tr.can_write_eof())
tr.write_eof()
self.sock.shutdown.assert_called_with(socket.SHUT_WR)
tr.write_eof()
self.assertEqual(self.sock.shutdown.call_count, 1)
tr.close()
def test_write_eof_buffer(self):
tr = self.socket_transport()
f = self.loop.create_future()
tr._loop._proactor.send.return_value = f
tr.write(b'data')
tr.write_eof()
self.assertTrue(tr._eof_written)
self.assertFalse(self.sock.shutdown.called)
tr._loop._proactor.send.assert_called_with(self.sock, b'data')
f.set_result(4)
self.loop._run_once()
self.sock.shutdown.assert_called_with(socket.SHUT_WR)
tr.close()
def test_write_eof_write_pipe(self):
tr = _ProactorWritePipeTransport(
self.loop, self.sock, self.protocol)
self.assertTrue(tr.can_write_eof())
tr.write_eof()
self.assertTrue(tr.is_closing())
self.loop._run_once()
self.assertTrue(self.sock.close.called)
tr.close()
def test_write_eof_buffer_write_pipe(self):
tr = _ProactorWritePipeTransport(self.loop, self.sock, self.protocol)
f = self.loop.create_future()
tr._loop._proactor.send.return_value = f
tr.write(b'data')
tr.write_eof()
self.assertTrue(tr.is_closing())
self.assertFalse(self.sock.shutdown.called)
tr._loop._proactor.send.assert_called_with(self.sock, b'data')
f.set_result(4)
self.loop._run_once()
self.loop._run_once()
self.assertTrue(self.sock.close.called)
tr.close()
def test_write_eof_duplex_pipe(self):
tr = _ProactorDuplexPipeTransport(
self.loop, self.sock, self.protocol)
self.assertFalse(tr.can_write_eof())
with self.assertRaises(NotImplementedError):
tr.write_eof()
close_transport(tr)
def test_pause_resume_reading(self):
tr = self.socket_transport()
futures = []
for msg in [b'data1', b'data2', b'data3', b'data4', b'data5', b'']:
f = self.loop.create_future()
f.set_result(msg)
futures.append(f)
self.loop._proactor.recv.side_effect = futures
self.loop._run_once()
self.assertFalse(tr._paused)
self.assertTrue(tr.is_reading())
self.loop._run_once()
self.protocol.data_received.assert_called_with(b'data1')
self.loop._run_once()
self.protocol.data_received.assert_called_with(b'data2')
tr.pause_reading()
tr.pause_reading()
self.assertTrue(tr._paused)
self.assertFalse(tr.is_reading())
for i in range(10):
self.loop._run_once()
self.protocol.data_received.assert_called_with(b'data2')
tr.resume_reading()
tr.resume_reading()
self.assertFalse(tr._paused)
self.assertTrue(tr.is_reading())
self.loop._run_once()
self.protocol.data_received.assert_called_with(b'data3')
self.loop._run_once()
self.protocol.data_received.assert_called_with(b'data4')
tr.pause_reading()
tr.resume_reading()
self.loop.call_exception_handler = mock.Mock()
self.loop._run_once()
self.loop.call_exception_handler.assert_not_called()
self.protocol.data_received.assert_called_with(b'data5')
tr.close()
self.assertFalse(tr.is_reading())
def pause_writing_transport(self, high):
tr = self.socket_transport()
tr.set_write_buffer_limits(high=high)
self.assertEqual(tr.get_write_buffer_size(), 0)
self.assertFalse(self.protocol.pause_writing.called)
self.assertFalse(self.protocol.resume_writing.called)
return tr
def test_pause_resume_writing(self):
tr = self.pause_writing_transport(high=4)
# write a large chunk, must pause writing
fut = self.loop.create_future()
self.loop._proactor.send.return_value = fut
tr.write(b'large data')
self.loop._run_once()
self.assertTrue(self.protocol.pause_writing.called)
# flush the buffer
fut.set_result(None)
self.loop._run_once()
self.assertEqual(tr.get_write_buffer_size(), 0)
self.assertTrue(self.protocol.resume_writing.called)
def test_pause_writing_2write(self):
tr = self.pause_writing_transport(high=4)
# first short write, the buffer is not full (3 <= 4)
fut1 = self.loop.create_future()
self.loop._proactor.send.return_value = fut1
tr.write(b'123')
self.loop._run_once()
self.assertEqual(tr.get_write_buffer_size(), 3)
self.assertFalse(self.protocol.pause_writing.called)
# fill the buffer, must pause writing (6 > 4)
tr.write(b'abc')
self.loop._run_once()
self.assertEqual(tr.get_write_buffer_size(), 6)
self.assertTrue(self.protocol.pause_writing.called)
def test_pause_writing_3write(self):
tr = self.pause_writing_transport(high=4)
# first short write, the buffer is not full (1 <= 4)
fut = self.loop.create_future()
self.loop._proactor.send.return_value = fut
tr.write(b'1')
self.loop._run_once()
self.assertEqual(tr.get_write_buffer_size(), 1)
self.assertFalse(self.protocol.pause_writing.called)
# second short write, the buffer is not full (3 <= 4)
tr.write(b'23')
self.loop._run_once()
self.assertEqual(tr.get_write_buffer_size(), 3)
self.assertFalse(self.protocol.pause_writing.called)
# fill the buffer, must pause writing (6 > 4)
tr.write(b'abc')
self.loop._run_once()
self.assertEqual(tr.get_write_buffer_size(), 6)
self.assertTrue(self.protocol.pause_writing.called)
def test_dont_pause_writing(self):
tr = self.pause_writing_transport(high=4)
# write a large chunk which completes immediately,
# it should not pause writing
fut = self.loop.create_future()
fut.set_result(None)
self.loop._proactor.send.return_value = fut
tr.write(b'very large data')
self.loop._run_once()
self.assertEqual(tr.get_write_buffer_size(), 0)
self.assertFalse(self.protocol.pause_writing.called)
class ProactorDatagramTransportTests(test_utils.TestCase):
def setUp(self):
super().setUp()
self.loop = self.new_test_loop()
self.proactor = mock.Mock()
self.loop._proactor = self.proactor
self.protocol = test_utils.make_test_protocol(asyncio.DatagramProtocol)
self.sock = mock.Mock(spec_set=socket.socket)
self.sock.fileno.return_value = 7
def datagram_transport(self, address=None):
self.sock.getpeername.side_effect = None if address else OSError
transport = _ProactorDatagramTransport(self.loop, self.sock,
self.protocol,
address=address)
self.addCleanup(close_transport, transport)
return transport
def test_sendto(self):
data = b'data'
transport = self.datagram_transport()
transport.sendto(data, ('0.0.0.0', 1234))
self.assertTrue(self.proactor.sendto.called)
self.proactor.sendto.assert_called_with(
self.sock, data, addr=('0.0.0.0', 1234))
def test_sendto_bytearray(self):
data = bytearray(b'data')
transport = self.datagram_transport()
transport.sendto(data, ('0.0.0.0', 1234))
self.assertTrue(self.proactor.sendto.called)
self.proactor.sendto.assert_called_with(
self.sock, b'data', addr=('0.0.0.0', 1234))
def test_sendto_memoryview(self):
data = memoryview(b'data')
transport = self.datagram_transport()
transport.sendto(data, ('0.0.0.0', 1234))
self.assertTrue(self.proactor.sendto.called)
self.proactor.sendto.assert_called_with(
self.sock, b'data', addr=('0.0.0.0', 1234))
def test_sendto_no_data(self):
transport = self.datagram_transport()
transport._buffer.append((b'data', ('0.0.0.0', 12345)))
transport.sendto(b'', ())
self.assertFalse(self.sock.sendto.called)
self.assertEqual(
[(b'data', ('0.0.0.0', 12345))], list(transport._buffer))
def test_sendto_buffer(self):
transport = self.datagram_transport()
transport._buffer.append((b'data1', ('0.0.0.0', 12345)))
transport._write_fut = object()
transport.sendto(b'data2', ('0.0.0.0', 12345))
self.assertFalse(self.proactor.sendto.called)
self.assertEqual(
[(b'data1', ('0.0.0.0', 12345)),
(b'data2', ('0.0.0.0', 12345))],
list(transport._buffer))
def test_sendto_buffer_bytearray(self):
data2 = bytearray(b'data2')
transport = self.datagram_transport()
transport._buffer.append((b'data1', ('0.0.0.0', 12345)))
transport._write_fut = object()
transport.sendto(data2, ('0.0.0.0', 12345))
self.assertFalse(self.proactor.sendto.called)
self.assertEqual(
[(b'data1', ('0.0.0.0', 12345)),
(b'data2', ('0.0.0.0', 12345))],
list(transport._buffer))
self.assertIsInstance(transport._buffer[1][0], bytes)
def test_sendto_buffer_memoryview(self):
data2 = memoryview(b'data2')
transport = self.datagram_transport()
transport._buffer.append((b'data1', ('0.0.0.0', 12345)))
transport._write_fut = object()
transport.sendto(data2, ('0.0.0.0', 12345))
self.assertFalse(self.proactor.sendto.called)
self.assertEqual(
[(b'data1', ('0.0.0.0', 12345)),
(b'data2', ('0.0.0.0', 12345))],
list(transport._buffer))
self.assertIsInstance(transport._buffer[1][0], bytes)
@mock.patch('asyncio.proactor_events.logger')
def test_sendto_exception(self, m_log):
data = b'data'
err = self.proactor.sendto.side_effect = RuntimeError()
transport = self.datagram_transport()
transport._fatal_error = mock.Mock()
transport.sendto(data, ())
self.assertTrue(transport._fatal_error.called)
transport._fatal_error.assert_called_with(
err,
'Fatal write error on datagram transport')
transport._conn_lost = 1
transport._address = ('123',)
transport.sendto(data)
transport.sendto(data)
transport.sendto(data)
transport.sendto(data)
transport.sendto(data)
m_log.warning.assert_called_with('socket.sendto() raised exception.')
def test_sendto_error_received(self):
data = b'data'
self.sock.sendto.side_effect = ConnectionRefusedError
transport = self.datagram_transport()
transport._fatal_error = mock.Mock()
transport.sendto(data, ())
self.assertEqual(transport._conn_lost, 0)
self.assertFalse(transport._fatal_error.called)
def test_sendto_error_received_connected(self):
data = b'data'
self.proactor.send.side_effect = ConnectionRefusedError
transport = self.datagram_transport(address=('0.0.0.0', 1))
transport._fatal_error = mock.Mock()
transport.sendto(data)
self.assertFalse(transport._fatal_error.called)
self.assertTrue(self.protocol.error_received.called)
def test_sendto_str(self):
transport = self.datagram_transport()
self.assertRaises(TypeError, transport.sendto, 'str', ())
def test_sendto_connected_addr(self):
transport = self.datagram_transport(address=('0.0.0.0', 1))
self.assertRaises(
ValueError, transport.sendto, b'str', ('0.0.0.0', 2))
def test_sendto_closing(self):
transport = self.datagram_transport(address=(1,))
transport.close()
self.assertEqual(transport._conn_lost, 1)
transport.sendto(b'data', (1,))
self.assertEqual(transport._conn_lost, 2)
def test__loop_writing_closing(self):
transport = self.datagram_transport()
transport._closing = True
transport._loop_writing()
self.assertIsNone(transport._write_fut)
test_utils.run_briefly(self.loop)
self.sock.close.assert_called_with()
self.protocol.connection_lost.assert_called_with(None)
def test__loop_writing_exception(self):
err = self.proactor.sendto.side_effect = RuntimeError()
transport = self.datagram_transport()
transport._fatal_error = mock.Mock()
transport._buffer.append((b'data', ()))
transport._loop_writing()
transport._fatal_error.assert_called_with(
err,
'Fatal write error on datagram transport')
def test__loop_writing_error_received(self):
self.proactor.sendto.side_effect = ConnectionRefusedError
transport = self.datagram_transport()
transport._fatal_error = mock.Mock()
transport._buffer.append((b'data', ()))
transport._loop_writing()
self.assertFalse(transport._fatal_error.called)
def test__loop_writing_error_received_connection(self):
self.proactor.send.side_effect = ConnectionRefusedError
transport = self.datagram_transport(address=('0.0.0.0', 1))
transport._fatal_error = mock.Mock()
transport._buffer.append((b'data', ()))
transport._loop_writing()
self.assertFalse(transport._fatal_error.called)
self.assertTrue(self.protocol.error_received.called)
@mock.patch('asyncio.base_events.logger.error')
def test_fatal_error_connected(self, m_exc):
transport = self.datagram_transport(address=('0.0.0.0', 1))
err = ConnectionRefusedError()
transport._fatal_error(err)
self.assertFalse(self.protocol.error_received.called)
m_exc.assert_not_called()
class BaseProactorEventLoopTests(test_utils.TestCase):
def setUp(self):
super().setUp()
self.sock = test_utils.mock_nonblocking_socket()
self.proactor = mock.Mock()
self.ssock, self.csock = mock.Mock(), mock.Mock()
with mock.patch('asyncio.proactor_events.socket.socketpair',
return_value=(self.ssock, self.csock)):
with mock.patch('signal.set_wakeup_fd'):
self.loop = BaseProactorEventLoop(self.proactor)
self.set_event_loop(self.loop)
@mock.patch('asyncio.proactor_events.socket.socketpair')
def test_ctor(self, socketpair):
ssock, csock = socketpair.return_value = (
mock.Mock(), mock.Mock())
with mock.patch('signal.set_wakeup_fd'):
loop = BaseProactorEventLoop(self.proactor)
self.assertIs(loop._ssock, ssock)
self.assertIs(loop._csock, csock)
self.assertEqual(loop._internal_fds, 1)
loop.close()
def test_close_self_pipe(self):
self.loop._close_self_pipe()
self.assertEqual(self.loop._internal_fds, 0)
self.assertTrue(self.ssock.close.called)
self.assertTrue(self.csock.close.called)
self.assertIsNone(self.loop._ssock)
self.assertIsNone(self.loop._csock)
# Don't call close(): _close_self_pipe() cannot be called twice
self.loop._closed = True
def test_close(self):
self.loop._close_self_pipe = mock.Mock()
self.loop.close()
self.assertTrue(self.loop._close_self_pipe.called)
self.assertTrue(self.proactor.close.called)
self.assertIsNone(self.loop._proactor)
self.loop._close_self_pipe.reset_mock()
self.loop.close()
self.assertFalse(self.loop._close_self_pipe.called)
def test_make_socket_transport(self):
tr = self.loop._make_socket_transport(self.sock, asyncio.Protocol())
self.assertIsInstance(tr, _ProactorSocketTransport)
close_transport(tr)
def test_loop_self_reading(self):
self.loop._loop_self_reading()
self.proactor.recv.assert_called_with(self.ssock, 4096)
self.proactor.recv.return_value.add_done_callback.assert_called_with(
self.loop._loop_self_reading)
def test_loop_self_reading_fut(self):
fut = mock.Mock()
self.loop._self_reading_future = fut
self.loop._loop_self_reading(fut)
self.assertTrue(fut.result.called)
self.proactor.recv.assert_called_with(self.ssock, 4096)
self.proactor.recv.return_value.add_done_callback.assert_called_with(
self.loop._loop_self_reading)
def test_loop_self_reading_exception(self):
self.loop.call_exception_handler = mock.Mock()
self.proactor.recv.side_effect = OSError()
self.loop._loop_self_reading()
self.assertTrue(self.loop.call_exception_handler.called)
def test_write_to_self(self):
self.loop._write_to_self()
self.csock.send.assert_called_with(b'\0')
def test_process_events(self):
self.loop._process_events([])
@mock.patch('asyncio.base_events.logger')
def test_create_server(self, m_log):
pf = mock.Mock()
call_soon = self.loop.call_soon = mock.Mock()
self.loop._start_serving(pf, self.sock)
self.assertTrue(call_soon.called)
# callback
loop = call_soon.call_args[0][0]
loop()
self.proactor.accept.assert_called_with(self.sock)
# conn
fut = mock.Mock()
fut.result.return_value = (mock.Mock(), mock.Mock())
make_tr = self.loop._make_socket_transport = mock.Mock()
loop(fut)
self.assertTrue(fut.result.called)
self.assertTrue(make_tr.called)
# exception
fut.result.side_effect = OSError()
loop(fut)
self.assertTrue(self.sock.close.called)
self.assertTrue(m_log.error.called)
def test_create_server_cancel(self):
pf = mock.Mock()
call_soon = self.loop.call_soon = mock.Mock()
self.loop._start_serving(pf, self.sock)
loop = call_soon.call_args[0][0]
# cancelled
fut = self.loop.create_future()
fut.cancel()
loop(fut)
self.assertTrue(self.sock.close.called)
def test_stop_serving(self):
sock1 = mock.Mock()
future1 = mock.Mock()
sock2 = mock.Mock()
future2 = mock.Mock()
self.loop._accept_futures = {
sock1.fileno(): future1,
sock2.fileno(): future2
}
self.loop._stop_serving(sock1)
self.assertTrue(sock1.close.called)
self.assertTrue(future1.cancel.called)
self.proactor._stop_serving.assert_called_with(sock1)
self.assertFalse(sock2.close.called)
self.assertFalse(future2.cancel.called)
def datagram_transport(self):
self.protocol = test_utils.make_test_protocol(asyncio.DatagramProtocol)
return self.loop._make_datagram_transport(self.sock, self.protocol)
def test_make_datagram_transport(self):
tr = self.datagram_transport()
self.assertIsInstance(tr, _ProactorDatagramTransport)
self.assertIsInstance(tr, asyncio.DatagramTransport)
close_transport(tr)
def test_datagram_loop_writing(self):
tr = self.datagram_transport()
tr._buffer.appendleft((b'data', ('127.0.0.1', 12068)))
tr._loop_writing()
self.loop._proactor.sendto.assert_called_with(self.sock, b'data', addr=('127.0.0.1', 12068))
self.loop._proactor.sendto.return_value.add_done_callback.\
assert_called_with(tr._loop_writing)
close_transport(tr)
def test_datagram_loop_reading(self):
tr = self.datagram_transport()
tr._loop_reading()
self.loop._proactor.recvfrom.assert_called_with(self.sock, 256 * 1024)
self.assertFalse(self.protocol.datagram_received.called)
self.assertFalse(self.protocol.error_received.called)
close_transport(tr)
def test_datagram_loop_reading_data(self):
res = self.loop.create_future()
res.set_result((b'data', ('127.0.0.1', 12068)))
tr = self.datagram_transport()
tr._read_fut = res
tr._loop_reading(res)
self.loop._proactor.recvfrom.assert_called_with(self.sock, 256 * 1024)
self.protocol.datagram_received.assert_called_with(b'data', ('127.0.0.1', 12068))
close_transport(tr)
@unittest.skipIf(sys.flags.optimize, "Assertions are disabled in optimized mode")
def test_datagram_loop_reading_no_data(self):
res = self.loop.create_future()
res.set_result((b'', ('127.0.0.1', 12068)))
tr = self.datagram_transport()
self.assertRaises(AssertionError, tr._loop_reading, res)
tr.close = mock.Mock()
tr._read_fut = res
tr._loop_reading(res)
self.assertTrue(self.loop._proactor.recvfrom.called)
self.assertFalse(self.protocol.error_received.called)
self.assertFalse(tr.close.called)
close_transport(tr)
def test_datagram_loop_reading_aborted(self):
err = self.loop._proactor.recvfrom.side_effect = ConnectionAbortedError()
tr = self.datagram_transport()
tr._fatal_error = mock.Mock()
tr._protocol.error_received = mock.Mock()
tr._loop_reading()
tr._protocol.error_received.assert_called_with(err)
close_transport(tr)
def test_datagram_loop_writing_aborted(self):
err = self.loop._proactor.sendto.side_effect = ConnectionAbortedError()
tr = self.datagram_transport()
tr._fatal_error = mock.Mock()
tr._protocol.error_received = mock.Mock()
tr._buffer.appendleft((b'Hello', ('127.0.0.1', 12068)))
tr._loop_writing()
tr._protocol.error_received.assert_called_with(err)
close_transport(tr)
@unittest.skipIf(sys.platform != 'win32',
'Proactor is supported on Windows only')
class ProactorEventLoopUnixSockSendfileTests(test_utils.TestCase):
DATA = b"12345abcde" * 16 * 1024 # 160 KiB
class MyProto(asyncio.Protocol):
def __init__(self, loop):
self.started = False
self.closed = False
self.data = bytearray()
self.fut = loop.create_future()
self.transport = None
def connection_made(self, transport):
self.started = True
self.transport = transport
def data_received(self, data):
self.data.extend(data)
def connection_lost(self, exc):
self.closed = True
self.fut.set_result(None)
async def wait_closed(self):
await self.fut
@classmethod
def setUpClass(cls):
with open(support.TESTFN, 'wb') as fp:
fp.write(cls.DATA)
super().setUpClass()
@classmethod
def tearDownClass(cls):
support.unlink(support.TESTFN)
super().tearDownClass()
def setUp(self):
self.loop = asyncio.ProactorEventLoop()
self.set_event_loop(self.loop)
self.addCleanup(self.loop.close)
self.file = open(support.TESTFN, 'rb')
self.addCleanup(self.file.close)
super().setUp()
def make_socket(self, cleanup=True):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setblocking(False)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 1024)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 1024)
if cleanup:
self.addCleanup(sock.close)
return sock
def run_loop(self, coro):
return self.loop.run_until_complete(coro)
def prepare(self):
sock = self.make_socket()
proto = self.MyProto(self.loop)
port = socket_helper.find_unused_port()
srv_sock = self.make_socket(cleanup=False)
srv_sock.bind(('127.0.0.1', port))
server = self.run_loop(self.loop.create_server(
lambda: proto, sock=srv_sock))
self.run_loop(self.loop.sock_connect(sock, srv_sock.getsockname()))
def cleanup():
if proto.transport is not None:
# can be None if the task was cancelled before
# connection_made callback
proto.transport.close()
self.run_loop(proto.wait_closed())
server.close()
self.run_loop(server.wait_closed())
self.addCleanup(cleanup)
return sock, proto
def test_sock_sendfile_not_a_file(self):
sock, proto = self.prepare()
f = object()
with self.assertRaisesRegex(asyncio.SendfileNotAvailableError,
"not a regular file"):
self.run_loop(self.loop._sock_sendfile_native(sock, f,
0, None))
self.assertEqual(self.file.tell(), 0)
def test_sock_sendfile_iobuffer(self):
sock, proto = self.prepare()
f = io.BytesIO()
with self.assertRaisesRegex(asyncio.SendfileNotAvailableError,
"not a regular file"):
self.run_loop(self.loop._sock_sendfile_native(sock, f,
0, None))
self.assertEqual(self.file.tell(), 0)
def test_sock_sendfile_not_regular_file(self):
sock, proto = self.prepare()
f = mock.Mock()
f.fileno.return_value = -1
with self.assertRaisesRegex(asyncio.SendfileNotAvailableError,
"not a regular file"):
self.run_loop(self.loop._sock_sendfile_native(sock, f,
0, None))
self.assertEqual(self.file.tell(), 0)
if __name__ == '__main__':
unittest.main()
| 35.811395 | 100 | 0.642967 |
acf216a121361ac43b8560620903c02788359ec7 | 8,401 | py | Python | vagrant/auditd/replayUserAudit.py | Sebisnow/blogcode | 94589f08ce097b07ccae1b1cb48b013bff9a98ce | [
"MIT"
] | 34 | 2017-05-19T12:46:28.000Z | 2022-02-03T10:51:25.000Z | vagrant/auditd/replayUserAudit.py | Sebisnow/blogcode | 94589f08ce097b07ccae1b1cb48b013bff9a98ce | [
"MIT"
] | 2 | 2018-05-10T15:33:02.000Z | 2019-02-21T13:10:14.000Z | vagrant/auditd/replayUserAudit.py | Sebisnow/blogcode | 94589f08ce097b07ccae1b1cb48b013bff9a98ce | [
"MIT"
] | 32 | 2017-05-08T03:48:16.000Z | 2022-03-11T15:51:55.000Z | #!/usr/bin/python3
import logging
import sys
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
import time
import os
import pwd
from datetime import date,timedelta,datetime
import subprocess
from pprint import pprint
import locale
# argument check
if len(sys.argv)<2:
print ("USAGE: username [startDate=YYYY-MM-DD|today|yesterday|this-week]")
print ("EXAMPLE: alice today")
sys.exit(1)
# basic audit record fields
class AuditRecord:
	def __init__(self):
		# initialize the basic audit record fields on the instance so every
		# record has sane defaults before parsing fills them in
		self.ts = date.today()
		self.type = ""
		self.pid = 0
		self.ppid = 0
		self.euid = 0
		self.auid = 0
		# USER_CMD has response code and full command
		self.user_res = ""
		self.user_cmd = ""
		# EXECVE has full command
		self.execve_fullcmd = ""
		# SYSCALL has success (yes|no) and exit code
		# if success=no, then no EXECVE
		self.syscall_success = ""
		self.syscall_exit = 0
def callProcess(cmdstr):
# search audit trail for userid
print ("exec: {}".format(cmdstr))
p = subprocess.Popen(cmdstr, stdout=subprocess.PIPE, shell=True)
(output, err) = p.communicate()
p_status = p.wait()
return output, p_status, err
def splitAndMakeDict(str,splitBy,keyBy):
dict = {}
fields = str.split(splitBy)
for f in fields:
if keyBy in f:
(k,v) = f.split(keyBy,1)
dict[k] = v
return dict
def concatDictArgs(dict,keyPrefix):
retstr = ""
for x in range(0,20):
lookForKey = "{}{}".format(keyPrefix,x)
#print ("looking for key {}".format(lookForKey))
if lookForKey in dict:
retstr = retstr + dict[lookForKey].strip('\"') + " "
return retstr
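# Illustrative sketch (added note, not from the original script): for a raw EXECVE
# line such as
#   type=EXECVE msg=audit(1601570037.123:456): argc=3 a0="sudo" a1="systemctl" a2="status"
# splitAndMakeDict(line, ' ', '=') yields {'type': 'EXECVE', 'a0': '"sudo"', ...} and
# concatDictArgs(d, 'a') reassembles the command string 'sudo systemctl status '.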
def parseRecordList(rawList):
output = rawList
recList = []
# iterate through record set, decode() necessary for python3
recordlist = output.decode().split('----')
for record in recordlist:
# go through each line in a record
auditrec = AuditRecord()
linelist = record.split('\n')
for line in linelist:
if line.strip()=="----" or line.strip()=="" :
continue
if line.startswith("time->"):
line = line[6:]
datetime_object = datetime.strptime(line,'%a %b %d %H:%M:%S %Y')
auditrec.ts = datetime_object.strftime('%Y-%m-%d %H:%M:%S')
if line.startswith("type=EXECVE"):
dict = splitAndMakeDict(line,' ','=')
auditrec.execve_fullcmd = concatDictArgs(dict,'a')
if line.startswith("type=USER_CMD") or line.startswith("type=SYSCALL") or line.startswith("type=USER_AUTH"):
#print ("LINE {}".format(line))
dict = splitAndMakeDict(line,' ','=')
auditrec.type = dict['type']
if 'uid' in dict:
auditrec.uid = dict['uid']
if 'pid' in dict:
auditrec.pid = dict['pid']
if 'ppid' in dict:
auditrec.ppid = dict['ppid']
if 'auid' in dict:
auditrec.auid = dict['auid']
if 'res' in dict:
# USER_CMD success code
#print ("found 'res' {}".format(dict['res']))
auditrec.user_res = dict['res']
# chop off last char if single quote
if auditrec.user_res.endswith("'"):
auditrec.user_res = auditrec.user_res[:-1]
if 'success' in dict:
auditrec.syscall_success = dict['success']
if auditrec.syscall_success=="yes":
auditrec.syscall_success = "success"
elif auditrec.syscall_success=="no":
auditrec.syscall_success = "fail"
if 'exit' in dict:
auditrec.syscall_exit = dict['exit']
if 'cmd' in dict:
# cmd needs to be parsed manually
index1 = line.index("cmd=")
# up to next field with =
index2 = line.index("=",index1+5)
auditrec.user_cmd = line[index1:index2]
# still need to chop off last field
lastspace = auditrec.user_cmd.rindex(' ')
auditrec.user_cmd = auditrec.user_cmd[:lastspace]
# used when not using '-i'
#try:
# auditrec.user_cmd = dict['cmd'].decode("hex")
#except:
# auditrec.user_cmd = dict['cmd']
if "msg=audit(" in line:
# timestamp needs to be parsed manually
index1 = line.index("msg=audit(")
index2 = line.index(")",index1+10)
auditrec.ts = line[index1+10:index2]
if False: #line.startswith("type=SYSCALL"):
# only SYSCALL, and no EXECVE if failure
auditrec.type = "SYSCALL"
dict = splitAndMakeDict(line,' ','=')
auditrec.syscall_success = dict['success']
if auditrec.syscall_success=="yes":
auditrec.syscall_success = "success"
elif auditrec.syscall_success=="no":
auditrec.syscall_success = "fail"
auditrec.syscall_exit = dict['exit']
# add record to returning list
recList.append(auditrec)
return recList
############### MAIN ###############################
# simple parse for arguments
# use today startdate if not specified
username = sys.argv[1]
if len(sys.argv)<3:
curlocale = locale.getdefaultlocale()
locale.setlocale(locale.LC_TIME, curlocale)
today = date.today()
    # hardcoding this date format means other locales would not work
    #startDate = today.strftime('%m/%d/%Y')
    startDate = time.strftime('%x')
    print ("Getting today's date {} based on locale {}".format(startDate,curlocale))
else:
startDate = sys.argv[2]
print ("startDate: " + startDate)
# must run as root/sudo to search audit reports
if os.geteuid()!=0:
print ("ERROR - must run as root/sudo in order to search audit logs")
sys.exit(2)
# get userid based on name
try:
userid = pwd.getpwnam(username).pw_uid
except:
print ("ERROR trying to resolve user id for {}".format(username))
sys.exit(3)
print ("Going to trace commands for user {} with id {}".format(username,userid))
# search audit trail for userid
cmdstr="sudo ausearch -ui {} -i -ts {}".format(userid,startDate)
(output, pstatus, err) = callProcess(cmdstr)
#print ("Command return code: ", pstatus)
# iterate through record list
print ("----Commands by {} starting at {}-----".format(username,startDate))
recList = parseRecordList(output)
for p in recList:
if not hasattr(p,'type'):
continue
#pprint(vars(p))
if p.type.startswith("USER_CMD"):
print ("{:10s} {:10s} at {} by {}/{} executing {}".format(p.type,p.user_res,p.ts,p.auid,p.uid,p.user_cmd))
elif p.type.startswith("SYSCALL"):
print ("{:10s} {:10s} at {} by {}/{} executing {}".format(p.type,p.syscall_success,p.ts,p.auid,p.uid,p.execve_fullcmd if hasattr(p,"execve_fullcmd") else "?"))
elif p.type.startswith("USER_AUTH"):
print ("{:10s} {:10s} at {} by {}/{}".format(p.type,p.user_res,p.ts,p.auid,p.uid))
sys.exit(4)
for rec in recList:
pprint(vars(rec))
# search audit trail for process
try:
cmdstr="sudo ausearch -p {}".format(rec.user_pid)
(output, pstatus, err) = callProcess(cmdstr)
#print ("Command return code: ", pstatus)
print ("PROCESS")
pList = parseRecordList(output)
for p in pList:
#pprint(vars(p))
print ("{} by {}/{} executing {}".format(p.user_res,p.ts,p.auid,p.uid,p.user_cmd))
except AttributeError:
pass # ok if pid does not exist for record
# search audit trail for parent process
try:
cmdstr="sudo ausearch -pp {}".format(rec.user_ppid)
(output, pstatus, err) = callProcess(cmdstr)
#print ("Command return code: ", pstatus)
print ("PARENT PROCESS")
ppList = parseRecordList(output)
for pp in ppList:
pprint(vars(pp))
except AttributeError:
pass # ok if parent pid does not exist for record
# search root audit trail for process
try:
cmdstr="sudo ausearch -k rootcmd -p {}".format(rec.user_ppid)
(output, pstatus, err) = callProcess(cmdstr)
#print ("Command return code: ", pstatus)
print ("ROOT PROCESS")
ppList = parseRecordList(output)
for pp in ppList:
pprint(vars(pp))
except AttributeError:
pass # ok if parent pid does not exist for record
# search root audit trail for parent process
try:
cmdstr="sudo ausearch -k rootcmd -pp {}".format(rec.user_ppid)
(output, pstatus, err) = callProcess(cmdstr)
#print ("Command return code: ", pstatus)
print ("ROOT PARENT PROCESS")
ppList = parseRecordList(output)
for pp in ppList:
pprint(vars(pp))
except AttributeError:
pass # ok if parent pid does not exist for record
print ("***REC******************************************\n\n")
| 31.347015 | 163 | 0.626235 |
acf217612c1279dfc1ee9951f8bac6d2a3189fe6 | 651 | py | Python | tests/models/NFM_test.py | YoungXueya/DeepCTR | 51835e7de83049bfeaa92cd53ee394446f797822 | [
"Apache-2.0"
] | 2 | 2020-06-15T06:18:32.000Z | 2020-07-24T07:05:55.000Z | tests/models/NFM_test.py | YoungXueya/DeepCTR | 51835e7de83049bfeaa92cd53ee394446f797822 | [
"Apache-2.0"
] | null | null | null | tests/models/NFM_test.py | YoungXueya/DeepCTR | 51835e7de83049bfeaa92cd53ee394446f797822 | [
"Apache-2.0"
] | 3 | 2020-05-31T12:43:38.000Z | 2021-01-17T00:17:27.000Z | import pytest
from deepctr.models import NFM
from ..utils import check_model, get_test_data,SAMPLE_SIZE
@pytest.mark.parametrize(
'hidden_size,sparse_feature_num',
[((8,), 1), ((8, 8,), 2)]
)
def test_NFM(hidden_size, sparse_feature_num):
model_name = "NFM"
sample_size = SAMPLE_SIZE
x, y, feature_columns = get_test_data(sample_size, sparse_feature_num=sparse_feature_num,
dense_feature_num=sparse_feature_num)
model = NFM(feature_columns,feature_columns, dnn_hidden_units=[32, 32], dnn_dropout=0.5)
check_model(model, model_name, x, y)
if __name__ == "__main__":
pass
| 27.125 | 93 | 0.69278 |
acf219353a6a09a08fa9ce48093aeba852d07909 | 4,295 | py | Python | pyinfra/facts/init.py | vamshi091211/pyinfra | 6e14b039422e00ebc68110eabbc6a3a543c96279 | [
"MIT"
] | 1 | 2022-03-24T05:44:45.000Z | 2022-03-24T05:44:45.000Z | pyinfra/facts/init.py | marinakravchenko21/pyinfra | 6e14b039422e00ebc68110eabbc6a3a543c96279 | [
"MIT"
] | null | null | null | pyinfra/facts/init.py | marinakravchenko21/pyinfra | 6e14b039422e00ebc68110eabbc6a3a543c96279 | [
"MIT"
] | 1 | 2021-11-12T18:36:01.000Z | 2021-11-12T18:36:01.000Z | import re
from pyinfra.api import FactBase
class UpstartStatus(FactBase):
'''
Returns a dict of name -> status for upstart managed services.
'''
command = 'initctl list'
regex = r'^([a-z\-]+) [a-z]+\/([a-z]+)'
default = dict
def process(self, output):
services = {}
for line in output:
matches = re.match(self.regex, line)
if matches:
services[matches.group(1)] = matches.group(2) == 'running'
return services
class SystemdStatus(FactBase):
'''
Returns a dict of name -> status for systemd managed services.
'''
command = 'systemctl -al list-units'
regex = r'^([a-zA-Z\-0-9]+\.[a-z]+)\s+[a-z\-]+\s+[a-z]+\s+([a-z]+)'
default = dict
def process(self, output):
services = {}
for line in output:
matches = re.match(self.regex, line)
if matches:
services[matches.group(1)] = matches.group(2) == 'running'
return services
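# Illustrative sketch (not part of the original module): a `systemctl -al
# list-units` line such as
#   "ssh.service loaded active running OpenBSD Secure Shell server"
# matches the regex above and yields {'ssh.service': True}; any other
# sub-state (e.g. "exited" or "dead") maps the unit to False.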
class SystemdEnabled(FactBase):
'''
Returns a dict of name -> whether enabled for systemd managed services.
'''
command = '''
systemctl --no-legend -al list-unit-files | while read -r UNIT STATUS; do
if [ "$STATUS" = generated ] &&
systemctl is-enabled $UNIT >/dev/null 2>&1; then
STATUS=enabled
fi
echo $UNIT $STATUS
done
'''
regex = r'^([a-zA-Z0-9@\-]+\.[a-z]+)\s+([a-z]+)'
default = dict
def process(self, output):
units = {}
for line in output:
matches = re.match(self.regex, line)
if matches:
units[matches.group(1)] = (
matches.group(2) in ('enabled', 'static')
)
return units
class InitdStatus(FactBase):
'''
Low level check for every /etc/init.d/* script. Unfortunately many of these
    misbehave and return exit status 0 while also displaying the help info/not
offering status support.
Returns a dict of name -> status.
Expected codes found at:
http://refspecs.linuxbase.org/LSB_3.1.0/LSB-Core-generic/LSB-Core-generic/iniscrptact.html
'''
command = '''
for SERVICE in `ls /etc/init.d/`; do
_=`cat /etc/init.d/$SERVICE | grep "### BEGIN INIT INFO"`
if [ "$?" = "0" ]; then
STATUS=`/etc/init.d/$SERVICE status`
echo "$SERVICE=$?"
fi
done
'''
regex = r'([a-zA-Z0-9\-]+)=([0-9]+)'
default = dict
def process(self, output):
services = {}
for line in output:
matches = re.match(self.regex, line)
if matches:
status = int(matches.group(2))
# Exit code 0 = OK/running
if status == 0:
status = True
# Exit codes 1-3 = DOWN/not running
elif status < 4:
status = False
# Exit codes 4+ = unknown
else:
status = None
services[matches.group(1)] = status
return services
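# Illustrative sketch (not part of the original module): the mapping above
# turns LSB status exit codes into a tri-state value, e.g.
#   "nginx=0" -> services['nginx'] = True    (running)
#   "nginx=3" -> services['nginx'] = False   (not running)
#   "nginx=5" -> services['nginx'] = None    (status unknown/unsupported)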
class RcdStatus(InitdStatus):
'''
Same as ``initd_status`` but for BSD (/etc/rc.d) systems. Unlike Linux/init.d,
BSD init scripts are well behaved and as such their output can be trusted.
'''
command = '''
for SERVICE in `ls /etc/rc.d/`; do
_=`cat /etc/rc.d/$SERVICE | grep "daemon="`
if [ "$?" = "0" ]; then
STATUS=`/etc/rc.d/$SERVICE check`
echo "$SERVICE=$?"
fi
done
'''
default = dict
class LaunchdStatus(FactBase):
'''
Returns a dict of name -> status for launchd managed services.
'''
command = 'launchctl list'
default = dict
def process(self, output):
services = {}
for line in output:
bits = line.split()
if not bits or bits[0] == 'PID':
continue
name = bits[2]
status = False
try:
int(bits[0])
status = True
except ValueError:
pass
services[name] = status
return services
| 24.265537 | 98 | 0.505006 |
acf219ca517e4b3901d8bfc2999e604085976c6d | 3,608 | py | Python | stac_fastapi/sqlalchemy/tests/conftest.py | radiantearth/stac-fastapi | 8c78311a89d0fa25bfb46b60a2ac041bfdec82d9 | [
"MIT"
] | null | null | null | stac_fastapi/sqlalchemy/tests/conftest.py | radiantearth/stac-fastapi | 8c78311a89d0fa25bfb46b60a2ac041bfdec82d9 | [
"MIT"
] | null | null | null | stac_fastapi/sqlalchemy/tests/conftest.py | radiantearth/stac-fastapi | 8c78311a89d0fa25bfb46b60a2ac041bfdec82d9 | [
"MIT"
] | null | null | null | import json
import os
from typing import Callable, Dict
import pytest
from starlette.testclient import TestClient
from stac_fastapi.api.app import StacApi
from stac_fastapi.extensions.core import (
ContextExtension,
FieldsExtension,
QueryExtension,
SortExtension,
TransactionExtension,
)
from stac_fastapi.sqlalchemy.config import SqlalchemySettings
from stac_fastapi.sqlalchemy.core import CoreCrudClient
from stac_fastapi.sqlalchemy.models import database
from stac_fastapi.sqlalchemy.session import Session
from stac_fastapi.sqlalchemy.transactions import (
BulkTransactionsClient,
TransactionsClient,
)
from stac_fastapi.sqlalchemy.types.search import SQLAlchemySTACSearch
from stac_fastapi.types.config import Settings
DATA_DIR = os.path.join(os.path.dirname(__file__), "data")
class TestSettings(SqlalchemySettings):
class Config:
env_file = ".env.test"
settings = TestSettings()
Settings.set(settings)
@pytest.fixture(autouse=True)
def cleanup(postgres_core: CoreCrudClient, postgres_transactions: TransactionsClient):
yield
collections = postgres_core.all_collections(request=MockStarletteRequest)
for coll in collections:
if coll["id"].split("-")[0] == "test":
# Delete the items
items = postgres_core.item_collection(
coll["id"], limit=100, request=MockStarletteRequest
)
for feat in items["features"]:
postgres_transactions.delete_item(
feat["id"], feat["collection"], request=MockStarletteRequest
)
# Delete the collection
postgres_transactions.delete_collection(
coll["id"], request=MockStarletteRequest
)
@pytest.fixture
def load_test_data() -> Callable[[str], Dict]:
def load_file(filename: str) -> Dict:
with open(os.path.join(DATA_DIR, filename)) as file:
return json.load(file)
return load_file
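# Illustrative usage sketch (not part of the original file): a test that
# depends on this fixture can simply do
#   def test_something(load_test_data):
#       coll = load_test_data("test_collection.json")
# where test_something is a hypothetical test name; the call returns the
# parsed JSON dict from DATA_DIR (see the app_client fixture below for the
# same pattern).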
class MockStarletteRequest:
base_url = "http://test-server"
@pytest.fixture
def db_session() -> Session:
return Session(
reader_conn_string=settings.reader_connection_string,
writer_conn_string=settings.writer_connection_string,
)
@pytest.fixture
def postgres_core(db_session):
return CoreCrudClient(
session=db_session,
item_table=database.Item,
collection_table=database.Collection,
token_table=database.PaginationToken,
)
@pytest.fixture
def postgres_transactions(db_session):
return TransactionsClient(
session=db_session,
item_table=database.Item,
collection_table=database.Collection,
)
@pytest.fixture
def postgres_bulk_transactions(db_session):
return BulkTransactionsClient(session=db_session)
@pytest.fixture
def api_client(db_session):
settings = SqlalchemySettings()
return StacApi(
settings=settings,
client=CoreCrudClient(session=db_session),
extensions=[
TransactionExtension(
client=TransactionsClient(session=db_session), settings=settings
),
ContextExtension(),
SortExtension(),
FieldsExtension(),
QueryExtension(),
],
search_request_model=SQLAlchemySTACSearch,
)
@pytest.fixture
def app_client(api_client, load_test_data, postgres_transactions):
coll = load_test_data("test_collection.json")
postgres_transactions.create_collection(coll, request=MockStarletteRequest)
with TestClient(api_client.app) as test_app:
yield test_app
| 27.541985 | 86 | 0.706208 |
acf21a5250b9b320a995874dd8c024125b0d510f | 1,518 | py | Python | scripts/mc_publish_summary.py | HERA-Team/hera_mc | bc4d57e5e9c5ac8cd2a5b1a356de1742c2f70d8e | [
"BSD-2-Clause"
] | 4 | 2018-01-28T06:58:00.000Z | 2020-04-16T11:19:38.000Z | scripts/mc_publish_summary.py | HERA-Team/hera_mc | bc4d57e5e9c5ac8cd2a5b1a356de1742c2f70d8e | [
"BSD-2-Clause"
] | 474 | 2016-10-26T17:29:54.000Z | 2022-02-15T21:51:52.000Z | scripts/mc_publish_summary.py | HERA-Team/hera_mc | bc4d57e5e9c5ac8cd2a5b1a356de1742c2f70d8e | [
"BSD-2-Clause"
] | 2 | 2016-11-15T14:34:55.000Z | 2020-11-02T08:07:19.000Z | #! /usr/bin/env python
# -*- mode: python; coding: utf-8 -*-
# Copyright 2017 the HERA Collaboration
# Licensed under the 2-clause BSD license.
"""This publishes a webpage on hera-today.
If not on qmaster, it just writes the html file.
"""
from hera_mc import mc, cm_utils, cm_sysutils
if __name__ == '__main__':
default_hookup_cols = [
'station', 'feed', 'front-end', 'node-bulkhead', 'post-amp',
'snap', 'node'
]
parser = mc.get_mc_argument_parser()
# set values for 'action' to use
parser.add_argument('-p', '--hpn', help="Part number, csv-list or [default]",
default='default')
parser.add_argument('-e', '--exact-match',
help="Force exact matches on part numbers, not beginning N char. [False]",
dest='exact_match', action='store_true')
parser.add_argument('--hookup-cols',
help="Specify a subset of parts to show comma-delimited no-space list.",
dest='hookup_cols', default=default_hookup_cols)
args = parser.parse_args()
# Pre-process the args
args.hpn = cm_utils.listify(args.hpn)
args.hookup_cols = cm_utils.listify(args.hookup_cols)
# Start session
db = mc.connect_to_mc_db(args)
session = db.sessionmaker()
system = cm_sysutils.Handling(session)
system.publish_summary(hlist=args.hpn, exact_match=args.exact_match,
hookup_cols=args.hookup_cols,
)
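    # Illustrative invocation sketch (not part of the original script); the
    # part numbers below are hypothetical and extra connection options added
    # by mc.get_mc_argument_parser() may also be required:
    #   python mc_publish_summary.py -p HH0,HH23 --hookup-cols station,feed,node
    # cm_utils.listify() is expected to split such comma-separated values into
    # lists before they are handed to publish_summary().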
| 36.142857 | 98 | 0.616601 |
acf21a7a8f1120d29066bca68ed37b236e408666 | 6,899 | py | Python | libs/datasets/sources/can_scraper_state_providers.py | ConsultingMD/covid-data-model | f3d23f98eccca1ce33c7a11ab546e9aab5d0aa4d | [
"MIT"
] | null | null | null | libs/datasets/sources/can_scraper_state_providers.py | ConsultingMD/covid-data-model | f3d23f98eccca1ce33c7a11ab546e9aab5d0aa4d | [
"MIT"
] | null | null | null | libs/datasets/sources/can_scraper_state_providers.py | ConsultingMD/covid-data-model | f3d23f98eccca1ce33c7a11ab546e9aab5d0aa4d | [
"MIT"
] | null | null | null | from libs.datasets import data_source
from covidactnow.datapublic.common_fields import CommonFields
from libs.datasets.sources import can_scraper_helpers as ccd_helpers
class CANScraperStateProviders(data_source.CanScraperBase):
SOURCE_NAME = "CANScrapersStateProviders"
EXPECTED_FIELDS = [
CommonFields.STAFFED_BEDS,
CommonFields.CASES,
CommonFields.DEATHS,
CommonFields.VACCINES_ALLOCATED,
CommonFields.VACCINES_ADMINISTERED,
CommonFields.VACCINES_DISTRIBUTED,
CommonFields.VACCINATIONS_INITIATED,
CommonFields.VACCINATIONS_COMPLETED,
CommonFields.TOTAL_TESTS_VIRAL,
CommonFields.ICU_BEDS,
CommonFields.CURRENT_HOSPITALIZED,
CommonFields.POSITIVE_TESTS_VIRAL,
CommonFields.CURRENT_ICU,
CommonFields.VACCINATIONS_INITIATED_PCT,
CommonFields.VACCINATIONS_COMPLETED_PCT,
]
VARIABLES = [
ccd_helpers.ScraperVariable(variable_name="pcr_tests_negative", provider="state"),
ccd_helpers.ScraperVariable(variable_name="unspecified_tests_total", provider="state"),
ccd_helpers.ScraperVariable(variable_name="unspecified_tests_positive", provider="state"),
ccd_helpers.ScraperVariable(variable_name="icu_beds_available", provider="state"),
ccd_helpers.ScraperVariable(variable_name="antibody_tests_total", provider="state"),
ccd_helpers.ScraperVariable(variable_name="antigen_tests_positive", provider="state"),
ccd_helpers.ScraperVariable(variable_name="antigen_tests_negative", provider="state"),
ccd_helpers.ScraperVariable(
variable_name="total_vaccine_doses_administered", provider="state"
),
ccd_helpers.ScraperVariable(variable_name="hospital_beds_in_use", provider="state"),
ccd_helpers.ScraperVariable(variable_name="ventilators_in_use", provider="state"),
ccd_helpers.ScraperVariable(variable_name="ventilators_available", provider="state"),
ccd_helpers.ScraperVariable(variable_name="ventilators_capacity", provider="state"),
ccd_helpers.ScraperVariable(variable_name="pediatric_icu_beds_in_use", provider="state"),
ccd_helpers.ScraperVariable(variable_name="adult_icu_beds_available", provider="state"),
ccd_helpers.ScraperVariable(variable_name="pediatric_icu_beds_capacity", provider="state"),
ccd_helpers.ScraperVariable(variable_name="unspecified_tests_negative", provider="state"),
ccd_helpers.ScraperVariable(variable_name="antigen_tests_total", provider="state"),
ccd_helpers.ScraperVariable(variable_name="adult_icu_beds_in_use", provider="state"),
ccd_helpers.ScraperVariable(variable_name="hospital_beds_available", provider="state"),
ccd_helpers.ScraperVariable(variable_name="pediatric_icu_beds_available", provider="state"),
ccd_helpers.ScraperVariable(variable_name="adult_icu_beds_capacity", provider="state"),
ccd_helpers.ScraperVariable(variable_name="icu_beds_in_use", provider="state"),
ccd_helpers.ScraperVariable(
variable_name="cases",
measurement="cumulative",
unit="people",
provider="state",
common_field=CommonFields.CASES,
),
ccd_helpers.ScraperVariable(
variable_name="deaths",
measurement="cumulative",
unit="people",
provider="state",
common_field=CommonFields.DEATHS,
),
ccd_helpers.ScraperVariable(
variable_name="hospital_beds_in_use_covid",
measurement="current",
unit="beds",
provider="state",
common_field=CommonFields.CURRENT_HOSPITALIZED,
),
ccd_helpers.ScraperVariable(
variable_name="hospital_beds_capacity",
measurement="current",
unit="beds",
provider="state",
common_field=CommonFields.STAFFED_BEDS,
),
ccd_helpers.ScraperVariable(
variable_name="icu_beds_capacity",
measurement="current",
unit="beds",
provider="state",
common_field=CommonFields.ICU_BEDS,
),
ccd_helpers.ScraperVariable(
variable_name="icu_beds_in_use_covid",
measurement="current",
unit="beds",
provider="state",
common_field=CommonFields.CURRENT_ICU,
),
ccd_helpers.ScraperVariable(
variable_name="pcr_tests_total",
measurement="cumulative",
unit="specimens", # Ignores less common unit=test_encounters and unit=unique_people
provider="state",
common_field=CommonFields.TOTAL_TESTS_VIRAL,
),
ccd_helpers.ScraperVariable(
variable_name="pcr_tests_positive",
measurement="cumulative",
unit="specimens", # Ignores test_encounters and unique_people
provider="state",
common_field=CommonFields.POSITIVE_TESTS_VIRAL,
),
ccd_helpers.ScraperVariable(
variable_name="total_vaccine_allocated",
measurement="cumulative",
unit="doses",
provider="state",
common_field=CommonFields.VACCINES_ALLOCATED,
),
ccd_helpers.ScraperVariable(
variable_name="total_vaccine_distributed",
measurement="cumulative",
unit="doses",
provider="state",
common_field=CommonFields.VACCINES_DISTRIBUTED,
),
ccd_helpers.ScraperVariable(
variable_name="total_vaccine_initiated",
measurement="cumulative",
unit="people",
provider="state",
common_field=CommonFields.VACCINATIONS_INITIATED,
),
ccd_helpers.ScraperVariable(
variable_name="total_vaccine_initiated",
measurement="current",
unit="percentage",
provider="state",
common_field=CommonFields.VACCINATIONS_INITIATED_PCT,
),
ccd_helpers.ScraperVariable(
variable_name="total_vaccine_completed",
measurement="cumulative",
unit="people",
provider="state",
common_field=CommonFields.VACCINATIONS_COMPLETED,
),
ccd_helpers.ScraperVariable(
variable_name="total_vaccine_completed",
measurement="current",
unit="percentage",
provider="state",
common_field=CommonFields.VACCINATIONS_COMPLETED_PCT,
),
ccd_helpers.ScraperVariable(
variable_name="total_vaccine_doses_administered",
measurement="cumulative",
unit="doses",
provider="state",
common_field=CommonFields.VACCINES_ADMINISTERED,
),
]
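    # Illustrative reading of the list above (not part of the original
    # module): an entry that sets common_field, e.g. the one with
    # variable_name="cases" (cumulative, people, provider="state"), declares
    # that the scraped series feeds CommonFields.CASES, one of the
    # EXPECTED_FIELDS listed at the top of this class.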
| 43.664557 | 100 | 0.66285 |
acf21af58d917ad941a6729251a815455b7366c4 | 23,152 | py | Python | kmip/core/messages/payloads/derive_key.py | oleksiys/PyKMIP | c8f4c8c94d29ae9b16c28654f14e5530b4a32f0a | [
"Apache-2.0"
] | null | null | null | kmip/core/messages/payloads/derive_key.py | oleksiys/PyKMIP | c8f4c8c94d29ae9b16c28654f14e5530b4a32f0a | [
"Apache-2.0"
] | null | null | null | kmip/core/messages/payloads/derive_key.py | oleksiys/PyKMIP | c8f4c8c94d29ae9b16c28654f14e5530b4a32f0a | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2017 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from kmip.core import attributes
from kmip.core import enums
from kmip.core import exceptions
from kmip.core import objects
from kmip.core import primitives
from kmip.core import utils
class DeriveKeyRequestPayload(primitives.Struct):
"""
A request payload for the DeriveKey operation.
Attributes:
object_type: The type of the object that should be derived.
unique_identifiers: A set of unique IDs of managed objects to be used
with the derivation process.
derivation_method: The method that should be used to derive the new
cryptographic object.
derivation_parameters: A collection of settings relevant for the
derivation method.
template_attribute: A collection of attributes that should be set on
the newly derived cryptographic object.
"""
def __init__(self,
object_type=None,
unique_identifiers=None,
derivation_method=None,
derivation_parameters=None,
template_attribute=None):
"""
Construct a DeriveKey request payload struct.
Args:
object_type (enum): An ObjectType enumeration specifying the type
of the object to derive. Optional, defaults to None. Required
for read/write.
unique_identifiers (list): A list of strings representing the IDs
of managed objects (e.g., symmetric keys) to be used for
derivation. Optional, defaults to None. At least one value is
required for read/write.
derivation_method (enum): A DerivationMethod enumeration
specifying the type of derivation function to use (e.g.,
PBKDF2). Optional, defaults to None. Required for read/write.
derivation_parameters (DerivationParameters): A structure
containing cryptographic settings relevant for the derivation
method. Optional, defaults to None. Required for read/write.
template_attribute (TemplateAttribute): A structure containing a
set of attributes (e.g., cryptographic algorithm,
cryptographic length) that should be set on the newly derived
cryptographic object. Optional, defaults to None. Required
for read/write.
"""
super(DeriveKeyRequestPayload, self).__init__(
enums.Tags.REQUEST_PAYLOAD
)
self._object_type = None
self._unique_identifiers = None
self._derivation_method = None
self._derivation_parameters = None
self._template_attribute = None
self.object_type = object_type
self.unique_identifiers = unique_identifiers
self.derivation_method = derivation_method
self.derivation_parameters = derivation_parameters
self.template_attribute = template_attribute
@property
def object_type(self):
if self._object_type:
return self._object_type.value
else:
return None
@object_type.setter
def object_type(self, value):
if value is None:
self._object_type = None
elif isinstance(value, enums.ObjectType):
self._object_type = primitives.Enumeration(
enums.ObjectType,
value=value,
tag=enums.Tags.OBJECT_TYPE
)
else:
raise TypeError("Object type must be an ObjectType enumeration.")
@property
def unique_identifiers(self):
if self._unique_identifiers:
unique_identifiers = []
for i in self._unique_identifiers:
unique_identifiers.append(i.value)
return unique_identifiers
else:
return None
@unique_identifiers.setter
def unique_identifiers(self, value):
if value is None:
self._unique_identifiers = None
elif isinstance(value, list):
unique_identifiers = []
for i in value:
if isinstance(i, six.string_types):
unique_identifiers.append(
primitives.TextString(
value=i,
tag=enums.Tags.UNIQUE_IDENTIFIER
)
)
else:
raise TypeError(
"Unique identifiers must be a list of strings."
)
self._unique_identifiers = unique_identifiers
else:
raise TypeError("Unique identifiers must be a list of strings.")
@property
def derivation_method(self):
if self._derivation_method:
return self._derivation_method.value
else:
return None
@derivation_method.setter
def derivation_method(self, value):
if value is None:
self._derivation_method = None
elif isinstance(value, enums.DerivationMethod):
self._derivation_method = primitives.Enumeration(
enums.DerivationMethod,
value=value,
tag=enums.Tags.DERIVATION_METHOD
)
else:
raise TypeError(
"Derivation method must be a DerivationMethod enumeration."
)
@property
def derivation_parameters(self):
if self._derivation_parameters:
return self._derivation_parameters
else:
return None
@derivation_parameters.setter
def derivation_parameters(self, value):
if value is None:
self._derivation_parameters = None
elif isinstance(value, attributes.DerivationParameters):
self._derivation_parameters = value
else:
raise TypeError(
"Derivation parameters must be a DerivationParameters "
"structure."
)
@property
def template_attribute(self):
if self._template_attribute:
return self._template_attribute
else:
return None
@template_attribute.setter
def template_attribute(self, value):
if value is None:
self._template_attribute = None
elif isinstance(value, objects.TemplateAttribute):
self._template_attribute = value
else:
raise TypeError(
"Template attribute must be a TemplateAttribute structure."
)
def read(self, input_buffer, kmip_version=enums.KMIPVersion.KMIP_1_0):
"""
Read the data encoding the DeriveKey request payload and decode it
into its constituent parts.
Args:
input_buffer (stream): A data stream containing encoded object
data, supporting a read method; usually a BytearrayStream
object.
kmip_version (KMIPVersion): An enumeration defining the KMIP
version with which the object will be decoded. Optional,
defaults to KMIP 1.0.
Raises:
ValueError: Raised if the data attribute is missing from the
encoded payload.
"""
super(DeriveKeyRequestPayload, self).read(
input_buffer,
kmip_version=kmip_version
)
local_buffer = utils.BytearrayStream(input_buffer.read(self.length))
if self.is_tag_next(enums.Tags.OBJECT_TYPE, local_buffer):
self._object_type = primitives.Enumeration(
enums.ObjectType,
tag=enums.Tags.OBJECT_TYPE
)
self._object_type.read(local_buffer, kmip_version=kmip_version)
else:
raise exceptions.InvalidKmipEncoding(
"The DeriveKey request payload encoding is missing the object "
"type."
)
unique_identifiers = []
while self.is_tag_next(enums.Tags.UNIQUE_IDENTIFIER, local_buffer):
unique_identifier = primitives.TextString(
tag=enums.Tags.UNIQUE_IDENTIFIER
)
unique_identifier.read(local_buffer, kmip_version=kmip_version)
unique_identifiers.append(unique_identifier)
if not unique_identifiers:
raise exceptions.InvalidKmipEncoding(
"The DeriveKey request payload encoding is missing the unique "
"identifiers."
)
else:
self._unique_identifiers = unique_identifiers
if self.is_tag_next(enums.Tags.DERIVATION_METHOD, local_buffer):
self._derivation_method = primitives.Enumeration(
enums.DerivationMethod,
tag=enums.Tags.DERIVATION_METHOD
)
self._derivation_method.read(
local_buffer,
kmip_version=kmip_version
)
else:
raise exceptions.InvalidKmipEncoding(
"The DeriveKey request payload encoding is missing the "
"derivation method."
)
if self.is_tag_next(enums.Tags.DERIVATION_PARAMETERS, local_buffer):
self._derivation_parameters = attributes.DerivationParameters()
self._derivation_parameters.read(
local_buffer,
kmip_version=kmip_version
)
else:
raise exceptions.InvalidKmipEncoding(
"The DeriveKey request payload encoding is missing the "
"derivation parameters."
)
if kmip_version < enums.KMIPVersion.KMIP_2_0:
if self.is_tag_next(enums.Tags.TEMPLATE_ATTRIBUTE, local_buffer):
self._template_attribute = objects.TemplateAttribute()
self._template_attribute.read(
local_buffer,
kmip_version=kmip_version
)
else:
raise exceptions.InvalidKmipEncoding(
"The DeriveKey request payload encoding is missing the "
"template attribute."
)
else:
if self.is_tag_next(enums.Tags.ATTRIBUTES, local_buffer):
attrs = objects.Attributes()
attrs.read(local_buffer, kmip_version=kmip_version)
value = objects.convert_attributes_to_template_attribute(
attrs
)
self._template_attribute = value
else:
raise exceptions.InvalidKmipEncoding(
"The DeriveKey request payload encoding is missing the "
"attributes structure."
)
self.is_oversized(local_buffer)
def write(self, output_buffer, kmip_version=enums.KMIPVersion.KMIP_1_0):
"""
Write the data encoding the DeriveKey request payload to a stream.
Args:
output_buffer (stream): A data stream in which to encode object
data, supporting a write method; usually a BytearrayStream
object.
kmip_version (KMIPVersion): An enumeration defining the KMIP
version with which the object will be encoded. Optional,
defaults to KMIP 1.0.
Raises:
ValueError: Raised if the data attribute is not defined.
"""
local_buffer = utils.BytearrayStream()
if self._object_type:
self._object_type.write(local_buffer, kmip_version=kmip_version)
else:
raise exceptions.InvalidField(
"The DeriveKey request payload is missing the object type "
"field."
)
if self._unique_identifiers:
for unique_identifier in self._unique_identifiers:
unique_identifier.write(
local_buffer,
kmip_version=kmip_version
)
else:
raise exceptions.InvalidField(
"The DeriveKey request payload is missing the unique "
"identifiers field."
)
if self._derivation_method:
self._derivation_method.write(
local_buffer,
kmip_version=kmip_version
)
else:
raise exceptions.InvalidField(
"The DeriveKey request payload is missing the derivation "
"method field."
)
if self._derivation_parameters:
self._derivation_parameters.write(
local_buffer,
kmip_version=kmip_version
)
else:
raise exceptions.InvalidField(
"The DeriveKey request payload is missing the derivation "
"parameters field."
)
if kmip_version < enums.KMIPVersion.KMIP_2_0:
if self._template_attribute:
self._template_attribute.write(
local_buffer,
kmip_version=kmip_version
)
else:
raise exceptions.InvalidField(
"The DeriveKey request payload is missing the template "
"attribute field."
)
else:
if self._template_attribute:
attrs = objects.convert_template_attribute_to_attributes(
self._template_attribute
)
attrs.write(local_buffer, kmip_version=kmip_version)
else:
raise exceptions.InvalidField(
"The DeriveKey request payload is missing the template "
"attribute field."
)
self.length = local_buffer.length()
super(DeriveKeyRequestPayload, self).write(
output_buffer,
kmip_version=kmip_version
)
output_buffer.write(local_buffer.buffer)
def __eq__(self, other):
if isinstance(other, DeriveKeyRequestPayload):
if self.object_type != other.object_type:
return False
elif self.unique_identifiers != other.unique_identifiers:
return False
elif self.derivation_method != other.derivation_method:
return False
elif self.derivation_parameters != other.derivation_parameters:
return False
elif self.template_attribute != other.template_attribute:
return False
else:
return True
else:
return NotImplemented
def __ne__(self, other):
if isinstance(other, DeriveKeyRequestPayload):
return not (self == other)
else:
return NotImplemented
def __repr__(self):
args = ", ".join([
"object_type={0}".format(self.object_type),
"unique_identifiers={0}".format(self.unique_identifiers),
"derivation_method={0}".format(self.derivation_method),
"derivation_parameters={0}".format(
repr(self.derivation_parameters)
),
"template_attribute={0}".format(repr(self.template_attribute))
])
return "DeriveKeyRequestPayload({0})".format(args)
def __str__(self):
return str({
"object_type": self.object_type,
"unique_identifiers": self.unique_identifiers,
"derivation_method": self.derivation_method,
"derivation_parameters": self.derivation_parameters,
"template_attribute": self.template_attribute
})
class DeriveKeyResponsePayload(primitives.Struct):
"""
A response payload for the DeriveKey operation.
Attributes:
unique_identifier: The unique ID of the newly derived cryptographic
object.
template_attribute: A collection of attributes that were implicitly
set by the server on the newly derived cryptographic object.
"""
def __init__(self,
unique_identifier=None,
template_attribute=None):
"""
Construct a DeriveKey response payload struct.
Args:
unique_identifier (string): A string representing the ID of the
newly derived managed object. Optional, defaults to None. At
least one value is required for encoding and decoding.
template_attribute (TemplateAttribute): A structure containing a
set of attributes (e.g., cryptographic algorithm,
cryptographic length) implicitly set by the server on the
newly derived cryptographic object. Optional, defaults to
None.
"""
super(DeriveKeyResponsePayload, self).__init__(
enums.Tags.RESPONSE_PAYLOAD
)
self._unique_identifier = None
self._template_attribute = None
self.unique_identifier = unique_identifier
self.template_attribute = template_attribute
@property
def unique_identifier(self):
if self._unique_identifier:
return self._unique_identifier.value
else:
return None
@unique_identifier.setter
def unique_identifier(self, value):
if value is None:
self._unique_identifier = None
elif isinstance(value, six.string_types):
self._unique_identifier = primitives.TextString(
value=value,
tag=enums.Tags.UNIQUE_IDENTIFIER
)
else:
raise TypeError("Unique identifier must be a string.")
@property
def template_attribute(self):
if self._template_attribute:
return self._template_attribute
else:
return None
@template_attribute.setter
def template_attribute(self, value):
if value is None:
self._template_attribute = None
elif isinstance(value, objects.TemplateAttribute):
self._template_attribute = value
else:
raise TypeError(
"Template attribute must be a TemplateAttribute structure."
)
def read(self, input_buffer, kmip_version=enums.KMIPVersion.KMIP_1_0):
"""
Read the data encoding the DeriveKey response payload and decode it
into its constituent parts.
Args:
input_buffer (stream): A data stream containing encoded object
data, supporting a read method; usually a BytearrayStream
object.
kmip_version (KMIPVersion): An enumeration defining the KMIP
version with which the object will be decoded. Optional,
defaults to KMIP 1.0.
Raises:
ValueError: Raised if the data attribute is missing from the
encoded payload.
"""
super(DeriveKeyResponsePayload, self).read(
input_buffer,
kmip_version=kmip_version
)
local_buffer = utils.BytearrayStream(input_buffer.read(self.length))
if self.is_tag_next(enums.Tags.UNIQUE_IDENTIFIER, local_buffer):
self._unique_identifier = primitives.TextString(
tag=enums.Tags.UNIQUE_IDENTIFIER
)
self._unique_identifier.read(
local_buffer,
kmip_version=kmip_version
)
else:
raise exceptions.InvalidKmipEncoding(
"The DeriveKey response payload encoding is missing the "
"unique identifier."
)
if kmip_version < enums.KMIPVersion.KMIP_2_0:
if self.is_tag_next(enums.Tags.TEMPLATE_ATTRIBUTE, local_buffer):
self._template_attribute = objects.TemplateAttribute()
self._template_attribute.read(
local_buffer,
kmip_version=kmip_version
)
self.is_oversized(local_buffer)
def write(self, output_buffer, kmip_version=enums.KMIPVersion.KMIP_1_0):
"""
Write the data encoding the DeriveKey response payload to a stream.
Args:
output_buffer (stream): A data stream in which to encode object
data, supporting a write method; usually a BytearrayStream
object.
kmip_version (KMIPVersion): An enumeration defining the KMIP
version with which the object will be encoded. Optional,
defaults to KMIP 1.0.
Raises:
ValueError: Raised if the data attribute is not defined.
"""
local_buffer = utils.BytearrayStream()
if self._unique_identifier:
self._unique_identifier.write(
local_buffer,
kmip_version=kmip_version
)
else:
raise exceptions.InvalidField(
"The DeriveKey response payload is missing the unique "
"identifier field."
)
if kmip_version < enums.KMIPVersion.KMIP_2_0:
if self._template_attribute:
self._template_attribute.write(
local_buffer,
kmip_version=kmip_version
)
self.length = local_buffer.length()
super(DeriveKeyResponsePayload, self).write(
output_buffer,
kmip_version=kmip_version
)
output_buffer.write(local_buffer.buffer)
def __eq__(self, other):
if isinstance(other, DeriveKeyResponsePayload):
if self.unique_identifier != other.unique_identifier:
return False
elif self.template_attribute != other.template_attribute:
return False
else:
return True
else:
return NotImplemented
def __ne__(self, other):
if isinstance(other, DeriveKeyResponsePayload):
return not (self == other)
else:
return NotImplemented
def __repr__(self):
args = ", ".join([
"unique_identifier='{0}'".format(self.unique_identifier),
"template_attribute={0}".format(repr(self.template_attribute))
])
return "DeriveKeyResponsePayload({0})".format(args)
def __str__(self):
return str({
"unique_identifier": self.unique_identifier,
"template_attribute": self.template_attribute
})
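# Illustrative construction sketch (not part of the original module); the
# identifier below is hypothetical and only the structure mirrors the classes
# above:
#   payload = DeriveKeyRequestPayload(
#       object_type=enums.ObjectType.SYMMETRIC_KEY,
#       unique_identifiers=['fb4b5b9c-6188-4c63-8142-fe9c328129fc'],
#       derivation_method=enums.DerivationMethod.HASH,
#       derivation_parameters=attributes.DerivationParameters(),
#       template_attribute=objects.TemplateAttribute())
#   stream = utils.BytearrayStream()
#   payload.write(stream)   # raises InvalidField if a required field is None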
| 36.807631 | 79 | 0.597184 |
acf21b0b630d1cddcdcf751576d8f1569a37cf2d | 2,318 | py | Python | reading-notes/CorePython/src/ftp.py | ASMlover/study | 5878f862573061f94c5776a351e30270dfd9966a | [
"BSD-2-Clause"
] | 22 | 2015-05-18T07:04:36.000Z | 2021-08-02T03:01:43.000Z | reading-notes/CorePython/src/ftp.py | ASMlover/study | 5878f862573061f94c5776a351e30270dfd9966a | [
"BSD-2-Clause"
] | 1 | 2017-08-31T22:13:57.000Z | 2017-09-05T15:00:25.000Z | reading-notes/CorePython/src/ftp.py | ASMlover/study | 5878f862573061f94c5776a351e30270dfd9966a | [
"BSD-2-Clause"
] | 6 | 2015-06-06T07:16:12.000Z | 2021-07-06T13:45:56.000Z | # Copyright (c) 2014 ASMlover. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
#    the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import ftplib
import os
import socket
HOST = 'ftp.mozilla.org'
DIRN = 'pub/mozilla.org/webtools'
FILE = 'bugzilla-LATEST.tar.gz'
def main():
try:
f = ftplib.FTP(HOST)
except (socket.error, socket.gaierror), e:
print 'ERROR: cannot reach "%s"' % HOST
return
print '*** Connected to host "%s"' % HOST
try:
f.login()
except ftplib.error_perm:
print 'ERROR: cannot login anonymously'
f.quit()
return
  print '*** Logged in anonymously'
try:
f.cwd(DIRN)
except ftplib.error_perm:
print 'ERROR: cannot cd to "%s"' % DIRN
f.quit()
return
print '*** Changed to "%s" folder' % DIRN
try:
f.retrbinary('RETR %s' % FILE, open(FILE, 'wb').write)
except ftplib.error_perm:
print 'ERROR: cannot read file "%s"' % FILE
os.unlink(FILE)
else:
print '*** Downloaded "%s" to cwd' % FILE
f.quit()
return
if __name__ == '__main__':
main()
| 28.975 | 70 | 0.709232 |
acf21c5b5125b1d6153a1af52adf0350cded0d5c | 6,849 | py | Python | models/cvae_new.py | bunthet01/srl_zoo | 0b077be6c114bbc2bfaea392afdd4f49442b41ab | [
"MIT"
] | 3 | 2019-10-09T09:52:27.000Z | 2021-01-01T21:12:44.000Z | models/cvae_new.py | bunthet01/srl_zoo | 0b077be6c114bbc2bfaea392afdd4f49442b41ab | [
"MIT"
] | null | null | null | models/cvae_new.py | bunthet01/srl_zoo | 0b077be6c114bbc2bfaea392afdd4f49442b41ab | [
"MIT"
] | null | null | null | # cvae.py is designed to use: "action" or ("action", "target position") as condition. cvae_new.py is designed to use ("target position" and "robot position") as condition
from __future__ import print_function, division, absolute_import
from torch.autograd import Variable
import torch as th
import numpy as np
import torch.nn as nn
from torch.nn import functional as F
from torchsummary import summary
try:
# relative import: when executing as a package: python -m ...
from .base_models import BaseModelVAE
from ..losses.losses import kullbackLeiblerLoss, generationLoss
from ..preprocessing.utils import one_hot, gaussian_target
import sys
sys.path.append("..")
from real_robots.constants import MIN_X, MAX_X, MIN_Y, MAX_Y # working with "Omnirobot-env"
except:
# absolute import: when executing directly: python train.py ...
from models.base_models import BaseModelVAE
from losses.losses import kullbackLeiblerLoss, generationLoss, KLDloss, BCEloss
from preprocessing.utils import one_hot, gaussian_target
import sys
sys.path.append("..")
from real_robots.constants import MIN_X, MAX_X, MIN_Y, MAX_Y # working with "Omnirobot-env"
class CNNCVAE_NEW(BaseModelVAE):
"""
Convolutional neural network for Conditional variational auto-encoder.
Two conditions are used: robot_position and target_position.
"""
def __init__(self, state_dim=3, img_shape=(3, 224, 224), device='cpu'):
super(CNNCVAE_NEW, self).__init__(state_dim=state_dim, img_shape=img_shape)
outshape = summary(self.encoder_conv, img_shape, show=False) # [-1, channels, high, width]
self.img_height, self.img_width = outshape[-2:]
self.device = device
self.encoder_fc1 = nn.Linear(self.img_height * self.img_width * 64, state_dim)
self.encoder_fc2 = nn.Linear(self.img_height * self.img_width * 64, state_dim)
self.decoder_fc = nn.Sequential(
nn.Linear(state_dim+4, self.img_height * self.img_width * 64)
)
def conv3x3(in_planes, out_planes, stride=1):
""""
From PyTorch Resnet implementation
3x3 convolution with padding
:param in_planes: (int)
:param out_planes: (int)
:param stride: (int)
"""
return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
padding=1, bias=False)
self.encoder_conv_new = nn.Sequential(
nn.Conv2d(self.img_shape[0]+2, 64, kernel_size=7, stride=2, padding=3, bias=False),
nn.BatchNorm2d(64),
nn.ReLU(),
nn.MaxPool2d(kernel_size=3, stride=2, padding=1),
conv3x3(in_planes=64, out_planes=64, stride=1),
nn.BatchNorm2d(64),
nn.ReLU(),
nn.MaxPool2d(kernel_size=3, stride=2),
conv3x3(in_planes=64, out_planes=64, stride=2),
nn.BatchNorm2d(64),
nn.ReLU(),
nn.MaxPool2d(kernel_size=3, stride=2)
)
def encode_cvae(self, x, t, r ):
"""
:param x: (th.Tensor)
:param t: (th.Tensor) target_position
:param r: (th.Tensor) robot_position
:return: (th.Tensor)
"""
target_chanel = gaussian_target(self.img_shape, t, MAX_X, MIN_X, MAX_Y, MIN_Y).float().to(self.device)
robot_chanel = gaussian_target(self.img_shape, r, MAX_X, MIN_X, MAX_Y, MIN_Y).float().to(self.device) # gaussian_target is the same as gaussian_robot
x = th.cat([x,target_chanel, robot_chanel], 1)
x = self.encoder_conv_new(x)
x = x.view(x.size(0), -1)
return self.encoder_fc1(x), self.encoder_fc2(x)
def decode_cvae(self, z, t, r):
"""
:param z: (th.Tensor)
:param t: (th.Tensor) target_position
:param r: (th.Tensor) robot_position
:return: (th.Tensor)
"""
concat_input = th.cat([z, t.float(), r.float()], 1)
out_put_1 = self.decoder_fc(concat_input)
out_put_2 = out_put_1.view(out_put_1.size(0), 64,self.img_height, self.img_width)
return self.decoder_conv(out_put_2)
def compute_tensor_cvae(self, x, t, r):
"""
:param x: (th.Tensor)
:param t: (th.Tensor) target_position
:param r: (th.Tensor) robot_position
:return: (th.Tensor)
"""
input_shape = x.size()
mu, logvar = self.encode_cvae(x, t, r)
z = self.reparameterize(mu, logvar)
decoded = self.decode_cvae(z, t, r).view(input_shape)
return decoded, mu, logvar
class CVAE_NEW_Trainer(nn.Module):
def __init__(self, state_dim=2, img_shape=(3, 224, 224),device='cpu'):
super().__init__()
self.state_dim = state_dim
self.img_shape = img_shape
self.device = device
def build_model(self, model_type='custom_cnn_2'):
assert model_type in ['custom_cnn_2'], 'The model must be custom_cnn_2 '
# [TODO] add other models
self.model_type = model_type
if model_type == 'custom_cnn_2':
self.model = CNNCVAE_NEW(self.state_dim, self.img_shape, self.device)
else:
raise NotImplementedError(
"model type: ({}) not supported yet.".format(model_type))
def train_on_batch(self, obs, next_obs,target_pos, next_target_pos,robot_pos, next_robot_pos, optimizer, loss_manager, valid_mode, device, beta, c):
(decoded_obs, mu, logvar), (next_decoded_obs, next_mu, next_logvar) = self.model.compute_tensor_cvae(obs,target_pos,robot_pos ), \
self.model.compute_tensor_cvae(next_obs, next_target_pos, next_robot_pos)
kullbackLeiblerLoss(mu, next_mu, logvar, next_logvar, loss_manager, beta, c)
generationLoss(decoded_obs, next_decoded_obs, obs, next_obs, weight=100.0, loss_manager=loss_manager)
if not valid_mode:
loss_manager.updateLossHistory()
loss = loss_manager.computeTotalLoss()
if not valid_mode:
loss.backward()
optimizer.step()
else:
pass
loss = loss.item()
return loss
def reconstruct(self, x, tar_pos, robot_pos):
return self.model.decode_cvae(self.model.encode_cvae(x, tar_pos, robot_pos)[0], tar_pos, robot_pos)
def encode(self, x, tar_pos, robot_pos):
return self.model.encode_cvae(x, tar_pos, robot_pos)
def decode(self, z, tar_pos, robot_pos):
return self.model.decode_cvae(z, tar_pos, robot_pos)
def forward(self, x, tar_pos, robot_pos):
return self.model.encode_cvae(x, tar_pos, robot_pos)[0]
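# Illustrative usage sketch (not part of the original module); shapes, the
# device and the dummy tensors are hypothetical:
#   trainer = CVAE_NEW_Trainer(state_dim=2, img_shape=(3, 64, 64), device='cpu')
#   trainer.build_model('custom_cnn_2')
#   obs = th.zeros(8, 3, 64, 64)      # batch of observations
#   tar_pos = th.zeros(8, 2)          # target positions (x, y)
#   robot_pos = th.zeros(8, 2)        # robot positions (x, y)
#   states = trainer(obs, tar_pos, robot_pos)   # -> (8, state_dim) latent means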
if __name__ == "__main__":
print("Start")
img_shape = (3, 64, 64)
    model = CNNCVAE_NEW(state_dim=2, img_shape=img_shape)
A = summary(model, img_shape)
| 41.011976 | 171 | 0.642868 |
acf21c912f8161547c3bb53a5679ff7ad292ea61 | 584 | py | Python | cracking_the_coding_interview_qs/17.26/sparse_similarity_test.py | angelusualle/algorithms | 86286a49db2a755bc57330cb455bcbd8241ea6be | [
"Apache-2.0"
] | null | null | null | cracking_the_coding_interview_qs/17.26/sparse_similarity_test.py | angelusualle/algorithms | 86286a49db2a755bc57330cb455bcbd8241ea6be | [
"Apache-2.0"
] | null | null | null | cracking_the_coding_interview_qs/17.26/sparse_similarity_test.py | angelusualle/algorithms | 86286a49db2a755bc57330cb455bcbd8241ea6be | [
"Apache-2.0"
] | null | null | null | import unittest
from sparse_similarity import sparse_similarity
class Test_Case_Sparse_Similarity(unittest.TestCase):
def test_sparse_similarity(self):
self.assertListEqual(sparse_similarity(
{
'ID1': {1,3,4,5,6,7},
'ID2': {23, 4, 6, 7, 8, 9, 10},
'ID3': {8, 9, 10 , 11, 23},
'ID4': {67, 34, 52, 699, 800},
}), ['ID1 to ID2 : 0.300000', 'ID2 to ID3 : 0.500000', 'ID2 to ID1 : 0.300000', 'ID3 to ID2 : 0.500000'])
#should be
if __name__ == '__main__':
unittest.main() | 36.5 | 117 | 0.541096 |
acf21cf634ed0a70b6fac343acacdb702cadd987 | 629 | py | Python | manage.py | sudhanshu150/Easy-pay-mobile-payment-system- | 0a9134849dd1d922d783ddfe8468a826b08652eb | [
"MIT"
] | null | null | null | manage.py | sudhanshu150/Easy-pay-mobile-payment-system- | 0a9134849dd1d922d783ddfe8468a826b08652eb | [
"MIT"
] | null | null | null | manage.py | sudhanshu150/Easy-pay-mobile-payment-system- | 0a9134849dd1d922d783ddfe8468a826b08652eb | [
"MIT"
] | 1 | 2021-03-09T11:58:07.000Z | 2021-03-09T11:58:07.000Z | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mypayment.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| 28.590909 | 73 | 0.683625 |
acf21db0ece9ef31a6a6ec5524b6df964b7640ee | 7,294 | py | Python | mlprodict/npy/onnx_numpy_wrapper.py | sdpython/mlprodic | 9367dacc91d35ec670c8a8a76708300a75bbc993 | [
"MIT"
] | 32 | 2018-03-04T23:33:30.000Z | 2022-03-10T19:15:06.000Z | mlprodict/npy/onnx_numpy_wrapper.py | sdpython/mlprodic | 9367dacc91d35ec670c8a8a76708300a75bbc993 | [
"MIT"
] | 184 | 2017-11-30T14:10:35.000Z | 2022-02-21T08:29:31.000Z | mlprodict/npy/onnx_numpy_wrapper.py | sdpython/mlprodic | 9367dacc91d35ec670c8a8a76708300a75bbc993 | [
"MIT"
] | 9 | 2019-07-24T13:18:00.000Z | 2022-03-07T04:08:07.000Z | """
@file
@brief Wraps :epkg:`numpy` functions into :epkg:`onnx`.
.. versionadded:: 0.6
"""
import warnings
from .onnx_version import FctVersion
from .onnx_numpy_annotation import get_args_kwargs
from .onnx_numpy_compiler import OnnxNumpyCompiler
class _created_classes:
"""
Class to store all dynamic classes created by wrappers.
"""
def __init__(self):
self.stored = {}
def append(self, name, cl):
"""
Adds a class into `globals()` to enable pickling on dynamic
classes.
"""
if name in self.stored:
warnings.warn( # pragma: no cover
"Class %r overwritten in\n%r\n---\n%r" % (
name, ", ".join(sorted(self.stored)), cl),
RuntimeWarning)
self.stored[name] = cl
globals()[name] = cl
_created_classes_inst = _created_classes()
class wrapper_onnxnumpy:
"""
Intermediate wrapper to store a pointer
on the compiler (type: @see cl OnnxNumpyCompiler).
:param compiled: instance of @see cl OnnxNumpyCompiler
.. versionadded:: 0.6
"""
def __init__(self, compiled):
self.compiled = compiled
def __call__(self, *args, **kwargs):
"""
Calls the compiled function with arguments `args`.
"""
return self.compiled(*args, **kwargs)
def __getstate__(self):
"""
Serializes everything but the function which generates
the ONNX graph, not needed anymore.
"""
return dict(compiled=self.compiled)
def __setstate__(self, state):
"""
Serializes everything but the function which generates
the ONNX graph, not needed anymore.
"""
self.compiled = state['compiled']
def onnxnumpy(op_version=None, runtime=None, signature=None):
"""
Decorator to declare a function implemented using
:epkg:`numpy` syntax but executed with :epkg:`ONNX`
operators.
:param op_version: :epkg:`ONNX` opset version
:param runtime: `'onnxruntime'` or one implemented by @see cl OnnxInference
:param signature: it should be used when the function
        is not annotated.
Equivalent to `onnxnumpy(arg)(foo)`.
.. versionadded:: 0.6
"""
def decorator_fct(fct):
compiled = OnnxNumpyCompiler(
fct, op_version=op_version, runtime=runtime,
signature=signature)
name = "onnxnumpy_%s_%s_%s" % (fct.__name__, str(op_version), runtime)
newclass = type(
name, (wrapper_onnxnumpy,),
{'__doc__': fct.__doc__, '__name__': name})
_created_classes_inst.append(name, newclass)
return newclass(compiled)
return decorator_fct
def onnxnumpy_default(fct):
"""
Decorator with options to declare a function implemented
using :epkg:`numpy` syntax but executed with :epkg:`ONNX`
operators.
:param fct: function to wrap
.. versionadded:: 0.6
"""
return onnxnumpy()(fct)
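# Illustrative usage sketch (not part of the original module): the decorator
# is applied to an annotated numpy-style function, e.g.
#   @onnxnumpy_default
#   def onnx_fct(x):
#       ...
# onnx_fct then becomes an instance of the dynamically created
# wrapper_onnxnumpy subclass, and calling onnx_fct(arr) executes the ONNX
# graph compiled by OnnxNumpyCompiler instead of the python body.  (The body
# and annotations are omitted here; they must follow the conventions expected
# by OnnxNumpyCompiler.)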
class wrapper_onnxnumpy_np:
"""
Intermediate wrapper to store a pointer
on the compiler (type: @see cl OnnxNumpyCompiler)
supporting multiple signatures.
.. versionadded:: 0.6
"""
def __init__(self, **kwargs):
self.fct = kwargs['fct']
self.signature = kwargs['signature']
self.fctsig = kwargs.get('fctsig', None)
self.args, self.kwargs = get_args_kwargs(
self.fct,
0 if self.signature is None else self.signature.n_optional)
self.data = kwargs
self.signed_compiled = {}
def __getstate__(self):
"""
Serializes everything but the function which generates
the ONNX graph, not needed anymore.
"""
data_copy = {k: v for k, v in self.data.items() if k != 'fct'}
return dict(signature=self.signature, args=self.args,
kwargs=self.kwargs, data=data_copy,
signed_compiled=self.signed_compiled)
def __setstate__(self, state):
"""
Restores serialized data.
"""
for k, v in state.items():
setattr(self, k, v)
def __getitem__(self, dtype):
"""
Returns the instance of @see cl wrapper_onnxnumpy
mapped to *dtype*.
:param dtype: numpy dtype corresponding to the input dtype
of the function
:return: instance of @see cl wrapper_onnxnumpy
"""
if not isinstance(dtype, FctVersion):
raise TypeError( # pragma: no cover
"dtype must be of type 'FctVersion' not %s: %s." % (
type(dtype), dtype))
if dtype not in self.signed_compiled:
self._populate(dtype)
key = dtype
else:
key = dtype
return self.signed_compiled[key]
def __call__(self, *args, **kwargs):
"""
Calls the compiled function assuming the type of the first
tensor in *args* defines the templated version of the function
to convert into *ONNX*.
"""
if len(self.kwargs) == 0:
others = None
else:
others = tuple(kwargs.get(k, self.kwargs[k]) for k in self.kwargs)
key = FctVersion( # pragma: no cover
tuple(a if (a is None or hasattr(a, 'fit'))
else a.dtype.type for a in args),
others)
return self[key](*args)
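    # Illustrative sketch (not part of the original module): for a call such
    # as wrapped(x32, axis=1), with x32 a float32 array and 'axis' the only
    # declared keyword, the lookup key built above is roughly
    #   FctVersion((numpy.float32,), (1,))
    # i.e. the dtypes of the positional tensors plus the ordered keyword
    # values, which selects (or triggers) the matching compiled ONNX version.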
def _populate(self, version):
"""
Creates the appropriate runtime for function *fct*
"""
compiled = OnnxNumpyCompiler(
fct=self.data["fct"], op_version=self.data["op_version"],
runtime=self.data["runtime"], signature=self.data["signature"],
version=version, fctsig=self.data.get('fctsig', None))
name = "onnxnumpy_np_%s_%s_%s_%s" % (
self.data["fct"].__name__, str(self.data["op_version"]),
self.data["runtime"], version.as_string())
newclass = type(
name, (wrapper_onnxnumpy,),
{'__doc__': self.data["fct"].__doc__, '__name__': name})
self.signed_compiled[version] = newclass(compiled)
def _validate_onnx_data(self, X):
return X
def onnxnumpy_np(op_version=None, runtime=None, signature=None):
"""
Decorator to declare a function implemented using
:epkg:`numpy` syntax but executed with :epkg:`ONNX`
operators.
:param op_version: :epkg:`ONNX` opset version
:param runtime: `'onnxruntime'` or one implemented by @see cl OnnxInference
:param signature: it should be used when the function
        is not annotated.
Equivalent to `onnxnumpy(arg)(foo)`.
.. versionadded:: 0.6
"""
def decorator_fct(fct):
name = "onnxnumpy_nb_%s_%s_%s" % (
fct.__name__, str(op_version), runtime)
newclass = type(
name, (wrapper_onnxnumpy_np,), {
'__doc__': fct.__doc__,
'__name__': name,
'__getstate__': wrapper_onnxnumpy_np.__getstate__,
'__setstate__': wrapper_onnxnumpy_np.__setstate__})
_created_classes_inst.append(name, newclass)
return newclass(
fct=fct, op_version=op_version, runtime=runtime,
signature=signature)
return decorator_fct
| 30.90678 | 79 | 0.602961 |
acf2202a4d6fad05ca388b123c569a66e44540a7 | 2,918 | py | Python | plot_covid19_fr/incidence_versus_tests_nb.py | paugier/plot-covid19-fr | f0e612f7d0a64c3c1b653d4d1bf2f1e0b1aef3bd | [
"BSD-3-Clause"
] | null | null | null | plot_covid19_fr/incidence_versus_tests_nb.py | paugier/plot-covid19-fr | f0e612f7d0a64c3c1b653d4d1bf2f1e0b1aef3bd | [
"BSD-3-Clause"
] | null | null | null | plot_covid19_fr/incidence_versus_tests_nb.py | paugier/plot-covid19-fr | f0e612f7d0a64c3c1b653d4d1bf2f1e0b1aef3bd | [
"BSD-3-Clause"
] | 1 | 2020-10-13T09:25:29.000Z | 2020-10-13T09:25:29.000Z | import matplotlib.pyplot as plt
import ipywidgets as widgets
from .plot_incidence_versus_tests import plot_incidence_vs_tests, date_file
from .util import min_incidence_default
class StatePlotIncidenceVersusTests:
def __init__(self, min_incidence=min_incidence_default):
self.index_friday = 0
self.last_days = False
self.min_incidence = min_incidence
self.max_incidence = None
self.ax = None
options = [
"Derniers jours",
"Dernière semaine",
"Avant dernière semaine",
"Avant avant dernière semaine",
]
self.widget_date = widgets.Dropdown(
options=options, value=options[1], description="Période :"
)
self.widget_date.observe(self.handle_change_date)
self.widget_min_incidence = widgets.IntText(
value=min_incidence, description="Minimum:", disabled=False
)
self.widget_max_incidence = widgets.IntText(
value=2000, description="Maximum:", disabled=False
)
self.widget_button = widgets.Button(
description="Retracer",
disabled=False,
button_style="", # 'success', 'info', 'warning', 'danger' or ''
tooltip="Retracer la figure avec les nouvelles données d'entrée",
icon="sync-alt", # (FontAwesome names without the `fa-` prefix)
)
self.widget_button.on_click(self.sync)
self.layout_inputs = widgets.TwoByTwoLayout(
top_left=self.widget_max_incidence,
bottom_left=self.widget_min_incidence,
top_right=self.widget_button,
)
def set_ax(self, ax):
self.ax = ax
def create_default_ax(self):
fig, self.ax = plt.subplots(figsize=(9, 5))
def handle_change_date(self, change):
if change["name"] != "index" or change["type"] != "change":
return
new = change["new"]
if change["old"] == new:
return
if new == 0:
self.last_days = True
self.index_friday = 0
else:
self.last_days = False
self.index_friday = -new + 1
self.plot()
def sync(self, button):
self.min_incidence = self.widget_min_incidence.value
self.max_incidence = self.widget_max_incidence.value
self.plot()
def plot(self):
if self.ax is None:
self.create_default_ax()
else:
self.ax.clear()
plot_incidence_vs_tests(
index_friday=self.index_friday,
last_days=self.last_days,
ax=self.ax,
min_incidence=self.min_incidence,
max_incidence=self.max_incidence,
)
window_title = f"incidence_vs_tests{date_file}"
self.ax.figure.canvas.manager.set_window_title(window_title)
self.ax.figure.tight_layout()
| 30.082474 | 77 | 0.605552 |
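The handle_change_date handler above turns the selected dropdown entry into a (last_days, index_friday) pair. A self-contained restatement of that mapping with the widget machinery stripped away (the function name is illustrative, not part of the module):

def date_selection(new_index):
    # index 0 is "Derniers jours"; indices 1..3 select the last three complete weeks
    if new_index == 0:
        return True, 0           # last_days=True, index_friday=0
    return False, -new_index + 1

assert date_selection(0) == (True, 0)
assert date_selection(1) == (False, 0)   # "Dernière semaine"
assert date_selection(3) == (False, -2)  # "Avant avant dernière semaine"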
acf220ab14b6330c3750fac0305f0925cc6f5b3e | 1,301 | py | Python | tapis_cli/commands/taccapis/v2/apim/models/token.py | eho-tacc/tapis-cli | 31b86135aa0c2b41afe0c0f7c50adb902c62076b | [
"BSD-3-Clause"
] | null | null | null | tapis_cli/commands/taccapis/v2/apim/models/token.py | eho-tacc/tapis-cli | 31b86135aa0c2b41afe0c0f7c50adb902c62076b | [
"BSD-3-Clause"
] | null | null | null | tapis_cli/commands/taccapis/v2/apim/models/token.py | eho-tacc/tapis-cli | 31b86135aa0c2b41afe0c0f7c50adb902c62076b | [
"BSD-3-Clause"
] | null | null | null | """Data model and functions for Tapis token
"""
from tapis_cli.commands.taccapis.v2 import SERVICE_VERSION
from tapis_cli.commands.taccapis import TapisModel
from tapis_cli.display import Verbosity
from tapis_cli.search import argtype, argmod
__all__ = ['Token', 'API_NAME', 'SERVICE_VERSION']
API_NAME = 'token'
class Token(TapisModel):
"""Model of a Tapis application
"""
id_display_name = 'TOKEN'
SEARCH_ARGS = [
# JSON_field, type, verbosity, mods_allowed, default_mod, choices, override_option, searchable
("scope", argtype.STRING, Verbosity.RECORD, argmod.STRING_DEFAULTS,
argmod.DEFAULT, None, None, False),
("token_type", argtype.STRING, Verbosity.RECORD,
argmod.STRING_DEFAULTS, argmod.DEFAULT, None, None, False),
("expires_at", argtype.DATETIME, Verbosity.BRIEF,
argmod.STRING_DEFAULTS, argmod.DEFAULT, None, None, False),
("expires_in", argtype.INTEGER, Verbosity.RECORD,
argmod.STRING_DEFAULTS, argmod.DEFAULT, None, None, False),
("access_token", argtype.STRING, Verbosity.BRIEF,
argmod.STRING_DEFAULTS, argmod.DEFAULT, None, None, False),
("refresh_token", argtype.STRING, Verbosity.BRIEF,
argmod.STRING_DEFAULTS, argmod.DEFAULT, None, None, False)
]
| 39.424242 | 102 | 0.703305 |
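Each SEARCH_ARGS row follows the field order spelled out in the inline comment (JSON_field, type, verbosity, mods_allowed, default_mod, choices, override_option, searchable). A small sketch of unpacking one such row, with plain strings standing in for the argtype, Verbosity and argmod constants used in the record:

row = ("expires_at", "DATETIME", "BRIEF", "STRING_DEFAULTS", "DEFAULT", None, None, False)
(json_field, arg_type, verbosity, mods_allowed,
 default_mod, choices, override_option, searchable) = row
assert json_field == "expires_at" and searchable is False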
acf22128d49378f173aaa0744dbe8e974fa39caa | 43 | py | Python | tests/datatypes/base_test.py | Paul-Kim/firestore | 4933f1a9f307539a8cef4ea80869c8821a92bc8d | [
"MIT"
] | 7 | 2019-04-11T19:39:59.000Z | 2021-02-02T10:28:27.000Z | tests/datatypes/base_test.py | Paul-Kim/firestore | 4933f1a9f307539a8cef4ea80869c8821a92bc8d | [
"MIT"
] | null | null | null | tests/datatypes/base_test.py | Paul-Kim/firestore | 4933f1a9f307539a8cef4ea80869c8821a92bc8d | [
"MIT"
] | 4 | 2019-04-22T04:34:47.000Z | 2019-11-25T07:19:23.000Z | """
See connection_test for base tests
"""
| 10.75 | 34 | 0.697674 |
acf221549b98585f9c153a89d0a3a2c0836a9597 | 23,955 | py | Python | setuptools/tests/test_config.py | paulmon/setuptools | c2f72efd261bf89372dfa27b1c115012e74bd525 | [
"MIT"
] | null | null | null | setuptools/tests/test_config.py | paulmon/setuptools | c2f72efd261bf89372dfa27b1c115012e74bd525 | [
"MIT"
] | null | null | null | setuptools/tests/test_config.py | paulmon/setuptools | c2f72efd261bf89372dfa27b1c115012e74bd525 | [
"MIT"
] | null | null | null | import contextlib
import pytest
from distutils.errors import DistutilsOptionError, DistutilsFileError
from mock import patch
from setuptools.dist import Distribution, _Distribution
from setuptools.config import ConfigHandler, read_configuration
from . import py2_only, py3_only
class ErrConfigHandler(ConfigHandler):
"""Erroneous handler. Fails to implement required methods."""
def make_package_dir(name, base_dir, ns=False):
dir_package = base_dir
for dir_name in name.split('/'):
dir_package = dir_package.mkdir(dir_name)
init_file = None
if not ns:
init_file = dir_package.join('__init__.py')
init_file.write('')
return dir_package, init_file
def fake_env(tmpdir, setup_cfg, setup_py=None, package_path='fake_package'):
if setup_py is None:
setup_py = (
'from setuptools import setup\n'
'setup()\n'
)
tmpdir.join('setup.py').write(setup_py)
config = tmpdir.join('setup.cfg')
config.write(setup_cfg)
package_dir, init_file = make_package_dir(package_path, tmpdir)
init_file.write(
'VERSION = (1, 2, 3)\n'
'\n'
'VERSION_MAJOR = 1'
'\n'
'def get_version():\n'
' return [3, 4, 5, "dev"]\n'
'\n'
)
return package_dir, config
@contextlib.contextmanager
def get_dist(tmpdir, kwargs_initial=None, parse=True):
kwargs_initial = kwargs_initial or {}
with tmpdir.as_cwd():
dist = Distribution(kwargs_initial)
dist.script_name = 'setup.py'
parse and dist.parse_config_files()
yield dist
def test_parsers_implemented():
with pytest.raises(NotImplementedError):
handler = ErrConfigHandler(None, {})
handler.parsers
class TestConfigurationReader:
def test_basic(self, tmpdir):
_, config = fake_env(
tmpdir,
'[metadata]\n'
'version = 10.1.1\n'
'keywords = one, two\n'
'\n'
'[options]\n'
'scripts = bin/a.py, bin/b.py\n'
)
config_dict = read_configuration('%s' % config)
assert config_dict['metadata']['version'] == '10.1.1'
assert config_dict['metadata']['keywords'] == ['one', 'two']
assert config_dict['options']['scripts'] == ['bin/a.py', 'bin/b.py']
def test_no_config(self, tmpdir):
with pytest.raises(DistutilsFileError):
read_configuration('%s' % tmpdir.join('setup.cfg'))
def test_ignore_errors(self, tmpdir):
_, config = fake_env(
tmpdir,
'[metadata]\n'
'version = attr: none.VERSION\n'
'keywords = one, two\n'
)
with pytest.raises(ImportError):
read_configuration('%s' % config)
config_dict = read_configuration(
'%s' % config, ignore_option_errors=True)
assert config_dict['metadata']['keywords'] == ['one', 'two']
assert 'version' not in config_dict['metadata']
config.remove()
class TestMetadata:
def test_basic(self, tmpdir):
fake_env(
tmpdir,
'[metadata]\n'
'version = 10.1.1\n'
'description = Some description\n'
'long_description_content_type = text/something\n'
'long_description = file: README\n'
'name = fake_name\n'
'keywords = one, two\n'
'provides = package, package.sub\n'
'license = otherlic\n'
'download_url = http://test.test.com/test/\n'
'maintainer_email = test@test.com\n'
)
tmpdir.join('README').write('readme contents\nline2')
meta_initial = {
# This will be used so `otherlic` won't replace it.
'license': 'BSD 3-Clause License',
}
with get_dist(tmpdir, meta_initial) as dist:
metadata = dist.metadata
assert metadata.version == '10.1.1'
assert metadata.description == 'Some description'
assert metadata.long_description_content_type == 'text/something'
assert metadata.long_description == 'readme contents\nline2'
assert metadata.provides == ['package', 'package.sub']
assert metadata.license == 'BSD 3-Clause License'
assert metadata.name == 'fake_name'
assert metadata.keywords == ['one', 'two']
assert metadata.download_url == 'http://test.test.com/test/'
assert metadata.maintainer_email == 'test@test.com'
def test_file_mixed(self, tmpdir):
fake_env(
tmpdir,
'[metadata]\n'
'long_description = file: README.rst, CHANGES.rst\n'
'\n'
)
tmpdir.join('README.rst').write('readme contents\nline2')
tmpdir.join('CHANGES.rst').write('changelog contents\nand stuff')
with get_dist(tmpdir) as dist:
assert dist.metadata.long_description == (
'readme contents\nline2\n'
'changelog contents\nand stuff'
)
def test_file_sandboxed(self, tmpdir):
fake_env(
tmpdir,
'[metadata]\n'
'long_description = file: ../../README\n'
)
with get_dist(tmpdir, parse=False) as dist:
with pytest.raises(DistutilsOptionError):
dist.parse_config_files() # file: out of sandbox
def test_aliases(self, tmpdir):
fake_env(
tmpdir,
'[metadata]\n'
'author-email = test@test.com\n'
'home-page = http://test.test.com/test/\n'
'summary = Short summary\n'
'platform = a, b\n'
'classifier =\n'
' Framework :: Django\n'
' Programming Language :: Python :: 3.5\n'
)
with get_dist(tmpdir) as dist:
metadata = dist.metadata
assert metadata.author_email == 'test@test.com'
assert metadata.url == 'http://test.test.com/test/'
assert metadata.description == 'Short summary'
assert metadata.platforms == ['a', 'b']
assert metadata.classifiers == [
'Framework :: Django',
'Programming Language :: Python :: 3.5',
]
def test_multiline(self, tmpdir):
fake_env(
tmpdir,
'[metadata]\n'
'name = fake_name\n'
'keywords =\n'
' one\n'
' two\n'
'classifiers =\n'
' Framework :: Django\n'
' Programming Language :: Python :: 3.5\n'
)
with get_dist(tmpdir) as dist:
metadata = dist.metadata
assert metadata.keywords == ['one', 'two']
assert metadata.classifiers == [
'Framework :: Django',
'Programming Language :: Python :: 3.5',
]
def test_dict(self, tmpdir):
fake_env(
tmpdir,
'[metadata]\n'
'project_urls =\n'
' Link One = https://example.com/one/\n'
' Link Two = https://example.com/two/\n'
)
with get_dist(tmpdir) as dist:
metadata = dist.metadata
assert metadata.project_urls == {
'Link One': 'https://example.com/one/',
'Link Two': 'https://example.com/two/',
}
def test_version(self, tmpdir):
_, config = fake_env(
tmpdir,
'[metadata]\n'
'version = attr: fake_package.VERSION\n'
)
with get_dist(tmpdir) as dist:
assert dist.metadata.version == '1.2.3'
config.write(
'[metadata]\n'
'version = attr: fake_package.get_version\n'
)
with get_dist(tmpdir) as dist:
assert dist.metadata.version == '3.4.5.dev'
config.write(
'[metadata]\n'
'version = attr: fake_package.VERSION_MAJOR\n'
)
with get_dist(tmpdir) as dist:
assert dist.metadata.version == '1'
subpack = tmpdir.join('fake_package').mkdir('subpackage')
subpack.join('__init__.py').write('')
subpack.join('submodule.py').write('VERSION = (2016, 11, 26)')
config.write(
'[metadata]\n'
'version = attr: fake_package.subpackage.submodule.VERSION\n'
)
with get_dist(tmpdir) as dist:
assert dist.metadata.version == '2016.11.26'
def test_version_file(self, tmpdir):
_, config = fake_env(
tmpdir,
'[metadata]\n'
'version = file: fake_package/version.txt\n'
)
tmpdir.join('fake_package', 'version.txt').write('1.2.3\n')
with get_dist(tmpdir) as dist:
assert dist.metadata.version == '1.2.3'
tmpdir.join('fake_package', 'version.txt').write('1.2.3\n4.5.6\n')
with pytest.raises(DistutilsOptionError):
with get_dist(tmpdir) as dist:
_ = dist.metadata.version
def test_version_with_package_dir_simple(self, tmpdir):
_, config = fake_env(
tmpdir,
'[metadata]\n'
'version = attr: fake_package_simple.VERSION\n'
'[options]\n'
'package_dir =\n'
' = src\n',
package_path='src/fake_package_simple'
)
with get_dist(tmpdir) as dist:
assert dist.metadata.version == '1.2.3'
def test_version_with_package_dir_rename(self, tmpdir):
_, config = fake_env(
tmpdir,
'[metadata]\n'
'version = attr: fake_package_rename.VERSION\n'
'[options]\n'
'package_dir =\n'
' fake_package_rename = fake_dir\n',
package_path='fake_dir'
)
with get_dist(tmpdir) as dist:
assert dist.metadata.version == '1.2.3'
def test_version_with_package_dir_complex(self, tmpdir):
_, config = fake_env(
tmpdir,
'[metadata]\n'
'version = attr: fake_package_complex.VERSION\n'
'[options]\n'
'package_dir =\n'
' fake_package_complex = src/fake_dir\n',
package_path='src/fake_dir'
)
with get_dist(tmpdir) as dist:
assert dist.metadata.version == '1.2.3'
def test_unknown_meta_item(self, tmpdir):
fake_env(
tmpdir,
'[metadata]\n'
'name = fake_name\n'
'unknown = some\n'
)
with get_dist(tmpdir, parse=False) as dist:
dist.parse_config_files() # Skip unknown.
def test_usupported_section(self, tmpdir):
fake_env(
tmpdir,
'[metadata.some]\n'
'key = val\n'
)
with get_dist(tmpdir, parse=False) as dist:
with pytest.raises(DistutilsOptionError):
dist.parse_config_files()
def test_classifiers(self, tmpdir):
expected = set([
'Framework :: Django',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
])
# From file.
_, config = fake_env(
tmpdir,
'[metadata]\n'
'classifiers = file: classifiers\n'
)
tmpdir.join('classifiers').write(
'Framework :: Django\n'
'Programming Language :: Python :: 3\n'
'Programming Language :: Python :: 3.5\n'
)
with get_dist(tmpdir) as dist:
assert set(dist.metadata.classifiers) == expected
# From list notation
config.write(
'[metadata]\n'
'classifiers =\n'
' Framework :: Django\n'
' Programming Language :: Python :: 3\n'
' Programming Language :: Python :: 3.5\n'
)
with get_dist(tmpdir) as dist:
assert set(dist.metadata.classifiers) == expected
class TestOptions:
def test_basic(self, tmpdir):
fake_env(
tmpdir,
'[options]\n'
'zip_safe = True\n'
'use_2to3 = 1\n'
'include_package_data = yes\n'
'package_dir = b=c, =src\n'
'packages = pack_a, pack_b.subpack\n'
'namespace_packages = pack1, pack2\n'
'use_2to3_fixers = your.fixers, or.here\n'
'use_2to3_exclude_fixers = one.here, two.there\n'
'convert_2to3_doctests = src/tests/one.txt, src/two.txt\n'
'scripts = bin/one.py, bin/two.py\n'
'eager_resources = bin/one.py, bin/two.py\n'
'install_requires = docutils>=0.3; pack ==1.1, ==1.3; hey\n'
'tests_require = mock==0.7.2; pytest\n'
'setup_requires = docutils>=0.3; spack ==1.1, ==1.3; there\n'
'dependency_links = http://some.com/here/1, '
'http://some.com/there/2\n'
'python_requires = >=1.0, !=2.8\n'
'py_modules = module1, module2\n'
)
with get_dist(tmpdir) as dist:
assert dist.zip_safe
assert dist.use_2to3
assert dist.include_package_data
assert dist.package_dir == {'': 'src', 'b': 'c'}
assert dist.packages == ['pack_a', 'pack_b.subpack']
assert dist.namespace_packages == ['pack1', 'pack2']
assert dist.use_2to3_fixers == ['your.fixers', 'or.here']
assert dist.use_2to3_exclude_fixers == ['one.here', 'two.there']
assert dist.convert_2to3_doctests == ([
'src/tests/one.txt', 'src/two.txt'])
assert dist.scripts == ['bin/one.py', 'bin/two.py']
assert dist.dependency_links == ([
'http://some.com/here/1',
'http://some.com/there/2'
])
assert dist.install_requires == ([
'docutils>=0.3',
'pack==1.1,==1.3',
'hey'
])
assert dist.setup_requires == ([
'docutils>=0.3',
'spack ==1.1, ==1.3',
'there'
])
assert dist.tests_require == ['mock==0.7.2', 'pytest']
assert dist.python_requires == '>=1.0, !=2.8'
assert dist.py_modules == ['module1', 'module2']
def test_multiline(self, tmpdir):
fake_env(
tmpdir,
'[options]\n'
'package_dir = \n'
' b=c\n'
' =src\n'
'packages = \n'
' pack_a\n'
' pack_b.subpack\n'
'namespace_packages = \n'
' pack1\n'
' pack2\n'
'use_2to3_fixers = \n'
' your.fixers\n'
' or.here\n'
'use_2to3_exclude_fixers = \n'
' one.here\n'
' two.there\n'
'convert_2to3_doctests = \n'
' src/tests/one.txt\n'
' src/two.txt\n'
'scripts = \n'
' bin/one.py\n'
' bin/two.py\n'
'eager_resources = \n'
' bin/one.py\n'
' bin/two.py\n'
'install_requires = \n'
' docutils>=0.3\n'
' pack ==1.1, ==1.3\n'
' hey\n'
'tests_require = \n'
' mock==0.7.2\n'
' pytest\n'
'setup_requires = \n'
' docutils>=0.3\n'
' spack ==1.1, ==1.3\n'
' there\n'
'dependency_links = \n'
' http://some.com/here/1\n'
' http://some.com/there/2\n'
)
with get_dist(tmpdir) as dist:
assert dist.package_dir == {'': 'src', 'b': 'c'}
assert dist.packages == ['pack_a', 'pack_b.subpack']
assert dist.namespace_packages == ['pack1', 'pack2']
assert dist.use_2to3_fixers == ['your.fixers', 'or.here']
assert dist.use_2to3_exclude_fixers == ['one.here', 'two.there']
assert dist.convert_2to3_doctests == (
['src/tests/one.txt', 'src/two.txt'])
assert dist.scripts == ['bin/one.py', 'bin/two.py']
assert dist.dependency_links == ([
'http://some.com/here/1',
'http://some.com/there/2'
])
assert dist.install_requires == ([
'docutils>=0.3',
'pack==1.1,==1.3',
'hey'
])
assert dist.setup_requires == ([
'docutils>=0.3',
'spack ==1.1, ==1.3',
'there'
])
assert dist.tests_require == ['mock==0.7.2', 'pytest']
def test_package_dir_fail(self, tmpdir):
fake_env(
tmpdir,
'[options]\n'
'package_dir = a b\n'
)
with get_dist(tmpdir, parse=False) as dist:
with pytest.raises(DistutilsOptionError):
dist.parse_config_files()
def test_package_data(self, tmpdir):
fake_env(
tmpdir,
'[options.package_data]\n'
'* = *.txt, *.rst\n'
'hello = *.msg\n'
'\n'
'[options.exclude_package_data]\n'
'* = fake1.txt, fake2.txt\n'
'hello = *.dat\n'
)
with get_dist(tmpdir) as dist:
assert dist.package_data == {
'': ['*.txt', '*.rst'],
'hello': ['*.msg'],
}
assert dist.exclude_package_data == {
'': ['fake1.txt', 'fake2.txt'],
'hello': ['*.dat'],
}
def test_packages(self, tmpdir):
fake_env(
tmpdir,
'[options]\n'
'packages = find:\n'
)
with get_dist(tmpdir) as dist:
assert dist.packages == ['fake_package']
def test_find_directive(self, tmpdir):
dir_package, config = fake_env(
tmpdir,
'[options]\n'
'packages = find:\n'
)
dir_sub_one, _ = make_package_dir('sub_one', dir_package)
dir_sub_two, _ = make_package_dir('sub_two', dir_package)
with get_dist(tmpdir) as dist:
assert set(dist.packages) == set([
'fake_package', 'fake_package.sub_two', 'fake_package.sub_one'
])
config.write(
'[options]\n'
'packages = find:\n'
'\n'
'[options.packages.find]\n'
'where = .\n'
'include =\n'
' fake_package.sub_one\n'
' two\n'
)
with get_dist(tmpdir) as dist:
assert dist.packages == ['fake_package.sub_one']
config.write(
'[options]\n'
'packages = find:\n'
'\n'
'[options.packages.find]\n'
'exclude =\n'
' fake_package.sub_one\n'
)
with get_dist(tmpdir) as dist:
assert set(dist.packages) == set(
['fake_package', 'fake_package.sub_two'])
@py2_only
def test_find_namespace_directive_fails_on_py2(self, tmpdir):
dir_package, config = fake_env(
tmpdir,
'[options]\n'
'packages = find_namespace:\n'
)
with pytest.raises(DistutilsOptionError):
with get_dist(tmpdir) as dist:
dist.parse_config_files()
@py3_only
def test_find_namespace_directive(self, tmpdir):
dir_package, config = fake_env(
tmpdir,
'[options]\n'
'packages = find_namespace:\n'
)
dir_sub_one, _ = make_package_dir('sub_one', dir_package)
dir_sub_two, _ = make_package_dir('sub_two', dir_package, ns=True)
with get_dist(tmpdir) as dist:
assert set(dist.packages) == {
'fake_package', 'fake_package.sub_two', 'fake_package.sub_one'
}
config.write(
'[options]\n'
'packages = find_namespace:\n'
'\n'
'[options.packages.find]\n'
'where = .\n'
'include =\n'
' fake_package.sub_one\n'
' two\n'
)
with get_dist(tmpdir) as dist:
assert dist.packages == ['fake_package.sub_one']
config.write(
'[options]\n'
'packages = find_namespace:\n'
'\n'
'[options.packages.find]\n'
'exclude =\n'
' fake_package.sub_one\n'
)
with get_dist(tmpdir) as dist:
assert set(dist.packages) == {
'fake_package', 'fake_package.sub_two'
}
def test_extras_require(self, tmpdir):
fake_env(
tmpdir,
'[options.extras_require]\n'
'pdf = ReportLab>=1.2; RXP\n'
'rest = \n'
' docutils>=0.3\n'
' pack ==1.1, ==1.3\n'
)
with get_dist(tmpdir) as dist:
assert dist.extras_require == {
'pdf': ['ReportLab>=1.2', 'RXP'],
'rest': ['docutils>=0.3', 'pack==1.1,==1.3']
}
assert dist.metadata.provides_extras == set(['pdf', 'rest'])
def test_entry_points(self, tmpdir):
_, config = fake_env(
tmpdir,
'[options.entry_points]\n'
'group1 = point1 = pack.module:func, '
'.point2 = pack.module2:func_rest [rest]\n'
'group2 = point3 = pack.module:func2\n'
)
with get_dist(tmpdir) as dist:
assert dist.entry_points == {
'group1': [
'point1 = pack.module:func',
'.point2 = pack.module2:func_rest [rest]',
],
'group2': ['point3 = pack.module:func2']
}
expected = (
'[blogtool.parsers]\n'
'.rst = some.nested.module:SomeClass.some_classmethod[reST]\n'
)
tmpdir.join('entry_points').write(expected)
# From file.
config.write(
'[options]\n'
'entry_points = file: entry_points\n'
)
with get_dist(tmpdir) as dist:
assert dist.entry_points == expected
saved_dist_init = _Distribution.__init__
class TestExternalSetters:
# During creation of the setuptools Distribution() object, we call
# the init of the parent distutils Distribution object via
# _Distribution.__init__ ().
#
# It's possible distutils calls out to various keyword
# implementations (i.e. distutils.setup_keywords entry points)
# that may set a range of variables.
#
    # This wraps distutils' Distribution.__init__ and simulates
# pbr or something else setting these values.
def _fake_distribution_init(self, dist, attrs):
saved_dist_init(dist, attrs)
        # see self._DISTUTILS_UNSUPPORTED_METADATA
setattr(dist.metadata, 'long_description_content_type',
'text/something')
# Test overwrite setup() args
setattr(dist.metadata, 'project_urls', {
'Link One': 'https://example.com/one/',
'Link Two': 'https://example.com/two/',
})
return None
@patch.object(_Distribution, '__init__', autospec=True)
def test_external_setters(self, mock_parent_init, tmpdir):
mock_parent_init.side_effect = self._fake_distribution_init
dist = Distribution(attrs={
'project_urls': {
'will_be': 'ignored'
}
})
assert dist.metadata.long_description_content_type == 'text/something'
assert dist.metadata.project_urls == {
'Link One': 'https://example.com/one/',
'Link Two': 'https://example.com/two/',
}
| 32.154362 | 78 | 0.51889 |
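Outside the pytest harness, the same setuptools.config.read_configuration helper exercised by TestConfigurationReader can be called directly. A minimal sketch, assuming a setuptools version that exposes that import just as the record's own imports do:

import os
import tempfile
from setuptools.config import read_configuration

cfg_text = (
    "[metadata]\n"
    "name = demo_pkg\n"
    "version = 0.1.0\n"
    "keywords = one, two\n"
)
with tempfile.TemporaryDirectory() as tmp:
    path = os.path.join(tmp, "setup.cfg")
    with open(path, "w") as fh:
        fh.write(cfg_text)
    conf = read_configuration(path)
    assert conf["metadata"]["version"] == "0.1.0"
    assert conf["metadata"]["keywords"] == ["one", "two"]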
acf2216be63f118de8c25c1a29f99e2ca4d18b76 | 4,275 | py | Python | source/tests/shared/test_s3_event.py | aws-solutions/improving-forecast-accuracy-with-machine-learning | efdc3a56a8f78b0d292f209d98e43e5cf60bd8fb | [
"Apache-2.0"
] | 3 | 2021-10-30T12:53:33.000Z | 2022-02-09T06:40:56.000Z | source/tests/shared/test_s3_event.py | iyksam20/improving-forecast-accuracy-with-machine-learning | efdc3a56a8f78b0d292f209d98e43e5cf60bd8fb | [
"Apache-2.0"
] | 3 | 2022-01-04T14:17:05.000Z | 2022-03-16T15:09:43.000Z | source/tests/shared/test_s3_event.py | iyksam20/improving-forecast-accuracy-with-machine-learning | efdc3a56a8f78b0d292f209d98e43e5cf60bd8fb | [
"Apache-2.0"
] | 4 | 2021-10-30T12:53:28.000Z | 2022-02-17T04:47:47.000Z | # #####################################################################################################################
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. #
# #
# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance #
# with the License. You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed #
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for #
# the specific language governing permissions and limitations under the License. #
# #####################################################################################################################
import re
import pytest
import shared.s3.exceptions as exceptions
from shared.s3.notification import Event
@pytest.fixture(scope="function")
def event_missing_records():
return {}
@pytest.fixture(scope="function")
def event_invalid_version():
return {"Records": [{"eventVersion": "3.0"}]}
@pytest.fixture(scope="function")
def event_no_bucket():
return {
"Records": [
{
"eventVersion": "2.2",
}
]
}
@pytest.fixture(scope="function")
def event_no_key():
return {
"Records": [
{
"eventVersion": "2.2",
"s3": {
"bucket": {
"name": "test-bucket",
}
},
}
]
}
def test_s3_event_handler_missing_records(event_missing_records):
with pytest.raises(exceptions.RecordNotFound):
Event(event_missing_records)
def test_s3_event_handler_invalid_version(event_invalid_version):
with pytest.raises(exceptions.RecordNotSupported):
Event(event_invalid_version)
def test_s3_event_handler_no_bucket(event_no_bucket):
with pytest.raises(exceptions.BucketNotFound):
Event(event_no_bucket)
event_no_bucket["Records"][0]["s3"] = {}
with pytest.raises(exceptions.BucketNotFound):
Event(event_no_bucket)
event_no_bucket["Records"][0]["s3"]["bucket"] = {}
with pytest.raises(exceptions.BucketNotFound):
Event(event_no_bucket)
event_no_bucket["Records"][0]["s3"]["bucket"]["name"] = "test-bucket"
with pytest.raises(exceptions.KeyNotFound):
Event(event_no_bucket)
def test_s3_event_handler_no_key(event_no_key):
with pytest.raises(exceptions.KeyNotFound):
Event(event_no_key)
event_no_key["Records"][0]["s3"]["object"] = {}
with pytest.raises(exceptions.KeyNotFound):
Event(event_no_key)
def test_s3_event_handler_id(event_no_key):
event_no_key["Records"][0]["s3"]["object"] = {}
event_no_key["Records"][0]["s3"]["object"]["key"] = "test-key.csv"
s3_handler = Event(event_no_key)
assert s3_handler.bucket == "test-bucket"
id_matcher = re.compile(f"test-key_target_time_series_[0-9a-f]+")
id_match = s3_handler.event_id
assert id_matcher.match(id_match)
@pytest.mark.parametrize(
"url_encoded,key",
[
("test+key.csv", "test key.csv"),
("test-key.csv", "test-key.csv"),
("test%F0%9F%98%81key.csv", "test😁key.csv"),
],
)
def test_s3_event_handler_unquote_plus(event_no_key, url_encoded, key):
event_no_key["Records"][0]["s3"]["object"] = {}
event_no_key["Records"][0]["s3"]["object"]["key"] = url_encoded
s3_handler = Event(event_no_key)
assert s3_handler.bucket == "test-bucket"
assert s3_handler.key == key
| 35.040984 | 119 | 0.530526 |
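For contrast with the failure cases above, here is a minimally well-formed notification assembled from only the fields these tests exercise; a real S3 event record carries many more fields, all omitted here:

valid_event = {
    "Records": [
        {
            "eventVersion": "2.2",
            "s3": {
                "bucket": {"name": "test-bucket"},
                "object": {"key": "test+folder/report%20v1.csv"},
            },
        }
    ]
}
# Event(valid_event).bucket -> "test-bucket"
# Event(valid_event).key    -> "test folder/report v1.csv" (unquote_plus applied,
#                              as in test_s3_event_handler_unquote_plus above)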
acf222553f31c3f67db86e10fe5b98e89006fc3b | 2,427 | py | Python | EdgeDetection.py | neohope/NeoDemosImageProcess | ff08fb110464fef3433d74500792894408e26051 | [
"BSD-3-Clause"
] | null | null | null | EdgeDetection.py | neohope/NeoDemosImageProcess | ff08fb110464fef3433d74500792894408e26051 | [
"BSD-3-Clause"
] | null | null | null | EdgeDetection.py | neohope/NeoDemosImageProcess | ff08fb110464fef3433d74500792894408e26051 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/python
# -*- coding:utf-8 -*-
import cv2
import numpy as np
import matplotlib.pyplot as plt
def edge_detection(img):
    img_RGB = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) # convert BGR to RGB for display later
grayImage = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    # Gaussian filtering
gaussianBlur = cv2.GaussianBlur(grayImage, (3,3), 0)
    # binary thresholding
ret, binary = cv2.threshold(gaussianBlur, 127, 255, cv2.THRESH_BINARY)
    # Roberts operator
kernelx = np.array([[-1,0],[0,1]], dtype=int)
kernely = np.array([[0,-1],[1,0]], dtype=int)
x = cv2.filter2D(binary, cv2.CV_16S, kernelx)
y = cv2.filter2D(binary, cv2.CV_16S, kernely)
absX = cv2.convertScaleAbs(x)
absY = cv2.convertScaleAbs(y)
Roberts = cv2.addWeighted(absX, 0.5, absY, 0.5, 0)
    # Prewitt operator
kernelx = np.array([[1,1,1],[0,0,0],[-1,-1,-1]], dtype=int)
kernely = np.array([[-1,0,1],[-1,0,1],[-1,0,1]], dtype=int)
x = cv2.filter2D(binary, cv2.CV_16S, kernelx)
y = cv2.filter2D(binary, cv2.CV_16S, kernely)
absX = cv2.convertScaleAbs(x)
absY = cv2.convertScaleAbs(y)
Prewitt = cv2.addWeighted(absX,0.5,absY,0.5,0)
    # Sobel operator
x = cv2.Sobel(binary, cv2.CV_16S, 1, 0)
y = cv2.Sobel(binary, cv2.CV_16S, 0, 1)
absX = cv2.convertScaleAbs(x)
absY = cv2.convertScaleAbs(y)
Sobel = cv2.addWeighted(absX, 0.5, absY, 0.5, 0)
    # Laplacian operator
dst = cv2.Laplacian(binary, cv2.CV_16S, ksize = 3)
Laplacian = cv2.convertScaleAbs(dst)
    # Scharr operator
    x = cv2.Scharr(grayImage, cv2.CV_32F, 1, 0) # X direction
    y = cv2.Scharr(grayImage, cv2.CV_32F, 0, 1) # Y direction
absX = cv2.convertScaleAbs(x)
absY = cv2.convertScaleAbs(y)
Scharr = cv2.addWeighted(absX, 0.5, absY, 0.5, 0)
    # Canny operator
Canny = cv2.Canny(gaussianBlur, 50, 150)
    # LoG operator
    dst = cv2.Laplacian(gaussianBlur, cv2.CV_16S, ksize = 3) # then apply the Laplacian operator to the blurred image for edge detection
LOG = cv2.convertScaleAbs(dst)
images = [img_RGB, binary, Roberts, Prewitt, Sobel, Laplacian, Scharr, Canny, LOG]
titles = ['原始图像', '二值图', 'Roberts算子', 'Prewitt算子', 'Sobel算子', 'Laplacian算子', 'Scharr算子', 'Canny算子', 'LOG算子']
plt.rcParams['font.sans-serif'] = ['SimHei']
for i in range(9):
plt.subplot(3, 3, i + 1), plt.imshow(images[i], 'gray')
plt.title(titles[i])
plt.xticks([]), plt.yticks([])
plt.show()
if __name__ == '__main__':
img = cv2.imread("images/Forest_500X280.jpg")
edge_detection(img)
| 32.797297 | 112 | 0.624639 |
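The script above fuses the horizontal and vertical responses with cv2.addWeighted(absX, 0.5, absY, 0.5, 0), i.e. 0.5*|Gx| + 0.5*|Gy|, rather than the exact Euclidean magnitude sqrt(Gx^2 + Gy^2). A small numeric sketch of the difference, using synthetic gradient values so no image is required:

import numpy as np

gx = np.array([3.0, 0.0, 6.0])
gy = np.array([4.0, 5.0, 8.0])
approx = 0.5 * np.abs(gx) + 0.5 * np.abs(gy)  # what addWeighted(absX, 0.5, absY, 0.5, 0) computes
exact = np.sqrt(gx ** 2 + gy ** 2)            # true Euclidean gradient magnitude
print(approx)  # [3.5 2.5 7. ]
print(exact)   # [ 5.  5. 10.]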
acf22397c55a36a7eea943cdb9be1af1c17e9c0e | 3,271 | py | Python | purity_fb/purity_fb_1dot0/models/login_response.py | unixtreme/purity_fb_python_client | e836afe9804ffa99f74bf4b5202f181c3c04d9df | [
"Apache-2.0"
] | null | null | null | purity_fb/purity_fb_1dot0/models/login_response.py | unixtreme/purity_fb_python_client | e836afe9804ffa99f74bf4b5202f181c3c04d9df | [
"Apache-2.0"
] | null | null | null | purity_fb/purity_fb_1dot0/models/login_response.py | unixtreme/purity_fb_python_client | e836afe9804ffa99f74bf4b5202f181c3c04d9df | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Purity//FB REST Client
Client for Purity//FB REST API (1.0), developed by [Pure Storage, Inc](http://www.purestorage.com/). Documentations can be found at [purity-fb.readthedocs.io](http://purity-fb.readthedocs.io/).
OpenAPI spec version: 1.0
Contact: info@purestorage.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class LoginResponse(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'username': 'str'
}
attribute_map = {
'username': 'username'
}
def __init__(self, username=None):
"""
LoginResponse - a model defined in Swagger
"""
self._username = None
self.username = username
@property
def username(self):
"""
Gets the username of this LoginResponse.
the name of the login user
:return: The username of this LoginResponse.
:rtype: str
"""
return self._username
@username.setter
def username(self, username):
"""
Sets the username of this LoginResponse.
the name of the login user
:param username: The username of this LoginResponse.
:type: str
"""
if username is None:
raise ValueError("Invalid value for `username`, must not be `None`")
self._username = username
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, LoginResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| 25.755906 | 197 | 0.546928 |
acf224800506da1289cc8819bc504e4c22fff604 | 3,129 | py | Python | scorecard/tests/indicators/test_cash_balance.py | Code4SA/municipal-data-api | 8b213b702245bc2ff1bab4bd160c4cd3b604d54f | [
"MIT"
] | null | null | null | scorecard/tests/indicators/test_cash_balance.py | Code4SA/municipal-data-api | 8b213b702245bc2ff1bab4bd160c4cd3b604d54f | [
"MIT"
] | null | null | null | scorecard/tests/indicators/test_cash_balance.py | Code4SA/municipal-data-api | 8b213b702245bc2ff1bab4bd160c4cd3b604d54f | [
"MIT"
] | null | null | null | from ...profile_data import ApiData
from ...profile_data.indicators import (
CashBalance,
)
from . import (
import_data,
_IndicatorTestCase,
)
from .resources import (
GeographyResource,
CashFlowFactsV1Resource,
CashFlowFactsV2Resource,
)
class TestCashBalance(_IndicatorTestCase):
def test_result(self):
# Load sample data
import_data(
GeographyResource,
'cash_balance/scorecard_geography.csv'
)
import_data(
CashFlowFactsV1Resource,
'cash_balance/cash_flow_facts_v1.csv'
)
import_data(
CashFlowFactsV2Resource,
'cash_balance/cash_flow_facts_v2.csv'
)
# Fetch data from API
api_data = ApiData(self.api_client, "CPT", 2020, 2020, 2020, '2020q4')
api_data.fetch_data([
"cash_flow_v1",
"cash_flow_v2",
])
# Provide data to indicator
result = CashBalance.get_muni_specifics(api_data)
self.assertEqual(
result,
{
"result_type": "R",
"values": [
{
"date": 2020,
"result": 7101183182.0,
"rating": "good",
"cube_version": "v2"
},
{
"date": 2019,
"result": -3448597019.0,
"rating": "bad",
"cube_version": "v1"
},
{
"date": 2018,
"result": 5806824000.0,
"rating": "good",
"cube_version": "v1"
},
{
"date": 2017,
"result": 3773576000.0,
"rating": "good",
"cube_version": "v1"
}
],
"ref": {
"title": "State of Local Government Finances",
"url": "http://mfma.treasury.gov.za/Media_Releases/The%20state%20of%20local%20government%20finances/Pages/default.aspx"
},
"last_year": 2020,
"formula": {
"text": "= Cash available at year end",
"actual": [
"=",
{
"cube": "cflow",
"item_codes": ["4200"],
"amount_type": "AUDA",
}
],
},
"formula_v2": {
"text": "= Cash available at year end",
"actual": [
"=",
{
"cube": "cflow_v2",
"item_codes": ["0430"],
"amount_type": "AUDA",
}
],
},
}
)
| 31.29 | 139 | 0.3752 |
acf22489537e7b0c29caed197de9d79155ee89b3 | 1,951 | py | Python | google/cloud/servicedirectory/v1beta1/servicedirectory-v1beta1-py/google/cloud/servicedirectory_v1beta1/types/__init__.py | googleapis/googleapis-gen | d84824c78563d59b0e58d5664bfaa430e9ad7e7a | [
"Apache-2.0"
] | 7 | 2021-02-21T10:39:41.000Z | 2021-12-07T07:31:28.000Z | google/cloud/servicedirectory/v1/servicedirectory-v1-py/google/cloud/servicedirectory_v1/types/__init__.py | googleapis/googleapis-gen | d84824c78563d59b0e58d5664bfaa430e9ad7e7a | [
"Apache-2.0"
] | 6 | 2021-02-02T23:46:11.000Z | 2021-11-15T01:46:02.000Z | google/cloud/servicedirectory/v1beta1/servicedirectory-v1beta1-py/google/cloud/servicedirectory_v1beta1/types/__init__.py | googleapis/googleapis-gen | d84824c78563d59b0e58d5664bfaa430e9ad7e7a | [
"Apache-2.0"
] | 4 | 2021-01-28T23:25:45.000Z | 2021-08-30T01:55:16.000Z | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .endpoint import (
Endpoint,
)
from .lookup_service import (
ResolveServiceRequest,
ResolveServiceResponse,
)
from .namespace import (
Namespace,
)
from .registration_service import (
CreateEndpointRequest,
CreateNamespaceRequest,
CreateServiceRequest,
DeleteEndpointRequest,
DeleteNamespaceRequest,
DeleteServiceRequest,
GetEndpointRequest,
GetNamespaceRequest,
GetServiceRequest,
ListEndpointsRequest,
ListEndpointsResponse,
ListNamespacesRequest,
ListNamespacesResponse,
ListServicesRequest,
ListServicesResponse,
UpdateEndpointRequest,
UpdateNamespaceRequest,
UpdateServiceRequest,
)
from .service import (
Service,
)
__all__ = (
'Endpoint',
'ResolveServiceRequest',
'ResolveServiceResponse',
'Namespace',
'CreateEndpointRequest',
'CreateNamespaceRequest',
'CreateServiceRequest',
'DeleteEndpointRequest',
'DeleteNamespaceRequest',
'DeleteServiceRequest',
'GetEndpointRequest',
'GetNamespaceRequest',
'GetServiceRequest',
'ListEndpointsRequest',
'ListEndpointsResponse',
'ListNamespacesRequest',
'ListNamespacesResponse',
'ListServicesRequest',
'ListServicesResponse',
'UpdateEndpointRequest',
'UpdateNamespaceRequest',
'UpdateServiceRequest',
'Service',
)
| 26.013333 | 74 | 0.732445 |
acf2259744d90238365b1b18f42bf43afd98a2b1 | 2,294 | py | Python | src/python/pants/backend/docgen/target_types.py | silverguo/pants | 141510d03fbf2b7e1a0b54f66b54088697f6fa51 | [
"Apache-2.0"
] | null | null | null | src/python/pants/backend/docgen/target_types.py | silverguo/pants | 141510d03fbf2b7e1a0b54f66b54088697f6fa51 | [
"Apache-2.0"
] | null | null | null | src/python/pants/backend/docgen/target_types.py | silverguo/pants | 141510d03fbf2b7e1a0b54f66b54088697f6fa51 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from typing import Iterable, Optional, Tuple
from pants.backend.docgen.targets.doc import WikiArtifact
from pants.engine.addresses import Address
from pants.engine.target import (
COMMON_TARGET_FIELDS,
Dependencies,
SequenceField,
Sources,
StringField,
StringSequenceField,
Target,
)
class PageSources(Sources):
"""Exactly one Markdown (.md) or RST (.rst) file."""
required = True
expected_file_extensions = (".md", ".rst")
expected_num_files = 1
class PageSourcesFormat(StringField):
"""The file format for the page.
This will be inferred from the source file if not explicitly specified.
"""
alias = "format"
valid_choices = ("md", "rst")
class PageLinks(StringSequenceField):
"""Addresses to other `page` targets that this `page` links to."""
alias = "links"
# TODO: This should probably subclass `ProvidesField` so that `list --provides` will include the
# value. But, it looks like V1 doesn't do this and `list` wouldn't know how to handle this being
# a sequence.
class PageProvides(SequenceField):
"""Wiki locations at which to publish the page."""
expected_element_type = WikiArtifact
expected_type_description = "an iterable of `wiki_artifact` objects (e.g. a list)"
@classmethod
def compute_value(
cls, raw_value: Optional[Iterable[WikiArtifact]], *, address: Address
) -> Optional[Tuple[WikiArtifact, ...]]:
return super().compute_value(raw_value, address=address)
class Page(Target):
"""A documentation page.
Here is an example, which shows a markdown page providing a wiki page on an Atlassian Confluence
Wiki:
page(
name='home_page',
source='home_page.md',
provides=[
wiki_artifact(
wiki=Wiki('foozle', <url builder>),
space='my_space',
title='my_page',
parent='my_parent',
),
],
)
"""
alias = "page"
core_fields = (*COMMON_TARGET_FIELDS, Dependencies, PageSources, PageSourcesFormat, PageLinks)
v1_only = True
| 27.97561 | 100 | 0.6517 |
acf225f940504370001c764984f8b71ba6ec0b50 | 3,676 | py | Python | fseval/config/pipeline.py | shenlong95/fseval | 4d2e6618b8838f9e52fe60621b08595dc4c5b4fb | [
"MIT"
] | 1 | 2022-02-11T03:24:23.000Z | 2022-02-11T03:24:23.000Z | fseval/config/pipeline.py | shenlong95/fseval | 4d2e6618b8838f9e52fe60621b08595dc4c5b4fb | [
"MIT"
] | null | null | null | fseval/config/pipeline.py | shenlong95/fseval | 4d2e6618b8838f9e52fe60621b08595dc4c5b4fb | [
"MIT"
] | null | null | null | from dataclasses import dataclass, field
from typing import Any, Dict, List, Optional
from hydra.core.config_store import ConfigStore
from omegaconf import MISSING
from .cross_validator import CrossValidatorConfig
from .dataset import DatasetConfig
from .estimator import EstimatorConfig
from .resample import ResampleConfig
from .storage import StorageConfig
cs = ConfigStore.instance()
@dataclass
class PipelineConfig:
"""
The complete configuration needed to run the fseval pipeline.
Attributes:
dataset (DatasetConfig): Determines the dataset to use for this experiment.
cv (CrossValidatorConfig): The CV method and split to use in this experiment.
resample (ResampleConfig): Dataset resampling; e.g. with or without replacement.
ranker (EstimatorConfig): A Feature Ranker or Feature Selector.
validator (EstimatorConfig): Some estimator to validate the feature subsets.
storage (StorageConfig): A storage method used to store the fit estimators.
callbacks (Dict[str, Any]): Callbacks. Provide hooks for storing the config or
results.
metrics (Dict[str, Any]): Metrics allow custom computation after any pipeline
stage.
n_bootstraps (int): Amount of 'bootstraps' to run. A bootstrap means running
the pipeline again but with a resampled (see `resample`) version of the
dataset. This allows estimating stability, for example.
n_jobs (Optional[int]): Amount of CPU's to use for computing each bootstrap.
This thus distributes the amount of bootstraps over CPU's.
all_features_to_select (str): Once the ranker has been fit, this determines
the feature subsets to validate. By default, at most 50 subsets containing
the highest ranked features are validated. The format of this parameter is
a string that can contain an arbitrary Python expression. The condition is
that the expression must evaluate to a `List[int]` object. For example, the
default is: `range(1, min(50, p) + 1)`. Each number in the list is passed
to the `sklearn.feature_selection.SelectFromModel` as the `max_features`
parameter. To see how the expression is evaluated, check out the
`fseval.pipelines.rank_and_validate._dataset_validator` module.
defaults (List[Any]): Default values for the above.
"""
dataset: DatasetConfig = MISSING
cv: CrossValidatorConfig = MISSING
resample: ResampleConfig = MISSING
ranker: EstimatorConfig = MISSING
validator: EstimatorConfig = MISSING
storage: StorageConfig = MISSING
callbacks: Dict[str, Any] = field(default_factory=lambda: {})
metrics: Dict[str, Any] = field(default_factory=lambda: {})
n_bootstraps: int = 1
n_jobs: Optional[int] = 1
all_features_to_select: str = "range(1, min(50, p) + 1)"
# default values for the above.
defaults: List[Any] = field(
default_factory=lambda: [
"_self_",
{"dataset": MISSING},
{"cv": "kfold"},
{"resample": "shuffle"},
{"ranker": MISSING},
{"validator": MISSING},
{"storage": "local"},
{"callbacks": []},
{"metrics": ["feature_importances", "ranking_scores", "validation_scores"]},
{"override hydra/job_logging": "colorlog"},
{"override hydra/hydra_logging": "colorlog"},
]
)
# required for instantiation
_target_: str = "fseval.pipelines.rank_and_validate.BootstrappedRankAndValidate"
cs.store("base_pipeline_config", PipelineConfig)
| 44.289157 | 88 | 0.679543 |
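The all_features_to_select field is documented above as an arbitrary Python expression over p (the number of features) that must evaluate to a list of subset sizes. A sketch of how such an expression can be evaluated; the real evaluation lives in fseval.pipelines.rank_and_validate._dataset_validator and may differ in detail:

p = 120  # number of features in a hypothetical dataset
expr = "range(1, min(50, p) + 1)"  # the documented default
subset_sizes = list(eval(expr, {"min": min, "range": range, "p": p}))
assert subset_sizes[0] == 1 and subset_sizes[-1] == 50 and len(subset_sizes) == 50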