hexsha stringlengths 40 40 | size int64 1 1.03M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 239 | max_stars_repo_name stringlengths 5 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 239 | max_issues_repo_name stringlengths 5 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 239 | max_forks_repo_name stringlengths 5 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.03M | avg_line_length float64 1 958k | max_line_length int64 1 1.03M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
acf60fb1aef62c0269592bb0a74fe1beecaea455 | 4,409 | py | Python | scripts/sar/extra/yago3/UCL_ARRAY_YAGO3_SAR_SMALLDATA_v1.py | issca/inferbeddings | 80492a7aebcdcac21e758514c8af403d77e8594a | [
"MIT"
] | 33 | 2017-07-25T14:31:00.000Z | 2019-03-06T09:18:00.000Z | scripts/sar/extra/yago3/UCL_ARRAY_YAGO3_SAR_SMALLDATA_v1.py | issca/inferbeddings | 80492a7aebcdcac21e758514c8af403d77e8594a | [
"MIT"
] | 1 | 2017-08-22T13:49:30.000Z | 2017-08-22T13:49:30.000Z | scripts/sar/extra/yago3/UCL_ARRAY_YAGO3_SAR_SMALLDATA_v1.py | issca/inferbeddings | 80492a7aebcdcac21e758514c8af403d77e8594a | [
"MIT"
] | 9 | 2017-10-05T08:50:45.000Z | 2019-04-18T12:40:56.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import itertools
import os
import os.path
import sys
import argparse
import logging
def cartesian_product(dicts):
    """Expand a dict of option lists into a list of all key/value combinations."""
    keys = list(dicts.keys())
    combinations = itertools.product(*(dicts[key] for key in keys))
    return [dict(zip(keys, values)) for values in combinations]
def summary(configuration):
    """Build a stable ``key=value`` identifier string, sorted by key."""
    parts = []
    for key in sorted(configuration):
        parts.append('{}={}'.format(key, configuration[key]))
    return '_'.join(parts)
def to_cmd(c, _path=None):
    """Render the kbp-cli.py command line for one hyper-parameter configuration.

    :param c: configuration dict with keys 'unit_cube', 'loss', 'clauses',
        'epochs', 'model', 'similarity', 'margin', 'embedding_size',
        'subsample_size', 'sar_weight' and 'sar_similarity'.
    :param _path: root of the inferbeddings checkout; defaults to the
        cluster-local path.
    :return: shell command string.
    :raises ValueError: if ``c['loss']`` is not a supported loss name.
    """
    if _path is None:
        _path = '/home/pminervi/workspace/inferbeddings/'
    unit_cube_str = '--unit-cube' if c['unit_cube'] else ''
    # Fail loudly on an unknown loss: the original `assert loss_str is not None`
    # could never fire because loss_str started out as '' rather than None, so a
    # misconfigured loss silently produced a command with no loss flag at all.
    if c['loss'] == 'hinge':
        loss_str = '--loss hinge'
    elif c['loss'] == 'pairwise_hinge':
        loss_str = '--pairwise-loss hinge'
    else:
        raise ValueError('Unknown loss: {}'.format(c['loss']))
    # RESCAL-style models use a (d x d) predicate matrix, hence embedding_size ** 2.
    command = 'python3 {}/bin/kbp-cli.py' \
              ' --train {}/data/yago3_mte10_5k/yago3_mte10-train.tsv.gz' \
              ' --valid {}/data/yago3_mte10_5k/yago3_mte10-valid.tsv.gz' \
              ' --test {}/data/yago3_mte10_5k/yago3_mte10-test.tsv.gz' \
              ' --clauses {}/data/yago3_mte10_5k/clauses/{}' \
              ' --nb-epochs {}' \
              ' --nb-batches 10' \
              ' --model {}' \
              ' --similarity {}' \
              ' --margin {}' \
              ' --entity-embedding-size {}' \
              ' --predicate-embedding-size {}' \
              ' --subsample-size {}' \
              ' {} {} --sar-weight {} --sar-similarity {}' \
              ''.format(_path, _path, _path, _path, _path,
                        c['clauses'], c['epochs'],
                        c['model'], c['similarity'],
                        c['margin'], c['embedding_size'], c['embedding_size'] ** 2, c['subsample_size'],
                        loss_str, unit_cube_str, c['sar_weight'], c['sar_similarity'])
    return command
def to_logfile(c, path):
    """Return the log-file path uniquely identifying configuration *c*."""
    filename = 'ucl_yago3_sar_smalldata_v1.{}.log'.format(summary(c))
    return '{}/{}'.format(path, filename)
def main(argv):
    """Generate an SGE array-job script covering the hyper-parameter grid.

    One command line is emitted per configuration whose log file does not yet
    contain final results; the complete job script is printed to stdout.

    :param argv: command-line arguments (without the program name).
    """
    def formatter(prog):
        # Wide help formatting so long option descriptions stay on one line.
        return argparse.HelpFormatter(prog, max_help_position=100, width=200)
    argparser = argparse.ArgumentParser('Generating experiments for the UCL cluster', formatter_class=formatter)
    argparser.add_argument('--debug', '-D', action='store_true', help='Debug flag')
    argparser.add_argument('--path', '-p', action='store', type=str, default=None, help='Path')
    args = argparser.parse_args(argv)
    # Hyper-parameter grid: one job is generated per combination of these values.
    hyperparameters_space_1 = dict(
        epochs=[1000],
        model=['RESCAL'],
        similarity=['dot'],
        margin=[1],  # margin=[1, 2, 5, 10],
        embedding_size=[10, 20, 30, 40, 50, 100],
        unit_cube=[True, False],
        sar_weight=[0, .0001, .01, 1, 100, 10000, 1000000],
        sar_similarity=['dot', 'l2_sqr'],
        subsample_size=[0.1, 0.3, 0.5, 0.7, 0.9, 1],
        loss=['hinge', 'pairwise_hinge'],
        clauses=['clauses_equivalencies.pl']
    )
    configurations = cartesian_product(hyperparameters_space_1)
    path = '/home/pminervi/workspace/inferbeddings/logs/sar/extra/ucl_yago3_sar_smalldata_v1/'
    # Check that we are on the UCLCS cluster first
    if os.path.exists('/home/pminervi/'):
        # If the folder that will contain logs does not exist, create it
        if not os.path.exists(path):
            os.makedirs(path)
    # NOTE(review): cartesian_product already returns a list, so this copy is redundant.
    configurations = list(configurations)
    command_lines = set()
    for cfg in configurations:
        logfile = to_logfile(cfg, path)
        # A run counts as completed once its log contains the final metrics marker,
        # so re-running the generator only schedules unfinished configurations.
        completed = False
        if os.path.isfile(logfile):
            with open(logfile, 'r', encoding='utf-8', errors='ignore') as f:
                content = f.read()
                completed = '### MICRO (test filtered)' in content
        if not completed:
            # Append both stdout and stderr of the experiment to its log file.
            command_line = '{} >> {} 2>&1'.format(to_cmd(cfg, _path=args.path), logfile)
            command_lines |= {command_line}
    # Sort command lines and remove duplicates
    sorted_command_lines = sorted(command_lines)
    nb_jobs = len(sorted_command_lines)
    # SGE array-job header: one task per remaining command line.
    header = """#!/bin/bash
#$ -cwd
#$ -S /bin/bash
#$ -o /dev/null
#$ -e /dev/null
#$ -t 1-{}
#$ -l h_vmem=12G,tmem=12G
#$ -l h_rt=6:00:00
""".format(nb_jobs)
    print(header)
    # Each array task runs exactly the command whose index matches $SGE_TASK_ID.
    for job_id, command_line in enumerate(sorted_command_lines, 1):
        print('test $SGE_TASK_ID -eq {} && {}'.format(job_id, command_line))
# Script entry point: log at INFO level and hand the CLI arguments to main().
if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    main(sys.argv[1:])
| 32.902985 | 112 | 0.584259 |
acf61000a45cb4b2e899638fb3310aed38d04d3a | 20,149 | py | Python | ansys/dpf/core/operators/result/cyclic_volume.py | jfthuong/pydpf-core | bf2895ebc546e0004f759289bfc9a23196559ac3 | [
"MIT"
] | 18 | 2021-10-16T10:38:29.000Z | 2022-03-29T11:26:42.000Z | ansys/dpf/core/operators/result/cyclic_volume.py | jfthuong/pydpf-core | bf2895ebc546e0004f759289bfc9a23196559ac3 | [
"MIT"
] | 79 | 2021-10-11T23:18:54.000Z | 2022-03-29T14:53:14.000Z | ansys/dpf/core/operators/result/cyclic_volume.py | jfthuong/pydpf-core | bf2895ebc546e0004f759289bfc9a23196559ac3 | [
"MIT"
] | 5 | 2021-11-29T18:35:37.000Z | 2022-03-16T16:49:21.000Z | """
cyclic_volume
===============
Autogenerated DPF operator classes.
"""
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
class cyclic_volume(Operator):
    """Read mapdl::rst::ENG_VOL from an rst file.

    Parameters
    ----------
    time_scoping : Scoping, optional
    mesh_scoping : ScopingsContainer or Scoping, optional
    fields_container : FieldsContainer, optional
        Fieldscontainer already allocated, modified in place.
    streams_container : StreamsContainer or Stream, optional
        Streams containing the result file.
    data_sources : DataSources
        Data sources containing the result file.
    bool_rotate_to_global : bool, optional
        If true the field is rotated to the global
        coordinate system (default true).
    sector_mesh : MeshedRegion or MeshesContainer, optional
        Mesh of the base sector (can be a skin).
    read_cyclic : int, optional
        If 0 cyclic symmetry is ignored, if 1 the cyclic
        sector is read, if 2 cyclic expansion
        is done, if 3 cyclic expansion is
        done and stages are merged (default
        is 1).
    expanded_meshed_region : MeshedRegion or MeshesContainer, optional
        Mesh expanded.
    cyclic_support : CyclicSupport, optional

    Examples
    --------
    >>> from ansys.dpf import core as dpf

    >>> # Instantiate operator
    >>> op = dpf.operators.result.cyclic_volume()

    >>> # Make input connections
    >>> my_time_scoping = dpf.Scoping()
    >>> op.inputs.time_scoping.connect(my_time_scoping)
    >>> my_mesh_scoping = dpf.ScopingsContainer()
    >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
    >>> my_fields_container = dpf.FieldsContainer()
    >>> op.inputs.fields_container.connect(my_fields_container)
    >>> my_streams_container = dpf.StreamsContainer()
    >>> op.inputs.streams_container.connect(my_streams_container)
    >>> my_data_sources = dpf.DataSources()
    >>> op.inputs.data_sources.connect(my_data_sources)
    >>> my_bool_rotate_to_global = bool()
    >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
    >>> my_sector_mesh = dpf.MeshedRegion()
    >>> op.inputs.sector_mesh.connect(my_sector_mesh)
    >>> my_read_cyclic = int()
    >>> op.inputs.read_cyclic.connect(my_read_cyclic)
    >>> my_expanded_meshed_region = dpf.MeshedRegion()
    >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region)
    >>> my_cyclic_support = dpf.CyclicSupport()
    >>> op.inputs.cyclic_support.connect(my_cyclic_support)

    >>> # Instantiate operator and connect inputs in one line
    >>> op = dpf.operators.result.cyclic_volume(
    ...     time_scoping=my_time_scoping,
    ...     mesh_scoping=my_mesh_scoping,
    ...     fields_container=my_fields_container,
    ...     streams_container=my_streams_container,
    ...     data_sources=my_data_sources,
    ...     bool_rotate_to_global=my_bool_rotate_to_global,
    ...     sector_mesh=my_sector_mesh,
    ...     read_cyclic=my_read_cyclic,
    ...     expanded_meshed_region=my_expanded_meshed_region,
    ...     cyclic_support=my_cyclic_support,
    ... )

    >>> # Get output data
    >>> result_fields_container = op.outputs.fields_container()
    >>> result_expanded_meshes = op.outputs.expanded_meshes()
    """

    # NOTE: this operator wrapper is autogenerated (see the module docstring);
    # manual edits may be overwritten the next time the classes are regenerated.
    def __init__(
        self,
        time_scoping=None,
        mesh_scoping=None,
        fields_container=None,
        streams_container=None,
        data_sources=None,
        bool_rotate_to_global=None,
        sector_mesh=None,
        read_cyclic=None,
        expanded_meshed_region=None,
        cyclic_support=None,
        config=None,
        server=None,
    ):
        super().__init__(
            name="mapdl::rst::ENG_VOL_cyclic", config=config, server=server
        )
        self._inputs = InputsCyclicVolume(self)
        self._outputs = OutputsCyclicVolume(self)
        # Only connect the pins the caller actually supplied; the rest keep
        # their server-side defaults.
        if time_scoping is not None:
            self.inputs.time_scoping.connect(time_scoping)
        if mesh_scoping is not None:
            self.inputs.mesh_scoping.connect(mesh_scoping)
        if fields_container is not None:
            self.inputs.fields_container.connect(fields_container)
        if streams_container is not None:
            self.inputs.streams_container.connect(streams_container)
        if data_sources is not None:
            self.inputs.data_sources.connect(data_sources)
        if bool_rotate_to_global is not None:
            self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global)
        if sector_mesh is not None:
            self.inputs.sector_mesh.connect(sector_mesh)
        if read_cyclic is not None:
            self.inputs.read_cyclic.connect(read_cyclic)
        if expanded_meshed_region is not None:
            self.inputs.expanded_meshed_region.connect(expanded_meshed_region)
        if cyclic_support is not None:
            self.inputs.cyclic_support.connect(cyclic_support)
    @staticmethod
    def _spec():
        # Static pin specification for this operator; pin numbers are part of
        # the server-side operator contract (note the gaps: 5 -> 7 -> 14).
        description = """Read mapdl::rst::ENG_VOL from an rst file."""
        spec = Specification(
            description=description,
            map_input_pin_spec={
                0: PinSpecification(
                    name="time_scoping",
                    type_names=["scoping", "vector<int32>"],
                    optional=True,
                    document="""""",
                ),
                1: PinSpecification(
                    name="mesh_scoping",
                    type_names=["scopings_container", "scoping", "vector<int32>"],
                    optional=True,
                    document="""""",
                ),
                2: PinSpecification(
                    name="fields_container",
                    type_names=["fields_container"],
                    optional=True,
                    document="""Fieldscontainer already allocated modified
        inplace""",
                ),
                3: PinSpecification(
                    name="streams_container",
                    type_names=["streams_container", "stream"],
                    optional=True,
                    document="""Streams containing the result file.""",
                ),
                4: PinSpecification(
                    name="data_sources",
                    type_names=["data_sources"],
                    optional=False,
                    document="""Data sources containing the result file.""",
                ),
                5: PinSpecification(
                    name="bool_rotate_to_global",
                    type_names=["bool"],
                    optional=True,
                    document="""If true the field is roated to global
        coordinate system (default true)""",
                ),
                7: PinSpecification(
                    name="sector_mesh",
                    type_names=["abstract_meshed_region", "meshes_container"],
                    optional=True,
                    document="""Mesh of the base sector (can be a skin).""",
                ),
                14: PinSpecification(
                    name="read_cyclic",
                    type_names=["enum dataProcessing::ECyclicReading", "int32"],
                    optional=True,
                    document="""If 0 cyclic symmetry is ignored, if 1 cyclic
        sector is read, if 2 cyclic expansion
        is done, if 3 cyclic expansion is
        done and stages are merged (default
        is 1)""",
                ),
                15: PinSpecification(
                    name="expanded_meshed_region",
                    type_names=["abstract_meshed_region", "meshes_container"],
                    optional=True,
                    document="""Mesh expanded.""",
                ),
                16: PinSpecification(
                    name="cyclic_support",
                    type_names=["cyclic_support"],
                    optional=True,
                    document="""""",
                ),
            },
            map_output_pin_spec={
                0: PinSpecification(
                    name="fields_container",
                    type_names=["fields_container"],
                    optional=False,
                    document="""Fieldscontainer filled in""",
                ),
                1: PinSpecification(
                    name="expanded_meshes",
                    type_names=["meshes_container"],
                    optional=False,
                    document="""""",
                ),
            },
        )
        return spec
    @staticmethod
    def default_config(server=None):
        """Returns the default config of the operator.

        This config can then be changed to the user needs and be used to
        instantiate the operator. The Configuration allows to customize
        how the operation will be processed by the operator.

        Parameters
        ----------
        server : server.DPFServer, optional
            Server with channel connected to the remote or local instance. When
            ``None``, attempts to use the global server.
        """
        return Operator.default_config(name="mapdl::rst::ENG_VOL_cyclic", server=server)
    @property
    def inputs(self):
        """Enables to connect inputs to the operator

        Returns
        --------
        inputs : InputsCyclicVolume
        """
        return super().inputs
    @property
    def outputs(self):
        """Enables to get outputs of the operator by evaluating it

        Returns
        --------
        outputs : OutputsCyclicVolume
        """
        return super().outputs
class InputsCyclicVolume(_Inputs):
    """Intermediate class used to connect user inputs to
    cyclic_volume operator.

    Examples
    --------
    >>> from ansys.dpf import core as dpf
    >>> op = dpf.operators.result.cyclic_volume()
    >>> my_time_scoping = dpf.Scoping()
    >>> op.inputs.time_scoping.connect(my_time_scoping)
    >>> my_mesh_scoping = dpf.ScopingsContainer()
    >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
    >>> my_fields_container = dpf.FieldsContainer()
    >>> op.inputs.fields_container.connect(my_fields_container)
    >>> my_streams_container = dpf.StreamsContainer()
    >>> op.inputs.streams_container.connect(my_streams_container)
    >>> my_data_sources = dpf.DataSources()
    >>> op.inputs.data_sources.connect(my_data_sources)
    >>> my_bool_rotate_to_global = bool()
    >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
    >>> my_sector_mesh = dpf.MeshedRegion()
    >>> op.inputs.sector_mesh.connect(my_sector_mesh)
    >>> my_read_cyclic = int()
    >>> op.inputs.read_cyclic.connect(my_read_cyclic)
    >>> my_expanded_meshed_region = dpf.MeshedRegion()
    >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region)
    >>> my_cyclic_support = dpf.CyclicSupport()
    >>> op.inputs.cyclic_support.connect(my_cyclic_support)
    """

    # NOTE: autogenerated input wrapper (see the module docstring); each Input
    # below mirrors one pin of cyclic_volume._spec().
    def __init__(self, op: Operator):
        super().__init__(cyclic_volume._spec().inputs, op)
        self._time_scoping = Input(cyclic_volume._spec().input_pin(0), 0, op, -1)
        self._inputs.append(self._time_scoping)
        self._mesh_scoping = Input(cyclic_volume._spec().input_pin(1), 1, op, -1)
        self._inputs.append(self._mesh_scoping)
        self._fields_container = Input(cyclic_volume._spec().input_pin(2), 2, op, -1)
        self._inputs.append(self._fields_container)
        self._streams_container = Input(cyclic_volume._spec().input_pin(3), 3, op, -1)
        self._inputs.append(self._streams_container)
        self._data_sources = Input(cyclic_volume._spec().input_pin(4), 4, op, -1)
        self._inputs.append(self._data_sources)
        self._bool_rotate_to_global = Input(
            cyclic_volume._spec().input_pin(5), 5, op, -1
        )
        self._inputs.append(self._bool_rotate_to_global)
        self._sector_mesh = Input(cyclic_volume._spec().input_pin(7), 7, op, -1)
        self._inputs.append(self._sector_mesh)
        self._read_cyclic = Input(cyclic_volume._spec().input_pin(14), 14, op, -1)
        self._inputs.append(self._read_cyclic)
        self._expanded_meshed_region = Input(
            cyclic_volume._spec().input_pin(15), 15, op, -1
        )
        self._inputs.append(self._expanded_meshed_region)
        self._cyclic_support = Input(cyclic_volume._spec().input_pin(16), 16, op, -1)
        self._inputs.append(self._cyclic_support)
    @property
    def time_scoping(self):
        """Allows to connect time_scoping input to the operator.

        Parameters
        ----------
        my_time_scoping : Scoping

        Examples
        --------
        >>> from ansys.dpf import core as dpf
        >>> op = dpf.operators.result.cyclic_volume()
        >>> op.inputs.time_scoping.connect(my_time_scoping)
        >>> # or
        >>> op.inputs.time_scoping(my_time_scoping)
        """
        return self._time_scoping
    @property
    def mesh_scoping(self):
        """Allows to connect mesh_scoping input to the operator.

        Parameters
        ----------
        my_mesh_scoping : ScopingsContainer or Scoping

        Examples
        --------
        >>> from ansys.dpf import core as dpf
        >>> op = dpf.operators.result.cyclic_volume()
        >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
        >>> # or
        >>> op.inputs.mesh_scoping(my_mesh_scoping)
        """
        return self._mesh_scoping
    @property
    def fields_container(self):
        """Allows to connect fields_container input to the operator.

        Fieldscontainer already allocated, modified in place.

        Parameters
        ----------
        my_fields_container : FieldsContainer

        Examples
        --------
        >>> from ansys.dpf import core as dpf
        >>> op = dpf.operators.result.cyclic_volume()
        >>> op.inputs.fields_container.connect(my_fields_container)
        >>> # or
        >>> op.inputs.fields_container(my_fields_container)
        """
        return self._fields_container
    @property
    def streams_container(self):
        """Allows to connect streams_container input to the operator.

        Streams containing the result file.

        Parameters
        ----------
        my_streams_container : StreamsContainer or Stream

        Examples
        --------
        >>> from ansys.dpf import core as dpf
        >>> op = dpf.operators.result.cyclic_volume()
        >>> op.inputs.streams_container.connect(my_streams_container)
        >>> # or
        >>> op.inputs.streams_container(my_streams_container)
        """
        return self._streams_container
    @property
    def data_sources(self):
        """Allows to connect data_sources input to the operator.

        Data sources containing the result file.

        Parameters
        ----------
        my_data_sources : DataSources

        Examples
        --------
        >>> from ansys.dpf import core as dpf
        >>> op = dpf.operators.result.cyclic_volume()
        >>> op.inputs.data_sources.connect(my_data_sources)
        >>> # or
        >>> op.inputs.data_sources(my_data_sources)
        """
        return self._data_sources
    @property
    def bool_rotate_to_global(self):
        """Allows to connect bool_rotate_to_global input to the operator.

        If true the field is rotated to the global
        coordinate system (default true).

        Parameters
        ----------
        my_bool_rotate_to_global : bool

        Examples
        --------
        >>> from ansys.dpf import core as dpf
        >>> op = dpf.operators.result.cyclic_volume()
        >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global)
        >>> # or
        >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global)
        """
        return self._bool_rotate_to_global
    @property
    def sector_mesh(self):
        """Allows to connect sector_mesh input to the operator.

        Mesh of the base sector (can be a skin).

        Parameters
        ----------
        my_sector_mesh : MeshedRegion or MeshesContainer

        Examples
        --------
        >>> from ansys.dpf import core as dpf
        >>> op = dpf.operators.result.cyclic_volume()
        >>> op.inputs.sector_mesh.connect(my_sector_mesh)
        >>> # or
        >>> op.inputs.sector_mesh(my_sector_mesh)
        """
        return self._sector_mesh
    @property
    def read_cyclic(self):
        """Allows to connect read_cyclic input to the operator.

        If 0 cyclic symmetry is ignored, if 1 the cyclic
        sector is read, if 2 cyclic expansion
        is done, if 3 cyclic expansion is
        done and stages are merged (default
        is 1).

        Parameters
        ----------
        my_read_cyclic : int

        Examples
        --------
        >>> from ansys.dpf import core as dpf
        >>> op = dpf.operators.result.cyclic_volume()
        >>> op.inputs.read_cyclic.connect(my_read_cyclic)
        >>> # or
        >>> op.inputs.read_cyclic(my_read_cyclic)
        """
        return self._read_cyclic
    @property
    def expanded_meshed_region(self):
        """Allows to connect expanded_meshed_region input to the operator.

        Mesh expanded.

        Parameters
        ----------
        my_expanded_meshed_region : MeshedRegion or MeshesContainer

        Examples
        --------
        >>> from ansys.dpf import core as dpf
        >>> op = dpf.operators.result.cyclic_volume()
        >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region)
        >>> # or
        >>> op.inputs.expanded_meshed_region(my_expanded_meshed_region)
        """
        return self._expanded_meshed_region
    @property
    def cyclic_support(self):
        """Allows to connect cyclic_support input to the operator.

        Parameters
        ----------
        my_cyclic_support : CyclicSupport

        Examples
        --------
        >>> from ansys.dpf import core as dpf
        >>> op = dpf.operators.result.cyclic_volume()
        >>> op.inputs.cyclic_support.connect(my_cyclic_support)
        >>> # or
        >>> op.inputs.cyclic_support(my_cyclic_support)
        """
        return self._cyclic_support
class OutputsCyclicVolume(_Outputs):
    """Intermediate class used to get outputs from
    cyclic_volume operator.

    Examples
    --------
    >>> from ansys.dpf import core as dpf
    >>> op = dpf.operators.result.cyclic_volume()
    >>> # Connect inputs : op.inputs. ...
    >>> result_fields_container = op.outputs.fields_container()
    >>> result_expanded_meshes = op.outputs.expanded_meshes()
    """

    # NOTE: autogenerated output wrapper (see the module docstring); each Output
    # below mirrors one output pin of cyclic_volume._spec().
    def __init__(self, op: Operator):
        super().__init__(cyclic_volume._spec().outputs, op)
        self._fields_container = Output(cyclic_volume._spec().output_pin(0), 0, op)
        self._outputs.append(self._fields_container)
        self._expanded_meshes = Output(cyclic_volume._spec().output_pin(1), 1, op)
        self._outputs.append(self._expanded_meshes)
    @property
    def fields_container(self):
        """Allows to get fields_container output of the operator

        Returns
        ----------
        my_fields_container : FieldsContainer

        Examples
        --------
        >>> from ansys.dpf import core as dpf
        >>> op = dpf.operators.result.cyclic_volume()
        >>> # Connect inputs : op.inputs. ...
        >>> result_fields_container = op.outputs.fields_container()
        """  # noqa: E501
        return self._fields_container
    @property
    def expanded_meshes(self):
        """Allows to get expanded_meshes output of the operator

        Returns
        ----------
        my_expanded_meshes : MeshesContainer

        Examples
        --------
        >>> from ansys.dpf import core as dpf
        >>> op = dpf.operators.result.cyclic_volume()
        >>> # Connect inputs : op.inputs. ...
        >>> result_expanded_meshes = op.outputs.expanded_meshes()
        """  # noqa: E501
        return self._expanded_meshes
| 35.349123 | 88 | 0.604794 |
acf6109ac4ee4bdd897c5255bd535cf887876d48 | 4,679 | py | Python | tests/milvus_http_test/collections/test_get_info.py | mabergerx/milvus | 1c48f7f4e47252f8aa2b3c09c9289a9444f5887c | [
"Apache-2.0"
] | 3 | 2020-03-13T10:10:08.000Z | 2022-01-30T07:12:36.000Z | tests/milvus_http_test/collections/test_get_info.py | mppmys/milvus | 8605a7f12276c4a9b3b2772535c9575b199bb101 | [
"Apache-2.0"
] | 2 | 2020-08-20T07:17:50.000Z | 2020-08-21T04:21:34.000Z | tests/milvus_http_test/collections/test_get_info.py | mppmys/milvus | 8605a7f12276c4a9b3b2772535c9575b199bb101 | [
"Apache-2.0"
] | 1 | 2020-10-26T12:00:39.000Z | 2020-10-26T12:00:39.000Z | import logging
import pytest
import time
import copy
from utils import *
from constants import *
uid = "info_collection"
class TestInfoBase:
    """
    ******************************************************************
    The following cases are used to test `get_collection_info` function, no data in collection
    ******************************************************************
    """
    def test_get_collection_info(self, client, collection):
        """
        target: test get collection info with normal collection
        method: create collection with default fields and get collection info
        expected: no exception raised, and value returned correct
        """
        info = client.info_collection(collection)
        assert info['count'] == 0
        assert info['auto_id'] == True
        assert info['segment_row_limit'] == default_segment_row_limit
        assert len(info["fields"]) == 3
        # Each default field name must match its declared type.
        for field in info['fields']:
            if field['type'] == 'INT64':
                assert field['name'] == default_int_field_name
            if field['type'] == 'FLOAT':
                assert field['name'] == default_float_field_name
            if field['type'] == 'VECTOR_FLOAT':
                assert field['name'] == default_float_vec_field_name
    def test_get_collection_info_segment_row_limit(self, client, collection):
        """
        target: test get collection info with non-default segment row limit
        method: create collection with non-default segment row limit and get collection info
        expected: no exception raised
        """
        segment_row_limit = 4096
        collection_name = gen_unique_str(uid)
        fields = copy.deepcopy(default_fields)
        fields["segment_row_limit"] = segment_row_limit
        client.create_collection(collection_name, fields)
        # NOTE(review): this inserts/flushes the fixture `collection`, not the
        # freshly created `collection_name` that is checked below — looks like a
        # copy-paste slip; confirm whether the insert should target collection_name.
        client.insert(collection, default_entities)
        client.flush([collection])
        info = client.info_collection(collection_name)
        assert info['segment_row_limit'] == segment_row_limit
    def test_get_collection_info_id_collection(self, client, id_collection):
        """
        target: test get collection info with id collection
        method: create id collection with auto_id=False and get collection info
        expected: no exception raised
        """
        info = client.info_collection(id_collection)
        assert info['count'] == 0
        assert info['auto_id'] == False
        assert info['segment_row_limit'] == default_segment_row_limit
        assert len(info["fields"]) == 3
    def test_get_collection_info_with_collection_not_existed(self, client):
        """
        target: test get collection info with not existed collection
        method: call get collection info with random collection name which not in db
        expected: not ok
        """
        collection_name = gen_unique_str(uid)
        assert not client.info_collection(collection_name)
    @pytest.fixture(
        scope="function",
        params=[
            1,
            "12-s",
            " ",
            "12 s",
            " siede ",
            "(mn)",
            "中文",
            "a".join("a" for i in range(256))
        ]
    )
    def get_invalid_collection_name(self, request):
        # Parametrized fixture yielding names that must be rejected: wrong type,
        # illegal characters, whitespace, non-ASCII, and an over-long (511-char) name.
        yield request.param
    def test_get_collection_info_collection_name_invalid(self, client, get_invalid_collection_name):
        """
        target: test get collection info with an invalid collection name
        method: call info_collection with each invalid name from the fixture
        expected: not ok
        """
        collection_name = get_invalid_collection_name
        assert not client.info_collection(collection_name)
    def test_row_count_after_insert(self, client, collection):
        """
        target: test the change of collection row count after insert data
        method: insert entities to collection and get collection info
        expected: row count increase
        """
        info = client.info_collection(collection)
        assert info['count'] == 0
        assert client.insert(collection, default_entities)
        client.flush([collection])
        info = client.info_collection(collection)
        assert info['count'] == default_nb
    def test_get_collection_info_after_index_created(self, client, collection):
        """
        target: test index of collection info after index created
        method: create index and get collection info
        expected: no exception raised
        """
        index = {"index_type": "IVF_FLAT", "params": {"nlist": 128}, "metric_type": "L2"}
        # NOTE(review): `res` is unused — the create_index return value is not checked.
        res = client.create_index(collection, default_float_vec_field_name, index)
        info = client.info_collection(collection)
        for field in info['fields']:
            if field['name'] == default_float_vec_field_name:
                assert field['index_params'] == index
acf610ae211e0a618688ea5d11cadfbda4492062 | 6,888 | py | Python | starfish/test/spots/detector/test_spot_detection.py | nicopierson/starfish | 7192ae2adc0669cd6ebe5e9e898e0b978d5553da | [
"MIT"
] | 3 | 2020-09-01T12:18:20.000Z | 2021-05-18T03:50:31.000Z | starfish/test/spots/detector/test_spot_detection.py | nicopierson/starfish | 7192ae2adc0669cd6ebe5e9e898e0b978d5553da | [
"MIT"
] | null | null | null | starfish/test/spots/detector/test_spot_detection.py | nicopierson/starfish | 7192ae2adc0669cd6ebe5e9e898e0b978d5553da | [
"MIT"
] | null | null | null | import numpy as np
import pytest
from starfish.imagestack.imagestack import ImageStack
from starfish.spots._detector._base import SpotFinderAlgorithmBase
from starfish.spots._detector.blob import BlobDetector
from starfish.spots._detector.detect import detect_spots
from starfish.spots._detector.local_max_peak_finder import LocalMaxPeakFinder
from starfish.spots._detector.trackpy_local_max_peak_finder import TrackpyLocalMaxPeakFinder
from starfish.test.test_utils import (
two_spot_informative_blank_coded_data_factory,
two_spot_one_hot_coded_data_factory,
two_spot_sparse_coded_data_factory,
)
from starfish.types import Axes, Features
# verify all spot finders handle different coding types
# Each factory returns a 3-tuple; the first element is discarded here and only
# the ImageStack and its max spot intensity are used (presumably the first
# element is the codebook — TODO confirm against the factory definitions).
_, ONE_HOT_IMAGESTACK, ONE_HOT_MAX_INTENSITY = two_spot_one_hot_coded_data_factory()
_, SPARSE_IMAGESTACK, SPARSE_MAX_INTENSITY = two_spot_sparse_coded_data_factory()
_, BLANK_IMAGESTACK, BLANK_MAX_INTENSITY = two_spot_informative_blank_coded_data_factory()
# make sure that all spot finders handle empty arrays
# All-zero float32 stack of shape (4, 2, 10, 100, 100) — presumably
# (round, ch, z, y, x); verify against ImageStack.from_numpy_array.
EMPTY_IMAGESTACK = ImageStack.from_numpy_array(np.zeros((4, 2, 10, 100, 100), dtype=np.float32))
def simple_gaussian_spot_detector() -> BlobDetector:
    """Construct a BlobDetector with simple, fixed parameters."""
    detector_kwargs = {
        'min_sigma': 1,
        'max_sigma': 4,
        'num_sigma': 5,
        'threshold': 0,
        'measurement_type': 'max',
    }
    return BlobDetector(**detector_kwargs)
def simple_trackpy_local_max_spot_detector() -> TrackpyLocalMaxPeakFinder:
    """Construct a TrackpyLocalMaxPeakFinder with simple, fixed parameters."""
    finder_kwargs = {
        'spot_diameter': 3,
        'min_mass': 0.01,
        'max_size': 10,
        'separation': 2,
    }
    return TrackpyLocalMaxPeakFinder(**finder_kwargs)
def simple_local_max_spot_detector() -> LocalMaxPeakFinder:
    """Construct a LocalMaxPeakFinder with permissive, fixed parameters."""
    finder_kwargs = {
        'min_distance': 6,
        'stringency': 0,
        'min_obj_area': 0,
        'max_obj_area': np.inf,
        'threshold': 0,
    }
    return LocalMaxPeakFinder(**finder_kwargs)
# initialize spot detectors
# These instances are shared by every parametrized test below.
gaussian_spot_detector = simple_gaussian_spot_detector()
trackpy_local_max_spot_detector = simple_trackpy_local_max_spot_detector()
local_max_spot_detector = simple_local_max_spot_detector()
# test parameterization
# (argument-name string, argument tuples) consumed by @pytest.mark.parametrize.
# radius_is_gyration is True only for the trackpy detector rows.
test_parameters = (
    'data_stack, spot_detector, radius_is_gyration, max_intensity',
    [
        (ONE_HOT_IMAGESTACK, gaussian_spot_detector, False, ONE_HOT_MAX_INTENSITY),
        (ONE_HOT_IMAGESTACK, trackpy_local_max_spot_detector, True, ONE_HOT_MAX_INTENSITY),
        (ONE_HOT_IMAGESTACK, local_max_spot_detector, False, ONE_HOT_MAX_INTENSITY),
        (SPARSE_IMAGESTACK, gaussian_spot_detector, False, SPARSE_MAX_INTENSITY),
        (SPARSE_IMAGESTACK, trackpy_local_max_spot_detector, True, SPARSE_MAX_INTENSITY),
        (SPARSE_IMAGESTACK, local_max_spot_detector, False, SPARSE_MAX_INTENSITY),
        (BLANK_IMAGESTACK, gaussian_spot_detector, False, BLANK_MAX_INTENSITY),
        (BLANK_IMAGESTACK, trackpy_local_max_spot_detector, True, BLANK_MAX_INTENSITY),
        (BLANK_IMAGESTACK, local_max_spot_detector, False, BLANK_MAX_INTENSITY),
    ]
)
@pytest.mark.parametrize(*test_parameters)
def test_spot_detection_with_reference_image(
    data_stack: ImageStack,
    spot_detector: SpotFinderAlgorithmBase,
    radius_is_gyration: bool,
    max_intensity: float,
):
    """This testing method uses a reference image to identify spot locations."""
    def call_detect_spots(stack):
        # Build the reference image as the max projection over channels and rounds.
        reference_image_mp = stack.max_proj(Axes.CH, Axes.ROUND)
        reference_image_mp_numpy = reference_image_mp._squeezed_numpy(Axes.CH, Axes.ROUND)
        return detect_spots(
            data_stack=stack,
            spot_finding_method=spot_detector.image_to_spots,
            reference_image=reference_image_mp_numpy,
            measurement_function=np.max,
            radius_is_gyration=radius_is_gyration,
            n_processes=1,
        )
    intensity_table = call_detect_spots(data_stack)
    assert intensity_table.sizes[Features.AXIS] == 2, "wrong number of spots detected"
    # With a shared reference image, each of the two spots is measured in every
    # tile; summed over (round, ch) each should total twice the max intensity.
    expected = [max_intensity * 2, max_intensity * 2]
    assert np.allclose(intensity_table.sum((Axes.ROUND, Axes.CH)).values, expected), \
        "wrong spot intensities detected"
    # verify this execution strategy produces an empty intensitytable when called with a blank image
    empty_intensity_table = call_detect_spots(EMPTY_IMAGESTACK)
    assert empty_intensity_table.sizes[Features.AXIS] == 0
@pytest.mark.parametrize(*test_parameters)
def test_spot_detection_with_reference_image_from_max_projection(
    data_stack: ImageStack,
    spot_detector: SpotFinderAlgorithmBase,
    radius_is_gyration: bool,
    max_intensity: float,
):
    """This testing method builds a reference image to identify spot locations."""
    def call_detect_spots(stack):
        # Let detect_spots build the max-projection reference internally instead
        # of passing an explicit reference image.
        return detect_spots(
            data_stack=stack,
            spot_finding_method=spot_detector.image_to_spots,
            reference_image_from_max_projection=True,
            measurement_function=np.max,
            radius_is_gyration=radius_is_gyration,
            n_processes=1,
        )
    intensity_table = call_detect_spots(data_stack)
    assert intensity_table.sizes[Features.AXIS] == 2, "wrong number of spots detected"
    # Same expectation as the explicit-reference test: two spots, each totalling
    # twice the max intensity when summed over (round, ch).
    expected = [max_intensity * 2, max_intensity * 2]
    assert np.allclose(intensity_table.sum((Axes.ROUND, Axes.CH)).values, expected), \
        "wrong spot intensities detected"
    empty_intensity_table = call_detect_spots(EMPTY_IMAGESTACK)
    assert empty_intensity_table.sizes[Features.AXIS] == 0
@pytest.mark.parametrize(*test_parameters)
def test_spot_finding_no_reference_image(
        data_stack: ImageStack,
        spot_detector: SpotFinderAlgorithmBase,
        radius_is_gyration: bool,
        max_intensity: float,
):
    """
    This testing method does not provide a reference image, and should therefore check for spots
    in each (round, ch) combination in sequence. With the given input, it should detect 4 spots.
    """

    def call_detect_spots(stack):
        # no reference image: spot finding runs independently per (round, ch)
        return detect_spots(
            data_stack=stack,
            spot_finding_method=spot_detector.image_to_spots,
            measurement_function=np.max,
            radius_is_gyration=radius_is_gyration,
            n_processes=1,
        )

    intensity_table = call_detect_spots(data_stack)
    assert intensity_table.sizes[Features.AXIS] == 4, "wrong number of spots detected"
    expected = [max_intensity] * 4
    assert np.allclose(intensity_table.sum((Axes.ROUND, Axes.CH)).values, expected), \
        "wrong spot intensities detected"

    # verify this execution strategy produces an empty intensitytable when called with a blank
    # image.  (fix: the previous version first ran a direct detect_spots call whose result was
    # immediately overwritten by the call below; that dead duplicate call has been removed)
    empty_intensity_table = call_detect_spots(EMPTY_IMAGESTACK)
    assert empty_intensity_table.sizes[Features.AXIS] == 0
| 40.046512 | 100 | 0.749564 |
acf610e799b0b45cb838b3492c1c7c96dcd67da6 | 30,528 | py | Python | electrum/gui/qt/history_list.py | ShadowMyst/electrum | 4c6379a9365dfaeefe09ee2d52ee9332b9bf6129 | [
"MIT"
] | null | null | null | electrum/gui/qt/history_list.py | ShadowMyst/electrum | 4c6379a9365dfaeefe09ee2d52ee9332b9bf6129 | [
"MIT"
] | 2 | 2021-06-02T00:45:46.000Z | 2021-11-15T17:49:56.000Z | electrum/gui/qt/history_list.py | ShadowMyst/electrum | 4c6379a9365dfaeefe09ee2d52ee9332b9bf6129 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2015 Thomas Voegtlin
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import webbrowser
import datetime
from datetime import date
from typing import TYPE_CHECKING, Tuple, Dict
import threading
from enum import IntEnum
from decimal import Decimal
from electrum.address_synchronizer import TX_HEIGHT_LOCAL
from electrum.i18n import _
from electrum.util import (block_explorer_URL, profiler, print_error, TxMinedInfo,
OrderedDictWithIndex, PrintError)
from .util import *
if TYPE_CHECKING:
from electrum.wallet import Abstract_Wallet
try:
from electrum.plot import plot_history, NothingToPlotException
except:
print_error("qt/history_list: could not import electrum.plot. This feature needs matplotlib to be installed.")
plot_history = None
# note: this list needs to be kept in sync with another in kivy
# Icon filenames indexed by the integer tx status returned by
# wallet.get_tx_status(); HistoryModel.data() uses TX_ICONS[status]
# as the DecorationRole of the status column.
TX_ICONS = [
    "unconfirmed.png",
    "warning.png",
    "unconfirmed.png",
    "offline_tx.png",
    "clock1.png",
    "clock2.png",
    "clock3.png",
    "clock4.png",
    "clock5.png",
    "confirmed.png",
]
class HistoryColumns(IntEnum):
    """Column indices of the history view.

    IntEnum so the members double directly as QModelIndex column numbers.
    """
    STATUS_ICON = 0
    STATUS_TEXT = 1
    DESCRIPTION = 2
    COIN_VALUE = 3
    RUNNING_COIN_BALANCE = 4
    FIAT_VALUE = 5
    FIAT_ACQ_PRICE = 6
    FIAT_CAP_GAINS = 7
    TXID = 8
class HistorySortModel(QSortFilterProxyModel):
    """Sort proxy for the history list.

    Compares the Qt.UserRole payloads that HistoryModel.data() provides for
    every column, mapping missing values and Decimal NaNs to -inf so they
    group together at one end of the sort order.
    """

    def lessThan(self, source_left: QModelIndex, source_right: QModelIndex):
        item1 = self.sourceModel().data(source_left, Qt.UserRole)
        item2 = self.sourceModel().data(source_right, Qt.UserRole)
        if item1 is None or item2 is None:
            raise Exception(f'UserRole not set for column {source_left.column()}')
        v1 = item1.value()
        v2 = item2.value()
        # normalize unsortable values (None / Decimal NaN) to -inf
        if v1 is None or (isinstance(v1, Decimal) and v1.is_nan()): v1 = -float("inf")
        if v2 is None or (isinstance(v2, Decimal) and v2.is_nan()): v2 = -float("inf")
        try:
            return v1 < v2
        except TypeError:
            # fix: was a bare `except:` which also swallowed KeyboardInterrupt/SystemExit;
            # only incomparable types (TypeError) should fall back to "not less than"
            return False
class HistoryModel(QAbstractItemModel, PrintError):
    """Flat Qt item model backing the transaction-history list."""

    def __init__(self, parent):
        super().__init__(parent)
        self.parent = parent
        self.view = None  # type: HistoryList
        # txid -> tx_item dict; ordered so insertion positions map to view rows
        self.transactions = OrderedDictWithIndex()
        # txid -> (status, status_str), filled lazily / rebuilt on refresh()
        self.tx_status_cache = {}  # type: Dict[str, Tuple[int, str]]
        self.summary = None

    def set_view(self, history_list: 'HistoryList'):
        # FIXME HistoryModel and HistoryList mutually depend on each other.
        # After constructing both, this method needs to be called.
        self.view = history_list  # type: HistoryList
        self.set_visibility_of_columns()

    def columnCount(self, parent: QModelIndex):
        return len(HistoryColumns)

    def rowCount(self, parent: QModelIndex):
        return len(self.transactions)

    def index(self, row: int, column: int, parent: QModelIndex):
        return self.createIndex(row, column)
    def data(self, index: QModelIndex, role: Qt.ItemDataRole) -> QVariant:
        """Role dispatch for one history cell.

        Handles, in order: sort keys (UserRole), decoration/alignment/font/
        color roles, then display/edit text.
        """
        # note: this method is performance-critical.
        # it is called a lot, and so must run extremely fast.
        assert index.isValid()
        col = index.column()
        tx_item = self.transactions.value_from_pos(index.row())
        tx_hash = tx_item['txid']
        conf = tx_item['confirmations']
        txpos = tx_item['txpos_in_block'] or 0
        height = tx_item['height']
        try:
            status, status_str = self.tx_status_cache[tx_hash]
        except KeyError:
            # cache miss: derive status from the wallet on demand
            tx_mined_info = self.tx_mined_info_from_tx_item(tx_item)
            status, status_str = self.parent.wallet.get_tx_status(tx_hash, tx_mined_info)
        if role == Qt.UserRole:
            # for sorting
            d = {
                HistoryColumns.STATUS_ICON:
                    # height breaks ties for unverified txns
                    # txpos breaks ties for verified same block txns
                    (status, conf, -height, -txpos),
                HistoryColumns.STATUS_TEXT: status_str,
                HistoryColumns.DESCRIPTION: tx_item['label'],
                HistoryColumns.COIN_VALUE: tx_item['value'].value,
                HistoryColumns.RUNNING_COIN_BALANCE: tx_item['balance'].value,
                HistoryColumns.FIAT_VALUE:
                    tx_item['fiat_value'].value if 'fiat_value' in tx_item else None,
                HistoryColumns.FIAT_ACQ_PRICE:
                    tx_item['acquisition_price'].value if 'acquisition_price' in tx_item else None,
                HistoryColumns.FIAT_CAP_GAINS:
                    tx_item['capital_gain'].value if 'capital_gain' in tx_item else None,
                HistoryColumns.TXID: tx_hash,
            }
            return QVariant(d[col])
        if role not in (Qt.DisplayRole, Qt.EditRole):
            if col == HistoryColumns.STATUS_ICON and role == Qt.DecorationRole:
                return QVariant(self.view.icon_cache.get(":icons/" + TX_ICONS[status]))
            elif col == HistoryColumns.STATUS_ICON and role == Qt.ToolTipRole:
                # NOTE(review): string is concatenated before _() lookup, so the
                # plural form will not be found in translation catalogues — confirm
                return QVariant(str(conf) + _(" confirmation" + ("s" if conf != 1 else "")))
            elif col > HistoryColumns.DESCRIPTION and role == Qt.TextAlignmentRole:
                # numeric columns are right-aligned
                return QVariant(Qt.AlignRight | Qt.AlignVCenter)
            elif col != HistoryColumns.STATUS_TEXT and role == Qt.FontRole:
                monospace_font = QFont(MONOSPACE_FONT)
                return QVariant(monospace_font)
            elif col == HistoryColumns.DESCRIPTION and role == Qt.DecorationRole \
                    and self.parent.wallet.invoices.paid.get(tx_hash):
                # seal icon marks a paid invoice
                return QVariant(self.view.icon_cache.get(":icons/seal"))
            elif col in (HistoryColumns.DESCRIPTION, HistoryColumns.COIN_VALUE) \
                    and role == Qt.ForegroundRole and tx_item['value'].value < 0:
                # outgoing amounts rendered in red
                red_brush = QBrush(QColor("#BC1E1E"))
                return QVariant(red_brush)
            elif col == HistoryColumns.FIAT_VALUE and role == Qt.ForegroundRole \
                    and not tx_item.get('fiat_default') and tx_item.get('fiat_value') is not None:
                # user-overridden fiat values rendered in blue
                blue_brush = QBrush(QColor("#1E1EFF"))
                return QVariant(blue_brush)
            return QVariant()
        if col == HistoryColumns.STATUS_TEXT:
            return QVariant(status_str)
        elif col == HistoryColumns.DESCRIPTION:
            return QVariant(tx_item['label'])
        elif col == HistoryColumns.COIN_VALUE:
            value = tx_item['value'].value
            v_str = self.parent.format_amount(value, is_diff=True, whitespaces=True)
            return QVariant(v_str)
        elif col == HistoryColumns.RUNNING_COIN_BALANCE:
            balance = tx_item['balance'].value
            balance_str = self.parent.format_amount(balance, whitespaces=True)
            return QVariant(balance_str)
        elif col == HistoryColumns.FIAT_VALUE and 'fiat_value' in tx_item:
            value_str = self.parent.fx.format_fiat(tx_item['fiat_value'].value)
            return QVariant(value_str)
        elif col == HistoryColumns.FIAT_ACQ_PRICE and \
                tx_item['value'].value < 0 and 'acquisition_price' in tx_item:
            # fixme: should use is_mine
            acq = tx_item['acquisition_price'].value
            return QVariant(self.parent.fx.format_fiat(acq))
        elif col == HistoryColumns.FIAT_CAP_GAINS and 'capital_gain' in tx_item:
            cg = tx_item['capital_gain'].value
            return QVariant(self.parent.fx.format_fiat(cg))
        elif col == HistoryColumns.TXID:
            return QVariant(tx_hash)
        return QVariant()
    def parent(self, index: QModelIndex):
        # flat list: no item has a parent
        return QModelIndex()

    def hasChildren(self, index: QModelIndex):
        # only the invisible root (an invalid index) has children
        return not index.isValid()
def update_label(self, row):
tx_item = self.transactions.value_from_pos(row)
tx_item['label'] = self.parent.wallet.get_label(tx_item['txid'])
topLeft = bottomRight = self.createIndex(row, 2)
self.dataChanged.emit(topLeft, bottomRight, [Qt.DisplayRole])
    def get_domain(self):
        """Addresses whose history this model shows. Overridden in address_dialog.py."""
        return self.parent.wallet.get_addresses()
@profiler
def refresh(self, reason: str):
self.print_error(f"refreshing... reason: {reason}")
assert self.parent.gui_thread == threading.current_thread(), 'must be called from GUI thread'
assert self.view, 'view not set'
selected = self.view.selectionModel().currentIndex()
selected_row = None
if selected:
selected_row = selected.row()
fx = self.parent.fx
if fx: fx.history_used_spot = False
r = self.parent.wallet.get_full_history(domain=self.get_domain(), from_timestamp=None, to_timestamp=None, fx=fx)
self.set_visibility_of_columns()
if r['transactions'] == list(self.transactions.values()):
return
old_length = len(self.transactions)
if old_length != 0:
self.beginRemoveRows(QModelIndex(), 0, old_length)
self.transactions.clear()
self.endRemoveRows()
self.beginInsertRows(QModelIndex(), 0, len(r['transactions'])-1)
for tx_item in r['transactions']:
txid = tx_item['txid']
self.transactions[txid] = tx_item
self.endInsertRows()
if selected_row:
self.view.selectionModel().select(self.createIndex(selected_row, 0), QItemSelectionModel.Rows | QItemSelectionModel.SelectCurrent)
f = self.view.current_filter
if f:
self.view.filter(f)
# update summary
self.summary = r['summary']
if not self.view.years and self.transactions:
start_date = date.today()
end_date = date.today()
if len(self.transactions) > 0:
start_date = self.transactions.value_from_pos(0).get('date') or start_date
end_date = self.transactions.value_from_pos(len(self.transactions) - 1).get('date') or end_date
self.view.years = [str(i) for i in range(start_date.year, end_date.year + 1)]
self.view.period_combo.insertItems(1, self.view.years)
# update tx_status_cache
self.tx_status_cache.clear()
for txid, tx_item in self.transactions.items():
tx_mined_info = self.tx_mined_info_from_tx_item(tx_item)
self.tx_status_cache[txid] = self.parent.wallet.get_tx_status(txid, tx_mined_info)
    def set_visibility_of_columns(self):
        """Show/hide optional columns based on the fiat-history settings."""
        def set_visible(col: int, b: bool):
            self.view.showColumn(col) if b else self.view.hideColumn(col)
        # txid
        set_visible(HistoryColumns.TXID, False)
        # fiat
        history = self.parent.fx.show_history()
        cap_gains = self.parent.fx.get_history_capital_gains_config()
        set_visible(HistoryColumns.FIAT_VALUE, history)
        set_visible(HistoryColumns.FIAT_ACQ_PRICE, history and cap_gains)
        set_visible(HistoryColumns.FIAT_CAP_GAINS, history and cap_gains)

    def update_fiat(self, row, idx):
        """Recompute the fiat fields of the tx at `row` and notify views at `idx`."""
        tx_item = self.transactions.value_from_pos(row)
        key = tx_item['txid']
        fee = tx_item.get('fee')
        value = tx_item['value'].value
        fiat_fields = self.parent.wallet.get_tx_item_fiat(key, value, self.parent.fx, fee.value if fee else None)
        tx_item.update(fiat_fields)
        self.dataChanged.emit(idx, idx, [Qt.DisplayRole, Qt.ForegroundRole])

    def update_tx_mined_status(self, tx_hash: str, tx_mined_info: TxMinedInfo):
        """Refresh one row after its mined status (conf/timestamp/txpos) changed."""
        try:
            row = self.transactions.pos_from_key(tx_hash)
            tx_item = self.transactions[tx_hash]
        except KeyError:
            # txid not in this model; nothing to update
            return
        self.tx_status_cache[tx_hash] = self.parent.wallet.get_tx_status(tx_hash, tx_mined_info)
        tx_item.update({
            'confirmations': tx_mined_info.conf,
            'timestamp': tx_mined_info.timestamp,
            'txpos_in_block': tx_mined_info.txpos,
        })
        # invalidate the whole row
        topLeft = self.createIndex(row, 0)
        bottomRight = self.createIndex(row, len(HistoryColumns) - 1)
        self.dataChanged.emit(topLeft, bottomRight)

    def on_fee_histogram(self):
        """Re-derive status for all unconfirmed txns when the fee histogram changes."""
        for tx_hash, tx_item in list(self.transactions.items()):
            tx_mined_info = self.tx_mined_info_from_tx_item(tx_item)
            if tx_mined_info.conf > 0:
                # note: we could actually break here if we wanted to rely on the order of txns in self.transactions
                continue
            self.update_tx_mined_status(tx_hash, tx_mined_info)
    def headerData(self, section: int, orientation: Qt.Orientation, role: Qt.ItemDataRole):
        """Column titles; the fiat columns include the active currency code."""
        assert orientation == Qt.Horizontal
        if role != Qt.DisplayRole:
            return None
        fx = self.parent.fx
        # placeholders used while fiat history display is disabled
        fiat_title = 'n/a fiat value'
        fiat_acq_title = 'n/a fiat acquisition price'
        fiat_cg_title = 'n/a fiat capital gains'
        if fx and fx.show_history():
            fiat_title = '%s '%fx.ccy + _('Value')
            fiat_acq_title = '%s '%fx.ccy + _('Acquisition price')
            fiat_cg_title = '%s '%fx.ccy + _('Capital Gains')
        return {
            HistoryColumns.STATUS_ICON: '',
            HistoryColumns.STATUS_TEXT: _('Date'),
            HistoryColumns.DESCRIPTION: _('Description'),
            HistoryColumns.COIN_VALUE: _('Amount'),
            HistoryColumns.RUNNING_COIN_BALANCE: _('Balance'),
            HistoryColumns.FIAT_VALUE: fiat_title,
            HistoryColumns.FIAT_ACQ_PRICE: fiat_acq_title,
            HistoryColumns.FIAT_CAP_GAINS: fiat_cg_title,
            HistoryColumns.TXID: 'TXID',
        }[section]

    def flags(self, idx):
        """Base flags plus ItemIsEditable for columns the view marks editable."""
        extra_flags = Qt.NoItemFlags  # type: Qt.ItemFlag
        if idx.column() in self.view.editable_columns:
            extra_flags |= Qt.ItemIsEditable
        return super().flags(idx) | extra_flags
@staticmethod
def tx_mined_info_from_tx_item(tx_item):
tx_mined_info = TxMinedInfo(height=tx_item['height'],
conf=tx_item['confirmations'],
timestamp=tx_item['timestamp'])
return tx_mined_info
class HistoryList(MyTreeView, AcceptFileDragDrop):
    """Tree view widget showing the wallet's transaction history."""

    # columns searched by the view's text filter
    filter_columns = [HistoryColumns.STATUS_TEXT,
                      HistoryColumns.DESCRIPTION,
                      HistoryColumns.COIN_VALUE,
                      HistoryColumns.TXID]

    def tx_item_from_proxy_row(self, proxy_row):
        """Map a sort-proxy row back to the underlying tx_item dict."""
        hm_idx = self.model().mapToSource(self.model().index(proxy_row, 0))
        return self.hm.transactions.value_from_pos(hm_idx.row())
def should_hide(self, proxy_row):
if self.start_timestamp and self.end_timestamp:
tx_item = self.tx_item_from_proxy_row(proxy_row)
date = tx_item['date']
if date:
in_interval = self.start_timestamp <= date <= self.end_timestamp
if not in_interval:
return True
return False
    def __init__(self, parent, model: HistoryModel):
        super().__init__(parent, self.create_menu, stretch_column=HistoryColumns.DESCRIPTION)
        self.hm = model
        # sort/filter proxy sits between the raw model and the view
        self.proxy = HistorySortModel(self)
        self.proxy.setSourceModel(model)
        self.setModel(self.proxy)
        self.config = parent.config
        AcceptFileDragDrop.__init__(self, ".txn")
        self.setSortingEnabled(True)
        # date-interval filter state, driven by the toolbar widgets
        self.start_timestamp = None
        self.end_timestamp = None
        self.years = []
        self.create_toolbar_buttons()
        self.wallet = self.parent.wallet  # type: Abstract_Wallet
        self.sortByColumn(HistoryColumns.STATUS_ICON, Qt.AscendingOrder)
        self.editable_columns |= {HistoryColumns.FIAT_VALUE}
        self.header().setStretchLastSection(False)
        # description stretches; everything else sizes to contents
        for col in HistoryColumns:
            sm = QHeaderView.Stretch if col == self.stretch_column else QHeaderView.ResizeToContents
            self.header().setSectionResizeMode(col, sm)
def format_date(self, d):
return str(datetime.date(d.year, d.month, d.day)) if d else _('None')
    def on_combo(self, x):
        """Handle period-combo selection: 'All', a specific year, or 'Custom'."""
        s = self.period_combo.itemText(x)
        x = s == _('Custom')
        # the manual date buttons are only usable in 'Custom' mode
        self.start_button.setEnabled(x)
        self.end_button.setEnabled(x)
        if s == _('All'):
            self.start_timestamp = None
            self.end_timestamp = None
            self.start_button.setText("-")
            self.end_button.setText("-")
        else:
            try:
                year = int(s)
            except:
                # 'Custom' (or any non-year entry): leave the current interval alone
                return
            # filter to the calendar year [Jan 1 of `year`, Jan 1 of `year`+1]
            self.start_timestamp = start_date = datetime.datetime(year, 1, 1)
            self.end_timestamp = end_date = datetime.datetime(year+1, 1, 1)
            self.start_button.setText(_('From') + ' ' + self.format_date(start_date))
            self.end_button.setText(_('To') + ' ' + self.format_date(end_date))
        self.hide_rows()
    def create_toolbar_buttons(self):
        """Build the period selector and the custom start/end date buttons."""
        self.period_combo = QComboBox()
        self.start_button = QPushButton('-')
        self.start_button.pressed.connect(self.select_start_date)
        self.start_button.setEnabled(False)
        self.end_button = QPushButton('-')
        self.end_button.pressed.connect(self.select_end_date)
        self.end_button.setEnabled(False)
        # year entries are inserted later by HistoryModel.refresh()
        self.period_combo.addItems([_('All'), _('Custom')])
        self.period_combo.activated.connect(self.on_combo)

    def get_toolbar_buttons(self):
        """Widgets to place in the tab's toolbar, in display order."""
        return self.period_combo, self.start_button, self.end_button

    def on_hide_toolbar(self):
        # clear any active date filter when the toolbar is hidden
        self.start_timestamp = None
        self.end_timestamp = None
        self.hide_rows()

    def save_toolbar_state(self, state, config):
        config.set_key('show_toolbar_history', state)

    def select_start_date(self):
        self.start_timestamp = self.select_date(self.start_button)
        self.hide_rows()

    def select_end_date(self):
        self.end_timestamp = self.select_date(self.end_button)
        self.hide_rows()
    def select_date(self, button):
        """Modal calendar dialog; returns the chosen datetime, or None if cancelled
        or if OK was pressed without clicking a day. Updates `button`'s text."""
        d = WindowModalDialog(self, _("Select date"))
        d.setMinimumSize(600, 150)
        d.date = None
        vbox = QVBoxLayout()
        def on_date(date):
            d.date = date
        cal = QCalendarWidget()
        cal.setGridVisible(True)
        cal.clicked[QDate].connect(on_date)
        vbox.addWidget(cal)
        vbox.addLayout(Buttons(OkButton(d), CancelButton(d)))
        d.setLayout(vbox)
        if d.exec_():
            if d.date is None:
                return None
            date = d.date.toPyDate()
            button.setText(self.format_date(date))
            return datetime.datetime(date.year, date.month, date.day)
    def show_summary(self):
        """Show a modal dialog summarizing the history (balances, income, gains)."""
        h = self.model().sourceModel().summary
        if not h:
            self.parent.show_message(_("Nothing to summarize."))
            return
        start_date = h.get('start_date')
        end_date = h.get('end_date')
        format_amount = lambda x: self.parent.format_amount(x.value) + ' ' + self.parent.base_unit()
        d = WindowModalDialog(self, _("Summary"))
        d.setMinimumSize(600, 150)
        vbox = QVBoxLayout()
        grid = QGridLayout()
        grid.addWidget(QLabel(_("Start")), 0, 0)
        grid.addWidget(QLabel(self.format_date(start_date)), 0, 1)
        grid.addWidget(QLabel(str(h.get('start_fiat_value')) + '/BTC'), 0, 2)
        grid.addWidget(QLabel(_("Initial balance")), 1, 0)
        grid.addWidget(QLabel(format_amount(h['start_balance'])), 1, 1)
        grid.addWidget(QLabel(str(h.get('start_fiat_balance'))), 1, 2)
        grid.addWidget(QLabel(_("End")), 2, 0)
        grid.addWidget(QLabel(self.format_date(end_date)), 2, 1)
        grid.addWidget(QLabel(str(h.get('end_fiat_value')) + '/BTC'), 2, 2)
        # note: grid row 3 is left empty as a visual separator
        grid.addWidget(QLabel(_("Final balance")), 4, 0)
        grid.addWidget(QLabel(format_amount(h['end_balance'])), 4, 1)
        grid.addWidget(QLabel(str(h.get('end_fiat_balance'))), 4, 2)
        grid.addWidget(QLabel(_("Income")), 5, 0)
        grid.addWidget(QLabel(format_amount(h.get('income'))), 5, 1)
        grid.addWidget(QLabel(str(h.get('fiat_income'))), 5, 2)
        grid.addWidget(QLabel(_("Expenditures")), 6, 0)
        grid.addWidget(QLabel(format_amount(h.get('expenditures'))), 6, 1)
        grid.addWidget(QLabel(str(h.get('fiat_expenditures'))), 6, 2)
        grid.addWidget(QLabel(_("Capital gains")), 7, 0)
        grid.addWidget(QLabel(str(h.get('capital_gains'))), 7, 2)
        grid.addWidget(QLabel(_("Unrealized gains")), 8, 0)
        grid.addWidget(QLabel(str(h.get('unrealized_gains', ''))), 8, 2)
        vbox.addLayout(grid)
        vbox.addLayout(Buttons(CloseButton(d)))
        d.setLayout(vbox)
        d.exec_()
    def plot_history_dialog(self):
        """Plot the history with matplotlib; show a message if it is unavailable."""
        if plot_history is None:
            # electrum.plot import failed at module load (matplotlib missing)
            self.parent.show_message(
                _("Can't plot history.") + '\n' +
                _("Perhaps some dependencies are missing...") + " (matplotlib?)")
            return
        try:
            plt = plot_history(list(self.hm.transactions.values()))
            plt.show()
        except NothingToPlotException as e:
            self.parent.show_message(str(e))
    def on_edited(self, index, user_role, text):
        """Persist an in-place edit of the description or fiat-value column."""
        index = self.model().mapToSource(index)
        row, column = index.row(), index.column()
        tx_item = self.hm.transactions.value_from_pos(row)
        key = tx_item['txid']
        if column == HistoryColumns.DESCRIPTION:
            if self.wallet.set_label(key, text): #changed
                self.hm.update_label(row)
                self.parent.update_completions()
        elif column == HistoryColumns.FIAT_VALUE:
            self.wallet.set_fiat_value(key, self.parent.fx.ccy, text, self.parent.fx, tx_item['value'].value)
            value = tx_item['value'].value
            if value is not None:
                self.hm.update_fiat(row, index)
        else:
            # only the two columns above are editable; anything else is a bug
            assert False
    def mouseDoubleClickEvent(self, event: QMouseEvent):
        """Double-click: edit if the cell is editable, otherwise open the tx dialog."""
        idx = self.indexAt(event.pos())
        if not idx.isValid():
            return
        tx_item = self.tx_item_from_proxy_row(idx.row())
        if self.hm.flags(self.model().mapToSource(idx)) & Qt.ItemIsEditable:
            super().mouseDoubleClickEvent(event)
        else:
            self.show_transaction(tx_item['txid'])

    def show_transaction(self, tx_hash):
        """Open the transaction-details dialog for `tx_hash` (no-op if unknown)."""
        tx = self.wallet.transactions.get(tx_hash)
        if not tx:
            return
        label = self.wallet.get_label(tx_hash) or None # prefer 'None' if not defined (force tx dialog to hide Description field if missing)
        self.parent.show_transaction(tx, label)
    def create_menu(self, position: QPoint):
        """Build and show the context menu for the row under `position`."""
        org_idx: QModelIndex = self.indexAt(position)
        idx = self.proxy.mapToSource(org_idx)
        if not idx.isValid():
            # can happen e.g. before list is populated for the first time
            return
        tx_item = self.hm.transactions.value_from_pos(idx.row())
        column = idx.column()
        if column == HistoryColumns.STATUS_ICON:
            # the icon column has no text; offer the txid for copying instead
            column_title = _('Transaction ID')
            column_data = tx_item['txid']
        else:
            column_title = self.hm.headerData(column, Qt.Horizontal, Qt.DisplayRole)
            column_data = self.hm.data(idx, Qt.DisplayRole).value()
        tx_hash = tx_item['txid']
        tx = self.wallet.transactions[tx_hash]
        tx_URL = block_explorer_URL(self.config, 'tx', tx_hash)
        height = self.wallet.get_tx_height(tx_hash).height
        is_relevant, is_mine, v, fee = self.wallet.get_wallet_delta(tx)
        is_unconfirmed = height <= 0
        pr_key = self.wallet.invoices.paid.get(tx_hash)
        menu = QMenu()
        if height == TX_HEIGHT_LOCAL:
            # local (never-broadcast) txns can be removed
            menu.addAction(_("Remove"), lambda: self.remove_local_tx(tx_hash))
        menu.addAction(_("Copy {}").format(column_title), lambda: self.parent.app.clipboard().setText(column_data))
        for c in self.editable_columns:
            if self.isColumnHidden(c): continue
            label = self.hm.headerData(c, Qt.Horizontal, Qt.DisplayRole)
            # TODO use siblingAtColumn when min Qt version is >=5.11
            persistent = QPersistentModelIndex(org_idx.sibling(org_idx.row(), c))
            menu.addAction(_("Edit {}").format(label), lambda p=persistent: self.edit(QModelIndex(p)))
        menu.addAction(_("Details"), lambda: self.show_transaction(tx_hash))
        if is_unconfirmed and tx:
            # note: the current implementation of RBF *needs* the old tx fee
            rbf = is_mine and not tx.is_final() and fee is not None
            if rbf:
                menu.addAction(_("Increase fee"), lambda: self.parent.bump_fee_dialog(tx))
            else:
                child_tx = self.wallet.cpfp(tx, 0)
                if child_tx:
                    menu.addAction(_("Child pays for parent"), lambda: self.parent.cpfp(tx, child_tx))
        if pr_key:
            menu.addAction(self.icon_cache.get(":icons/seal"), _("View invoice"), lambda: self.parent.show_invoice(pr_key))
        if tx_URL:
            menu.addAction(_("View on block explorer"), lambda: webbrowser.open(tx_URL))
        menu.exec_(self.viewport().mapToGlobal(position))
def remove_local_tx(self, delete_tx):
to_delete = {delete_tx}
to_delete |= self.wallet.get_depending_transactions(delete_tx)
question = _("Are you sure you want to remove this transaction?")
if len(to_delete) > 1:
question = _(
"Are you sure you want to remove this transaction and {} child transactions?".format(len(to_delete) - 1)
)
answer = QMessageBox.question(self.parent, _("Please confirm"), question, QMessageBox.Yes, QMessageBox.No)
if answer == QMessageBox.No:
return
for tx in to_delete:
self.wallet.remove_transaction(tx)
self.wallet.save_transactions(write=True)
# need to update at least: history_list, utxo_list, address_list
self.parent.need_update.set()
    def onFileAdded(self, fn):
        """Load a dropped .txn file and store the parsed transaction in the wallet."""
        try:
            with open(fn) as f:
                tx = self.parent.tx_from_text(f.read())
            self.parent.save_transaction_into_wallet(tx)
        except IOError as e:
            self.parent.show_error(e)
    def export_history_dialog(self):
        """Ask for a target file and export the wallet history as CSV or JSON."""
        d = WindowModalDialog(self, _('Export History'))
        d.setMinimumSize(400, 200)
        vbox = QVBoxLayout(d)
        defaultname = os.path.expanduser('~/electrum-history.csv')
        select_msg = _('Select file to export your wallet transactions to')
        # csv_button toggles between CSV and JSON output formats
        hbox, filename_e, csv_button = filename_field(self, self.config, defaultname, select_msg)
        vbox.addLayout(hbox)
        vbox.addStretch(1)
        hbox = Buttons(CancelButton(d), OkButton(d, _('Export')))
        vbox.addLayout(hbox)
        #run_hook('export_history_dialog', self, hbox)
        self.update()
        if not d.exec_():
            return
        filename = filename_e.text()
        if not filename:
            return
        try:
            self.do_export_history(filename, csv_button.isChecked())
        except (IOError, os.error) as reason:
            export_error_label = _("Electrum was unable to produce a transaction export.")
            self.parent.show_critical(export_error_label + "\n" + str(reason), title=_("Unable to export history"))
            return
        self.parent.show_message(_("Your wallet history has been successfully exported."))
    def do_export_history(self, file_name, is_csv):
        """Write the full wallet history to `file_name`, as CSV if `is_csv` else JSON."""
        hist = self.wallet.get_full_history(domain=self.hm.get_domain(),
                                            from_timestamp=None,
                                            to_timestamp=None,
                                            fx=self.parent.fx,
                                            show_fees=True)
        txns = hist['transactions']
        lines = []
        if is_csv:
            # pre-build one row per tx, in header order (see writerow below)
            for item in txns:
                lines.append([item['txid'],
                              item.get('label', ''),
                              item['confirmations'],
                              item['value'],
                              item.get('fiat_value', ''),
                              item.get('fee', ''),
                              item.get('fiat_fee', ''),
                              item['date']])
        with open(file_name, "w+", encoding='utf-8') as f:
            if is_csv:
                import csv
                transaction = csv.writer(f, lineterminator='\n')
                transaction.writerow(["transaction_hash",
                                      "label",
                                      "confirmations",
                                      "value",
                                      "fiat_value",
                                      "fee",
                                      "fiat_fee",
                                      "timestamp"])
                for line in lines:
                    transaction.writerow(line)
            else:
                from electrum.util import json_encode
                f.write(json_encode(txns))

    def text_txid_from_coordinate(self, row, col):
        """Return (display text, txid) for the cell at proxy (row, col)."""
        idx = self.model().mapToSource(self.model().index(row, col))
        tx_item = self.hm.transactions.value_from_pos(idx.row())
        return self.hm.data(idx, Qt.DisplayRole).value(), tx_item['txid']
| 44.696925 | 142 | 0.618023 |
acf61367ed3991c9629d675e7a6840cdba14a81a | 24,831 | py | Python | sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2018_10_01/operations/_availability_sets_operations.py | vbarbaresi/azure-sdk-for-python | 397ba46c51d001ff89c66b170f5576cf8f49c05f | [
"MIT"
] | 8 | 2021-01-13T23:44:08.000Z | 2021-03-17T10:13:36.000Z | sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2018_10_01/operations/_availability_sets_operations.py | vbarbaresi/azure-sdk-for-python | 397ba46c51d001ff89c66b170f5576cf8f49c05f | [
"MIT"
] | null | null | null | sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2018_10_01/operations/_availability_sets_operations.py | vbarbaresi/azure-sdk-for-python | 397ba46c51d001ff89c66b170f5576cf8f49c05f | [
"MIT"
] | null | null | null | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class AvailabilitySetsOperations(object):
    """AvailabilitySetsOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.compute.v2018_10_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = models

    def __init__(self, client, config, serializer, deserializer):
        # thin container wiring the shared pipeline client and (de)serializers
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    def create_or_update(
        self,
        resource_group_name,  # type: str
        availability_set_name,  # type: str
        parameters,  # type: "models.AvailabilitySet"
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.AvailabilitySet"
        """Create or update an availability set.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param availability_set_name: The name of the availability set.
        :type availability_set_name: str
        :param parameters: Parameters supplied to the Create Availability Set operation.
        :type parameters: ~azure.mgmt.compute.v2018_10_01.models.AvailabilitySet
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: AvailabilitySet, or the result of cls(response)
        :rtype: ~azure.mgmt.compute.v2018_10_01.models.AvailabilitySet
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.AvailabilitySet"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-10-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.create_or_update.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'availabilitySetName': self._serialize.url("availability_set_name", availability_set_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # serialize the body and issue the PUT through the shared pipeline
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'AvailabilitySet')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            # map known error codes to typed exceptions; otherwise generic HttpResponseError
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('AvailabilitySet', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/availabilitySets/{availabilitySetName}'}  # type: ignore
    def update(
        self,
        resource_group_name,  # type: str
        availability_set_name,  # type: str
        parameters,  # type: "models.AvailabilitySetUpdate"
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.AvailabilitySet"
        """Update an availability set.

        Sends an HTTP PATCH with *parameters* serialized as ``AvailabilitySetUpdate``.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param availability_set_name: The name of the availability set.
        :type availability_set_name: str
        :param parameters: Parameters supplied to the Update Availability Set operation.
        :type parameters: ~azure.mgmt.compute.v2018_10_01.models.AvailabilitySetUpdate
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: AvailabilitySet, or the result of cls(response)
        :rtype: ~azure.mgmt.compute.v2018_10_01.models.AvailabilitySet
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.AvailabilitySet"]
        # Map well-known error status codes to typed exceptions; callers may
        # extend or override the mapping via the ``error_map`` kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-10-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL
        url = self.update.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'availabilitySetName': self._serialize.url("availability_set_name", availability_set_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        # Serialize the body and send the PATCH request through the pipeline.
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'AvailabilitySetUpdate')
        body_content_kwargs['content'] = body_content
        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('AvailabilitySet', pipeline_response)
        if cls:
            # Let a caller-supplied ``cls`` transform the raw response.
            return cls(pipeline_response, deserialized, {})
        return deserialized
    update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/availabilitySets/{availabilitySetName}'}  # type: ignore
    def delete(
        self,
        resource_group_name,  # type: str
        availability_set_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Delete an availability set.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param availability_set_name: The name of the availability set.
        :type availability_set_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        # Map well-known error status codes to typed exceptions; callers may
        # extend or override the mapping via the ``error_map`` kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-10-01"
        # Construct URL
        url = self.delete.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'availabilitySetName': self._serialize.url("availability_set_name", availability_set_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # Both 200 and 204 are treated as success; no body is deserialized.
        if response.status_code not in [200, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        if cls:
            return cls(pipeline_response, None, {})
    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/availabilitySets/{availabilitySetName}'}  # type: ignore
    def get(
        self,
        resource_group_name,  # type: str
        availability_set_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.AvailabilitySet"
        """Retrieves information about an availability set.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param availability_set_name: The name of the availability set.
        :type availability_set_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: AvailabilitySet, or the result of cls(response)
        :rtype: ~azure.mgmt.compute.v2018_10_01.models.AvailabilitySet
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.AvailabilitySet"]
        # Map well-known error status codes to typed exceptions; callers may
        # extend or override the mapping via the ``error_map`` kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-10-01"
        accept = "application/json"
        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'availabilitySetName': self._serialize.url("availability_set_name", availability_set_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('AvailabilitySet', pipeline_response)
        if cls:
            # Let a caller-supplied ``cls`` transform the raw response.
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/availabilitySets/{availabilitySetName}'}  # type: ignore
    def list_by_subscription(
        self,
        expand=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["models.AvailabilitySetListResult"]
        """Lists all availability sets in a subscription.

        :param expand: The expand expression to apply to the operation. Allowed values are
         'instanceView'.
        :type expand: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either AvailabilitySetListResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.compute.v2018_10_01.models.AvailabilitySetListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.AvailabilitySetListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-10-01"
        accept = "application/json"
        def prepare_request(next_link=None):
            # First page: build URL/query from the operation metadata.
            # Subsequent pages: the server-provided next link is used verbatim.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list_by_subscription.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                if expand is not None:
                    query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        def extract_data(pipeline_response):
            # Deserialize one page and return (continuation token, items iterator).
            deserialized = self._deserialize('AvailabilitySetListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return ItemPaged(
            get_next, extract_data
        )
    list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/availabilitySets'}  # type: ignore
    def list(
        self,
        resource_group_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["models.AvailabilitySetListResult"]
        """Lists all availability sets in a resource group.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either AvailabilitySetListResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.compute.v2018_10_01.models.AvailabilitySetListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.AvailabilitySetListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-10-01"
        accept = "application/json"
        def prepare_request(next_link=None):
            # First page: build URL/query from the operation metadata.
            # Subsequent pages: the server-provided next link is used verbatim.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        def extract_data(pipeline_response):
            # Deserialize one page and return (continuation token, items iterator).
            deserialized = self._deserialize('AvailabilitySetListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/availabilitySets'}  # type: ignore
    def list_available_sizes(
        self,
        resource_group_name,  # type: str
        availability_set_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["models.VirtualMachineSizeListResult"]
        """Lists all available virtual machine sizes that can be used to create a new virtual machine in
        an existing availability set.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param availability_set_name: The name of the availability set.
        :type availability_set_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either VirtualMachineSizeListResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.compute.v2018_10_01.models.VirtualMachineSizeListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.VirtualMachineSizeListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-10-01"
        accept = "application/json"
        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list_available_sizes.metadata['url']  # type: ignore
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'availabilitySetName': self._serialize.url("availability_set_name", availability_set_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        def extract_data(pipeline_response):
            deserialized = self._deserialize('VirtualMachineSizeListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            # Continuation token is always None: this operation returns a single page.
            return None, iter(list_of_elem)
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return ItemPaged(
            get_next, extract_data
        )
    list_available_sizes.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/availabilitySets/{availabilitySetName}/vmSizes'}  # type: ignore
| 48.309339 | 204 | 0.659377 |
acf6139708a51f5458067a5819cff512a41300a3 | 35,577 | py | Python | homeassistant/helpers/config_validation.py | larsvinc/core | 9eb854ef0a6ddb5ecdc4dbe639bedbadab80c90d | [
"Apache-2.0"
] | 3 | 2021-04-27T16:37:48.000Z | 2022-02-23T02:47:33.000Z | homeassistant/helpers/config_validation.py | larsvinc/core | 9eb854ef0a6ddb5ecdc4dbe639bedbadab80c90d | [
"Apache-2.0"
] | 32 | 2021-02-19T07:17:53.000Z | 2022-03-31T06:06:26.000Z | homeassistant/helpers/config_validation.py | coreGreenberet/home-assistant | 3aa9033bdd0250c365d66d016b13a3cd28e80428 | [
"Apache-2.0"
] | 1 | 2021-08-07T10:08:32.000Z | 2021-08-07T10:08:32.000Z | """Helpers for config validation using voluptuous."""
from datetime import (
date as date_sys,
datetime as datetime_sys,
time as time_sys,
timedelta,
)
from enum import Enum
import inspect
import logging
from numbers import Number
import os
import re
from socket import _GLOBAL_DEFAULT_TIMEOUT # type: ignore # private, not in typeshed
from typing import (
Any,
Callable,
Dict,
Hashable,
List,
Optional,
Pattern,
Type,
TypeVar,
Union,
cast,
)
from urllib.parse import urlparse
from uuid import UUID
import voluptuous as vol
import voluptuous_serialize
from homeassistant.const import (
ATTR_AREA_ID,
ATTR_DEVICE_ID,
ATTR_ENTITY_ID,
CONF_ABOVE,
CONF_ALIAS,
CONF_ATTRIBUTE,
CONF_BELOW,
CONF_CHOOSE,
CONF_CONDITION,
CONF_CONDITIONS,
CONF_CONTINUE_ON_TIMEOUT,
CONF_COUNT,
CONF_DEFAULT,
CONF_DELAY,
CONF_DEVICE_ID,
CONF_DOMAIN,
CONF_ENTITY_ID,
CONF_ENTITY_NAMESPACE,
CONF_EVENT,
CONF_EVENT_DATA,
CONF_EVENT_DATA_TEMPLATE,
CONF_FOR,
CONF_PLATFORM,
CONF_REPEAT,
CONF_SCAN_INTERVAL,
CONF_SCENE,
CONF_SEQUENCE,
CONF_SERVICE,
CONF_SERVICE_TEMPLATE,
CONF_STATE,
CONF_TARGET,
CONF_TIMEOUT,
CONF_UNIT_SYSTEM_IMPERIAL,
CONF_UNIT_SYSTEM_METRIC,
CONF_UNTIL,
CONF_VALUE_TEMPLATE,
CONF_VARIABLES,
CONF_WAIT_FOR_TRIGGER,
CONF_WAIT_TEMPLATE,
CONF_WHILE,
ENTITY_MATCH_ALL,
ENTITY_MATCH_NONE,
SUN_EVENT_SUNRISE,
SUN_EVENT_SUNSET,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
WEEKDAYS,
)
from homeassistant.core import split_entity_id, valid_entity_id
from homeassistant.exceptions import TemplateError
from homeassistant.helpers import (
script_variables as script_variables_helper,
template as template_helper,
)
from homeassistant.helpers.logging import KeywordStyleAdapter
from homeassistant.util import raise_if_invalid_path, slugify as util_slugify
import homeassistant.util.dt as dt_util
# pylint: disable=invalid-name
# Error template used by the time-period validators below.
TIME_PERIOD_ERROR = "offset {} should be format 'HH:MM', 'HH:MM:SS' or 'HH:MM:SS.F'"
# Home Assistant types
byte = vol.All(vol.Coerce(int), vol.Range(min=0, max=255))  # one unsigned byte
small_float = vol.All(vol.Coerce(float), vol.Range(min=0, max=1))  # fraction in [0, 1]
positive_int = vol.All(vol.Coerce(int), vol.Range(min=0))
positive_float = vol.All(vol.Coerce(float), vol.Range(min=0))
latitude = vol.All(
    vol.Coerce(float), vol.Range(min=-90, max=90), msg="invalid latitude"
)
longitude = vol.All(
    vol.Coerce(float), vol.Range(min=-180, max=180), msg="invalid longitude"
)
# Exactly two elements: [latitude, longitude], in that order.
gps = vol.ExactSequence([latitude, longitude])
sun_event = vol.All(vol.Lower, vol.Any(SUN_EVENT_SUNSET, SUN_EVENT_SUNRISE))
# Valid TCP/UDP port number.
port = vol.All(vol.Coerce(int), vol.Range(min=1, max=65535))
# typing typevar
T = TypeVar("T")
def path(value: Any) -> str:
    """Validate that *value* is a string representing a safe path."""
    if not isinstance(value, str):
        raise vol.Invalid("Expected a string")
    try:
        raise_if_invalid_path(value)
        return value
    except ValueError as err:
        raise vol.Invalid("Invalid path") from err
# Adapted from:
# https://github.com/alecthomas/voluptuous/issues/115#issuecomment-144464666
def has_at_least_one_key(*keys: str) -> Callable:
    """Build a validator requiring at least one of *keys* in a dict."""

    def validate(obj: Dict) -> Dict:
        """Return ``obj`` unchanged when any required key is present."""
        if not isinstance(obj, dict):
            raise vol.Invalid("expected dictionary")
        if any(key in keys for key in obj):
            return obj
        raise vol.Invalid("must contain at least one of {}.".format(", ".join(keys)))

    return validate
def has_at_most_one_key(*keys: str) -> Callable[[Dict], Dict]:
    """Build a validator allowing at most one of *keys* in a dict."""

    def validate(obj: Dict) -> Dict:
        """Return ``obj`` unchanged unless more than one key matches."""
        if not isinstance(obj, dict):
            raise vol.Invalid("expected dictionary")
        if len({key for key in keys if key in obj}) > 1:
            raise vol.Invalid("must contain at most one of {}.".format(", ".join(keys)))
        return obj

    return validate
def boolean(value: Any) -> bool:
    """Validate and coerce a boolean value."""
    if isinstance(value, bool):
        return value
    if isinstance(value, str):
        normalized = value.lower().strip()
        if normalized in {"1", "true", "yes", "on", "enable"}:
            return True
        if normalized in {"0", "false", "no", "off", "disable"}:
            return False
    elif isinstance(value, Number):
        # Numbers coerce on zero/non-zero; mypy needs the ignore (python/mypy#3186).
        return value != 0  # type: ignore
    raise vol.Invalid(f"invalid boolean value {value}")
_WS = re.compile("\\s*")


def whitespace(value: Any) -> str:
    """Validate that the value is a string consisting only of whitespace."""
    if not isinstance(value, str) or _WS.fullmatch(value) is None:
        raise vol.Invalid(f"contains non-whitespace: {value}")
    return value
def isdevice(value: Any) -> str:
    """Validate that a filesystem node exists at *value*."""
    try:
        os.stat(value)
    except OSError as err:
        raise vol.Invalid(f"No device at {value} found") from err
    return str(value)
def matches_regex(regex: str) -> Callable[[Any], str]:
    """Build a validator that requires a string matching *regex*."""
    compiled = re.compile(regex)

    def validator(value: Any) -> str:
        """Return *value* if it is a string matching the compiled pattern."""
        if not isinstance(value, str):
            raise vol.Invalid(f"not a string value: {value}")
        if compiled.match(value) is None:
            raise vol.Invalid(
                f"value {value} does not match regular expression {compiled.pattern}"
            )
        return value

    return validator
def is_regex(value: Any) -> Pattern[Any]:
    """Validate that a string is a valid regular expression."""
    try:
        return re.compile(value)
    except TypeError as err:
        raise vol.Invalid(
            f"value {value} is of the wrong type for a regular expression"
        ) from err
    except re.error as err:
        raise vol.Invalid(f"value {value} is not a valid regular expression") from err
def isfile(value: Any) -> str:
    """Validate that the value is an existing, readable file."""
    if value is None:
        raise vol.Invalid("None is not file")
    resolved = os.path.expanduser(str(value))
    if not os.path.isfile(resolved):
        raise vol.Invalid("not a file")
    if not os.access(resolved, os.R_OK):
        raise vol.Invalid("file not readable")
    return resolved
def isdir(value: Any) -> str:
    """Validate that the value is an existing, readable directory."""
    if value is None:
        raise vol.Invalid("not a directory")
    resolved = os.path.expanduser(str(value))
    if not os.path.isdir(resolved):
        raise vol.Invalid("not a directory")
    if not os.access(resolved, os.R_OK):
        raise vol.Invalid("directory not readable")
    return resolved
def ensure_list(value: Union[T, List[T], None]) -> List[T]:
    """Wrap value in list if it is not one; None becomes an empty list."""
    if value is None:
        return []
    if isinstance(value, list):
        return value
    return [value]
def entity_id(value: Any) -> str:
    """Validate Entity ID (``domain.object_id``)."""
    candidate = string(value).lower()
    if not valid_entity_id(candidate):
        raise vol.Invalid(f"Entity ID {value} is an invalid entity id")
    return candidate
def entity_ids(value: Union[str, List]) -> List[str]:
    """Validate a list (or comma-separated string) of Entity IDs."""
    if value is None:
        raise vol.Invalid("Entity IDs can not be None")
    if isinstance(value, str):
        value = [part.strip() for part in value.split(",")]
    return [entity_id(item) for item in value]
# Entity id list that also accepts the special "all"/"none" match keywords.
comp_entity_ids = vol.Any(
    vol.All(vol.Lower, vol.Any(ENTITY_MATCH_ALL, ENTITY_MATCH_NONE)), entity_ids
)
def entity_domain(domain: Union[str, List[str]]) -> Callable[[Any], str]:
    """Build a validator for a single entity id in the given domain(s)."""
    ent_domain = entities_domain(domain)

    def validate(value: str) -> str:
        """Return the single validated entity id."""
        matched = ent_domain(value)
        if len(matched) != 1:
            raise vol.Invalid(f"Expected exactly 1 entity, got {len(matched)}")
        return matched[0]

    return validate
def entities_domain(
    domain: Union[str, List[str]]
) -> Callable[[Union[str, List]], List[str]]:
    """Build a validator for entity ids restricted to the given domain(s)."""
    # Normalize the accepted domains into a set for uniform membership tests.
    allowed = {domain} if isinstance(domain, str) else set(domain)

    def validate(values: Union[str, List]) -> List[str]:
        """Return validated entity ids, all belonging to *domain*."""
        values = entity_ids(values)
        for ent_id in values:
            if split_entity_id(ent_id)[0] not in allowed:
                raise vol.Invalid(
                    f"Entity ID '{ent_id}' does not belong to domain '{domain}'"
                )
        return values

    return validate
def enum(enumClass: Type[Enum]) -> vol.All:
    """Create validator for specified enum (name lookup -> member)."""
    member_names = enumClass.__members__
    return vol.All(vol.In(member_names), enumClass.__getitem__)
def icon(value: Any) -> str:
    """Validate an icon reference of the form ``prefix:name``."""
    str_value = str(value)
    if ":" not in str_value:
        raise vol.Invalid('Icons should be specified in the form "prefix:name"')
    return str_value
# Mapping form of a time period, e.g. {"hours": 1, "minutes": 30} -> timedelta.
time_period_dict = vol.All(
    dict,
    vol.Schema(
        {
            "days": vol.Coerce(float),
            "hours": vol.Coerce(float),
            "minutes": vol.Coerce(float),
            "seconds": vol.Coerce(float),
            "milliseconds": vol.Coerce(float),
        }
    ),
    # At least one component must be present before the timedelta conversion.
    has_at_least_one_key("days", "hours", "minutes", "seconds", "milliseconds"),
    lambda value: timedelta(**value),
)
def time(value: Any) -> time_sys:
    """Validate and transform a time value."""
    if isinstance(value, time_sys):
        return value
    try:
        parsed = dt_util.parse_time(value)
    except TypeError as err:
        raise vol.Invalid("Not a parseable type") from err
    if parsed is None:
        raise vol.Invalid(f"Invalid time specified: {value}")
    return parsed
def date(value: Any) -> date_sys:
    """Validate and transform a date value."""
    if isinstance(value, date_sys):
        return value
    try:
        parsed = dt_util.parse_date(value)
    except TypeError as err:
        raise vol.Invalid("Not a parseable type") from err
    if parsed is None:
        raise vol.Invalid("Could not parse date")
    return parsed
def time_period_str(value: str) -> timedelta:
    """Validate and transform a '[+-]HH:MM[:SS[.F]]' offset into a timedelta."""
    if isinstance(value, int):  # type: ignore
        raise vol.Invalid("Make sure you wrap time values in quotes")
    if not isinstance(value, str):
        raise vol.Invalid(TIME_PERIOD_ERROR.format(value))
    # Optional leading sign; only '-' makes the offset negative.
    negative_offset = value.startswith("-")
    if value[:1] in ("-", "+"):
        value = value[1:]
    fields = value.split(":")
    if len(fields) not in (2, 3):
        raise vol.Invalid(TIME_PERIOD_ERROR.format(value))
    try:
        hour = int(fields[0])
        minute = int(fields[1])
        second = float(fields[2]) if len(fields) == 3 else 0
    except ValueError as err:
        raise vol.Invalid(TIME_PERIOD_ERROR.format(value)) from err
    offset = timedelta(hours=hour, minutes=minute, seconds=second)
    return -offset if negative_offset else offset
def time_period_seconds(value: Union[float, str]) -> timedelta:
    """Validate and transform a number of seconds into a timedelta."""
    try:
        seconds = float(value)
    except (ValueError, TypeError) as err:
        raise vol.Invalid(f"Expected seconds, got {value}") from err
    return timedelta(seconds=seconds)
# Accept any supported time-period representation: string, seconds, timedelta or dict.
time_period = vol.Any(time_period_str, time_period_seconds, timedelta, time_period_dict)
def match_all(value: T) -> T:
    """Validator that accepts every value unchanged."""
    return value
def positive_timedelta(value: timedelta) -> timedelta:
    """Validate that a timedelta is not negative."""
    if value.total_seconds() < 0:
        raise vol.Invalid("Time period should be positive")
    return value
# Time-period validators that additionally reject negative periods.
positive_time_period_dict = vol.All(time_period_dict, positive_timedelta)
positive_time_period = vol.All(time_period, positive_timedelta)
def remove_falsy(value: List[T]) -> List[T]:
    """Remove falsy values from a list."""
    return list(filter(None, value))
def service(value: Any) -> str:
    """Validate a service identifier (``domain.name``)."""
    # Services share the entity-id shape, so reuse that validator.
    lowered = string(value).lower()
    if not valid_entity_id(lowered):
        raise vol.Invalid(f"Service {value} does not match format <domain>.<name>")
    return lowered
def slug(value: Any) -> str:
    """Validate that *value* is already a valid slug."""
    if value is None:
        raise vol.Invalid("Slug should not be None")
    str_value = str(value)
    slugified = util_slugify(str_value)
    if str_value != slugified:
        raise vol.Invalid(f"invalid slug {value} (try {slugified})")
    return str_value
def schema_with_slug_keys(
    value_schema: Union[T, Callable], *, slug_validator: Callable[[Any], str] = slug
) -> Callable:
    """Ensure dicts have slugs as keys.

    Replacement of vol.Schema({cv.slug: value_schema}) to prevent misleading
    "Extra keys" errors from voluptuous.
    """
    schema = vol.Schema({str: value_schema})

    def verify(value: Dict) -> Dict:
        """Validate every key with *slug_validator*, then apply the schema."""
        if not isinstance(value, dict):
            raise vol.Invalid("expected dictionary")
        for key in value:
            slug_validator(key)
        return cast(Dict, schema(value))

    return verify
def slugify(value: Any) -> str:
    """Coerce a value to a slug."""
    if value is None:
        raise vol.Invalid("Slug should not be None")
    slugified = util_slugify(str(value))
    if not slugified:
        raise vol.Invalid(f"Unable to slugify {value}")
    return slugified
def string(value: Any) -> str:
    """Coerce value to string, except for None."""
    if value is None:
        raise vol.Invalid("string value is None")
    if isinstance(value, template_helper.ResultWrapper):
        # Template result wrappers are coerced from their wrapped result.
        return str(value.render_result)
    if isinstance(value, (list, dict)):
        raise vol.Invalid("value should be a string")
    return str(value)
def string_with_no_html(value: Any) -> str:
    """Validate that the value is a string containing no HTML tags."""
    text = string(value)
    if re.search(r"<[a-z][\s\S]*>", text):
        raise vol.Invalid("the string should not contain HTML")
    return str(text)
def temperature_unit(value: Any) -> str:
    """Validate and transform a temperature unit ('C' or 'F')."""
    normalized = str(value).upper()
    if normalized == "C":
        return TEMP_CELSIUS
    if normalized == "F":
        return TEMP_FAHRENHEIT
    raise vol.Invalid("invalid temperature unit (expected C or F)")
# Accepts the configured unit-system names (case-insensitive).
unit_system = vol.All(
    vol.Lower, vol.Any(CONF_UNIT_SYSTEM_METRIC, CONF_UNIT_SYSTEM_IMPERIAL)
)
def template(value: Optional[Any]) -> template_helper.Template:
    """Validate a jinja2 template string."""
    if value is None:
        raise vol.Invalid("template value is None")
    if isinstance(value, (list, dict, template_helper.Template)):
        raise vol.Invalid("template value should be a string")
    candidate = template_helper.Template(str(value))  # type: ignore
    try:
        candidate.ensure_valid()
    except TemplateError as ex:
        raise vol.Invalid(f"invalid template ({ex})") from ex
    return candidate
def dynamic_template(value: Optional[Any]) -> template_helper.Template:
    """Validate a dynamic (non static) jinja2 template.

    Unlike :func:`template`, the string must actually contain template
    syntax according to ``template_helper.is_template_string``.
    """
    if value is None:
        raise vol.Invalid("template value is None")
    if isinstance(value, (list, dict, template_helper.Template)):
        raise vol.Invalid("template value should be a string")
    if not template_helper.is_template_string(str(value)):
        # Fixed typo in error message ("dynmamic" -> "dynamic").
        raise vol.Invalid("template value does not contain a dynamic template")
    template_value = template_helper.Template(str(value))  # type: ignore
    try:
        template_value.ensure_valid()
        return template_value
    except TemplateError as ex:
        raise vol.Invalid(f"invalid template ({ex})") from ex
def template_complex(value: Any) -> Any:
    """Validate a complex jinja2 template (recurses into lists/dicts)."""
    if isinstance(value, list):
        return [template_complex(item) for item in value]
    if isinstance(value, dict):
        return {
            template_complex(key): template_complex(item)
            for key, item in value.items()
        }
    if isinstance(value, str) and template_helper.is_template_string(value):
        return template(value)
    return value
# Positive time period that may also be expressed as a (complex) template.
positive_time_period_template = vol.Any(
    positive_time_period, template, template_complex
)
def datetime(value: Any) -> datetime_sys:
    """Validate and transform a datetime value."""
    if isinstance(value, datetime_sys):
        return value
    try:
        parsed = dt_util.parse_datetime(value)
    except TypeError:
        parsed = None
    if parsed is None:
        raise vol.Invalid(f"Invalid datetime specified: {value}")
    return parsed
def time_zone(value: str) -> str:
    """Validate an IANA timezone name."""
    if dt_util.get_time_zone(value) is None:
        raise vol.Invalid(
            "Invalid time zone passed in. Valid options can be found here: "
            "http://en.wikipedia.org/wiki/List_of_tz_database_time_zones"
        )
    return value
# List of weekday abbreviations; a single value is wrapped into a list first.
weekdays = vol.All(ensure_list, [vol.In(WEEKDAYS)])
def socket_timeout(value: Optional[Any]) -> object:
"""Validate timeout float > 0.0.
None coerced to socket._GLOBAL_DEFAULT_TIMEOUT bare object.
"""
if value is None:
return _GLOBAL_DEFAULT_TIMEOUT
try:
float_value = float(value)
if float_value > 0.0:
return float_value
raise vol.Invalid("Invalid socket timeout value. float > 0.0 required.")
except Exception as err:
raise vol.Invalid(f"Invalid socket timeout: {err}")
# pylint: disable=no-value-for-parameter
def url(value: Any) -> str:
"""Validate an URL."""
url_in = str(value)
if urlparse(url_in).scheme in ["http", "https"]:
return cast(str, vol.Schema(vol.Url())(url_in))
raise vol.Invalid("invalid url")
def x10_address(value: str) -> str:
"""Validate an x10 address."""
regex = re.compile(r"([A-Pa-p]{1})(?:[2-9]|1[0-6]?)$")
if not regex.match(value):
raise vol.Invalid("Invalid X10 Address")
return str(value).lower()
def uuid4_hex(value: Any) -> str:
"""Validate a v4 UUID in hex format."""
try:
result = UUID(value, version=4)
except (ValueError, AttributeError, TypeError) as error:
raise vol.Invalid("Invalid Version4 UUID", error_message=str(error))
if result.hex != value.lower():
# UUID() will create a uuid4 if input is invalid
raise vol.Invalid("Invalid Version4 UUID")
return result.hex
def ensure_list_csv(value: Any) -> List:
"""Ensure that input is a list or make one from comma-separated string."""
if isinstance(value, str):
return [member.strip() for member in value.split(",")]
return ensure_list(value)
class multi_select:
"""Multi select validator returning list of selected values."""
def __init__(self, options: dict) -> None:
"""Initialize multi select."""
self.options = options
def __call__(self, selected: list) -> list:
"""Validate input."""
if not isinstance(selected, list):
raise vol.Invalid("Not a list")
for value in selected:
if value not in self.options:
raise vol.Invalid(f"{value} is not a valid option")
return selected
def deprecated(
key: str,
replacement_key: Optional[str] = None,
default: Optional[Any] = None,
) -> Callable[[Dict], Dict]:
"""
Log key as deprecated and provide a replacement (if exists).
Expected behavior:
- Outputs the appropriate deprecation warning if key is detected
- Processes schema moving the value from key to replacement_key
- Processes schema changing nothing if only replacement_key provided
- No warning if only replacement_key provided
- No warning if neither key nor replacement_key are provided
- Adds replacement_key with default value in this case
"""
module = inspect.getmodule(inspect.stack()[1][0])
if module is not None:
module_name = module.__name__
else:
# If Python is unable to access the sources files, the call stack frame
# will be missing information, so let's guard.
# https://github.com/home-assistant/core/issues/24982
module_name = __name__
if replacement_key:
warning = (
"The '{key}' option is deprecated,"
" please replace it with '{replacement_key}'"
)
else:
warning = (
"The '{key}' option is deprecated,"
" please remove it from your configuration"
)
def validator(config: Dict) -> Dict:
"""Check if key is in config and log warning."""
if key in config:
KeywordStyleAdapter(logging.getLogger(module_name)).warning(
warning,
key=key,
replacement_key=replacement_key,
)
value = config[key]
if replacement_key:
config.pop(key)
else:
value = default
keys = [key]
if replacement_key:
keys.append(replacement_key)
if value is not None and (
replacement_key not in config or default == config.get(replacement_key)
):
config[replacement_key] = value
return has_at_most_one_key(*keys)(config)
return validator
def key_value_schemas(
key: str, value_schemas: Dict[str, vol.Schema]
) -> Callable[[Any], Dict[str, Any]]:
"""Create a validator that validates based on a value for specific key.
This gives better error messages.
"""
def key_value_validator(value: Any) -> Dict[str, Any]:
if not isinstance(value, dict):
raise vol.Invalid("Expected a dictionary")
key_value = value.get(key)
if key_value not in value_schemas:
raise vol.Invalid(
f"Unexpected value for {key}: '{key_value}'. Expected {', '.join(value_schemas)}"
)
return cast(Dict[str, Any], value_schemas[key_value](value))
return key_value_validator
# Validator helpers
def key_dependency(
key: Hashable, dependency: Hashable
) -> Callable[[Dict[Hashable, Any]], Dict[Hashable, Any]]:
"""Validate that all dependencies exist for key."""
def validator(value: Dict[Hashable, Any]) -> Dict[Hashable, Any]:
"""Test dependencies."""
if not isinstance(value, dict):
raise vol.Invalid("key dependencies require a dict")
if key in value and dependency not in value:
raise vol.Invalid(
f'dependency violation - key "{key}" requires '
f'key "{dependency}" to exist'
)
return value
return validator
def custom_serializer(schema: Any) -> Any:
"""Serialize additional types for voluptuous_serialize."""
if schema is positive_time_period_dict:
return {"type": "positive_time_period_dict"}
if schema is string:
return {"type": "string"}
if schema is boolean:
return {"type": "boolean"}
if isinstance(schema, multi_select):
return {"type": "multi_select", "options": schema.options}
return voluptuous_serialize.UNSUPPORTED
# Schemas
PLATFORM_SCHEMA = vol.Schema(
{
vol.Required(CONF_PLATFORM): string,
vol.Optional(CONF_ENTITY_NAMESPACE): string,
vol.Optional(CONF_SCAN_INTERVAL): time_period,
}
)
PLATFORM_SCHEMA_BASE = PLATFORM_SCHEMA.extend({}, extra=vol.ALLOW_EXTRA)
ENTITY_SERVICE_FIELDS = {
vol.Optional(ATTR_ENTITY_ID): comp_entity_ids,
vol.Optional(ATTR_DEVICE_ID): vol.Any(
ENTITY_MATCH_NONE, vol.All(ensure_list, [str])
),
vol.Optional(ATTR_AREA_ID): vol.Any(ENTITY_MATCH_NONE, vol.All(ensure_list, [str])),
}
def make_entity_service_schema(
schema: dict, *, extra: int = vol.PREVENT_EXTRA
) -> vol.All:
"""Create an entity service schema."""
return vol.All(
vol.Schema(
{
**schema,
**ENTITY_SERVICE_FIELDS,
},
extra=extra,
),
has_at_least_one_key(*ENTITY_SERVICE_FIELDS),
)
SCRIPT_VARIABLES_SCHEMA = vol.All(
vol.Schema({str: template_complex}),
# pylint: disable=unnecessary-lambda
lambda val: script_variables_helper.ScriptVariables(val),
)
def script_action(value: Any) -> dict:
"""Validate a script action."""
if not isinstance(value, dict):
raise vol.Invalid("expected dictionary")
return ACTION_TYPE_SCHEMAS[determine_script_action(value)](value)
SCRIPT_SCHEMA = vol.All(ensure_list, [script_action])
EVENT_SCHEMA = vol.Schema(
{
vol.Optional(CONF_ALIAS): string,
vol.Required(CONF_EVENT): string,
vol.Optional(CONF_EVENT_DATA): vol.All(dict, template_complex),
vol.Optional(CONF_EVENT_DATA_TEMPLATE): vol.All(dict, template_complex),
}
)
SERVICE_SCHEMA = vol.All(
vol.Schema(
{
vol.Optional(CONF_ALIAS): string,
vol.Exclusive(CONF_SERVICE, "service name"): vol.Any(
service, dynamic_template
),
vol.Exclusive(CONF_SERVICE_TEMPLATE, "service name"): vol.Any(
service, dynamic_template
),
vol.Optional("data"): vol.All(dict, template_complex),
vol.Optional("data_template"): vol.All(dict, template_complex),
vol.Optional(CONF_ENTITY_ID): comp_entity_ids,
vol.Optional(CONF_TARGET): ENTITY_SERVICE_FIELDS,
}
),
has_at_least_one_key(CONF_SERVICE, CONF_SERVICE_TEMPLATE),
)
NUMERIC_STATE_THRESHOLD_SCHEMA = vol.Any(
vol.Coerce(float), vol.All(str, entity_domain("input_number"))
)
NUMERIC_STATE_CONDITION_SCHEMA = vol.All(
vol.Schema(
{
vol.Required(CONF_CONDITION): "numeric_state",
vol.Required(CONF_ENTITY_ID): entity_ids,
vol.Optional(CONF_ATTRIBUTE): str,
CONF_BELOW: NUMERIC_STATE_THRESHOLD_SCHEMA,
CONF_ABOVE: NUMERIC_STATE_THRESHOLD_SCHEMA,
vol.Optional(CONF_VALUE_TEMPLATE): template,
}
),
has_at_least_one_key(CONF_BELOW, CONF_ABOVE),
)
STATE_CONDITION_BASE_SCHEMA = {
vol.Required(CONF_CONDITION): "state",
vol.Required(CONF_ENTITY_ID): entity_ids,
vol.Optional(CONF_ATTRIBUTE): str,
vol.Optional(CONF_FOR): positive_time_period,
# To support use_trigger_value in automation
# Deprecated 2016/04/25
vol.Optional("from"): str,
}
STATE_CONDITION_STATE_SCHEMA = vol.Schema(
{
**STATE_CONDITION_BASE_SCHEMA,
vol.Required(CONF_STATE): vol.Any(str, [str]),
}
)
STATE_CONDITION_ATTRIBUTE_SCHEMA = vol.Schema(
{
**STATE_CONDITION_BASE_SCHEMA,
vol.Required(CONF_STATE): match_all,
}
)
def STATE_CONDITION_SCHEMA(value: Any) -> dict: # pylint: disable=invalid-name
"""Validate a state condition."""
if not isinstance(value, dict):
raise vol.Invalid("Expected a dictionary")
if CONF_ATTRIBUTE in value:
validated: dict = STATE_CONDITION_ATTRIBUTE_SCHEMA(value)
else:
validated = STATE_CONDITION_STATE_SCHEMA(value)
return key_dependency("for", "state")(validated)
SUN_CONDITION_SCHEMA = vol.All(
vol.Schema(
{
vol.Required(CONF_CONDITION): "sun",
vol.Optional("before"): sun_event,
vol.Optional("before_offset"): time_period,
vol.Optional("after"): vol.All(
vol.Lower, vol.Any(SUN_EVENT_SUNSET, SUN_EVENT_SUNRISE)
),
vol.Optional("after_offset"): time_period,
}
),
has_at_least_one_key("before", "after"),
)
TEMPLATE_CONDITION_SCHEMA = vol.Schema(
{
vol.Required(CONF_CONDITION): "template",
vol.Required(CONF_VALUE_TEMPLATE): template,
}
)
TIME_CONDITION_SCHEMA = vol.All(
vol.Schema(
{
vol.Required(CONF_CONDITION): "time",
"before": vol.Any(time, vol.All(str, entity_domain("input_datetime"))),
"after": vol.Any(time, vol.All(str, entity_domain("input_datetime"))),
"weekday": weekdays,
}
),
has_at_least_one_key("before", "after", "weekday"),
)
ZONE_CONDITION_SCHEMA = vol.Schema(
{
vol.Required(CONF_CONDITION): "zone",
vol.Required(CONF_ENTITY_ID): entity_ids,
"zone": entity_ids,
# To support use_trigger_value in automation
# Deprecated 2016/04/25
vol.Optional("event"): vol.Any("enter", "leave"),
}
)
AND_CONDITION_SCHEMA = vol.Schema(
{
vol.Required(CONF_CONDITION): "and",
vol.Required(CONF_CONDITIONS): vol.All(
ensure_list,
# pylint: disable=unnecessary-lambda
[lambda value: CONDITION_SCHEMA(value)],
),
}
)
OR_CONDITION_SCHEMA = vol.Schema(
{
vol.Required(CONF_CONDITION): "or",
vol.Required(CONF_CONDITIONS): vol.All(
ensure_list,
# pylint: disable=unnecessary-lambda
[lambda value: CONDITION_SCHEMA(value)],
),
}
)
NOT_CONDITION_SCHEMA = vol.Schema(
{
vol.Required(CONF_CONDITION): "not",
vol.Required(CONF_CONDITIONS): vol.All(
ensure_list,
# pylint: disable=unnecessary-lambda
[lambda value: CONDITION_SCHEMA(value)],
),
}
)
DEVICE_CONDITION_BASE_SCHEMA = vol.Schema(
{
vol.Required(CONF_CONDITION): "device",
vol.Required(CONF_DEVICE_ID): str,
vol.Required(CONF_DOMAIN): str,
}
)
DEVICE_CONDITION_SCHEMA = DEVICE_CONDITION_BASE_SCHEMA.extend({}, extra=vol.ALLOW_EXTRA)
CONDITION_SCHEMA: vol.Schema = vol.Schema(
vol.Any(
key_value_schemas(
CONF_CONDITION,
{
"numeric_state": NUMERIC_STATE_CONDITION_SCHEMA,
"state": STATE_CONDITION_SCHEMA,
"sun": SUN_CONDITION_SCHEMA,
"template": TEMPLATE_CONDITION_SCHEMA,
"time": TIME_CONDITION_SCHEMA,
"zone": ZONE_CONDITION_SCHEMA,
"and": AND_CONDITION_SCHEMA,
"or": OR_CONDITION_SCHEMA,
"not": NOT_CONDITION_SCHEMA,
"device": DEVICE_CONDITION_SCHEMA,
},
),
dynamic_template,
)
)
TRIGGER_SCHEMA = vol.All(
ensure_list, [vol.Schema({vol.Required(CONF_PLATFORM): str}, extra=vol.ALLOW_EXTRA)]
)
_SCRIPT_DELAY_SCHEMA = vol.Schema(
{
vol.Optional(CONF_ALIAS): string,
vol.Required(CONF_DELAY): positive_time_period_template,
}
)
_SCRIPT_WAIT_TEMPLATE_SCHEMA = vol.Schema(
{
vol.Optional(CONF_ALIAS): string,
vol.Required(CONF_WAIT_TEMPLATE): template,
vol.Optional(CONF_TIMEOUT): positive_time_period_template,
vol.Optional(CONF_CONTINUE_ON_TIMEOUT): boolean,
}
)
DEVICE_ACTION_BASE_SCHEMA = vol.Schema(
{vol.Required(CONF_DEVICE_ID): string, vol.Required(CONF_DOMAIN): str}
)
DEVICE_ACTION_SCHEMA = DEVICE_ACTION_BASE_SCHEMA.extend({}, extra=vol.ALLOW_EXTRA)
_SCRIPT_SCENE_SCHEMA = vol.Schema({vol.Required(CONF_SCENE): entity_domain("scene")})
_SCRIPT_REPEAT_SCHEMA = vol.Schema(
{
vol.Optional(CONF_ALIAS): string,
vol.Required(CONF_REPEAT): vol.All(
{
vol.Exclusive(CONF_COUNT, "repeat"): vol.Any(vol.Coerce(int), template),
vol.Exclusive(CONF_WHILE, "repeat"): vol.All(
ensure_list, [CONDITION_SCHEMA]
),
vol.Exclusive(CONF_UNTIL, "repeat"): vol.All(
ensure_list, [CONDITION_SCHEMA]
),
vol.Required(CONF_SEQUENCE): SCRIPT_SCHEMA,
},
has_at_least_one_key(CONF_COUNT, CONF_WHILE, CONF_UNTIL),
),
}
)
_SCRIPT_CHOOSE_SCHEMA = vol.Schema(
{
vol.Optional(CONF_ALIAS): string,
vol.Required(CONF_CHOOSE): vol.All(
ensure_list,
[
{
vol.Required(CONF_CONDITIONS): vol.All(
ensure_list, [CONDITION_SCHEMA]
),
vol.Required(CONF_SEQUENCE): SCRIPT_SCHEMA,
}
],
),
vol.Optional(CONF_DEFAULT): SCRIPT_SCHEMA,
}
)
_SCRIPT_WAIT_FOR_TRIGGER_SCHEMA = vol.Schema(
{
vol.Optional(CONF_ALIAS): string,
vol.Required(CONF_WAIT_FOR_TRIGGER): TRIGGER_SCHEMA,
vol.Optional(CONF_TIMEOUT): positive_time_period_template,
vol.Optional(CONF_CONTINUE_ON_TIMEOUT): boolean,
}
)
_SCRIPT_SET_SCHEMA = vol.Schema(
{
vol.Optional(CONF_ALIAS): string,
vol.Required(CONF_VARIABLES): SCRIPT_VARIABLES_SCHEMA,
}
)
SCRIPT_ACTION_DELAY = "delay"
SCRIPT_ACTION_WAIT_TEMPLATE = "wait_template"
SCRIPT_ACTION_CHECK_CONDITION = "condition"
SCRIPT_ACTION_FIRE_EVENT = "event"
SCRIPT_ACTION_CALL_SERVICE = "call_service"
SCRIPT_ACTION_DEVICE_AUTOMATION = "device"
SCRIPT_ACTION_ACTIVATE_SCENE = "scene"
SCRIPT_ACTION_REPEAT = "repeat"
SCRIPT_ACTION_CHOOSE = "choose"
SCRIPT_ACTION_WAIT_FOR_TRIGGER = "wait_for_trigger"
SCRIPT_ACTION_VARIABLES = "variables"
def determine_script_action(action: dict) -> str:
"""Determine action type."""
if CONF_DELAY in action:
return SCRIPT_ACTION_DELAY
if CONF_WAIT_TEMPLATE in action:
return SCRIPT_ACTION_WAIT_TEMPLATE
if CONF_CONDITION in action:
return SCRIPT_ACTION_CHECK_CONDITION
if CONF_EVENT in action:
return SCRIPT_ACTION_FIRE_EVENT
if CONF_DEVICE_ID in action:
return SCRIPT_ACTION_DEVICE_AUTOMATION
if CONF_SCENE in action:
return SCRIPT_ACTION_ACTIVATE_SCENE
if CONF_REPEAT in action:
return SCRIPT_ACTION_REPEAT
if CONF_CHOOSE in action:
return SCRIPT_ACTION_CHOOSE
if CONF_WAIT_FOR_TRIGGER in action:
return SCRIPT_ACTION_WAIT_FOR_TRIGGER
if CONF_VARIABLES in action:
return SCRIPT_ACTION_VARIABLES
return SCRIPT_ACTION_CALL_SERVICE
ACTION_TYPE_SCHEMAS: Dict[str, Callable[[Any], dict]] = {
SCRIPT_ACTION_CALL_SERVICE: SERVICE_SCHEMA,
SCRIPT_ACTION_DELAY: _SCRIPT_DELAY_SCHEMA,
SCRIPT_ACTION_WAIT_TEMPLATE: _SCRIPT_WAIT_TEMPLATE_SCHEMA,
SCRIPT_ACTION_FIRE_EVENT: EVENT_SCHEMA,
SCRIPT_ACTION_CHECK_CONDITION: CONDITION_SCHEMA,
SCRIPT_ACTION_DEVICE_AUTOMATION: DEVICE_ACTION_SCHEMA,
SCRIPT_ACTION_ACTIVATE_SCENE: _SCRIPT_SCENE_SCHEMA,
SCRIPT_ACTION_REPEAT: _SCRIPT_REPEAT_SCHEMA,
SCRIPT_ACTION_CHOOSE: _SCRIPT_CHOOSE_SCHEMA,
SCRIPT_ACTION_WAIT_FOR_TRIGGER: _SCRIPT_WAIT_FOR_TRIGGER_SCHEMA,
SCRIPT_ACTION_VARIABLES: _SCRIPT_SET_SCHEMA,
}
| 28.971498 | 97 | 0.645839 |
acf613e74b5f5c459e3e0ac56e917fa54ce06e89 | 2,866 | py | Python | movieInfoScraper.py | VladislavPetrusenko/Python-codes | 35bf4a891eb25ee1c8fe73b992ebf29c94291ff6 | [
"MIT"
] | null | null | null | movieInfoScraper.py | VladislavPetrusenko/Python-codes | 35bf4a891eb25ee1c8fe73b992ebf29c94291ff6 | [
"MIT"
] | null | null | null | movieInfoScraper.py | VladislavPetrusenko/Python-codes | 35bf4a891eb25ee1c8fe73b992ebf29c94291ff6 | [
"MIT"
] | null | null | null | from bs4 import BeautifulSoup
import requests
def getMovieDetails(movieName):
url = 'https://www.imdb.com'
query = '/search/title?title='
movieDetails = {}
movienamequery = query + '+'.join(movieName.strip().split(' '))
html = requests.get(url + movienamequery + '&title_type=feature')
bs = BeautifulSoup(html.text, 'html.parser')
result = bs.find('h3', {'class': 'lister-item-header'})
if result is None:
return None
movielink = url + result.a.attrs['href']
movieDetails['name'] = result.a.text
html = requests.get(movielink)
bs = BeautifulSoup(html.text, 'html.parser')
try:
movieDetails['year'] = bs.find('span', {'id': 'titleYear'}).a.text
except AttributeError:
movieDetails['year'] = 'Not available'
subtext = bs.find('div', {'class': 'subtext'})
movieDetails['genres'] = [i.text for i in subtext.findAll('a', {'title': None})]
try:
movieDetails['rating'] = bs.find('div', {'class': 'ratingValue'}).span.text
movieDetails['runtime'] = subtext.time.text.strip()
except AttributeError:
movieDetails['rating'] = 'Not yet rated'
movieDetails['runtime'] = 'Not available'
movieDetails['release_date'] = subtext.find('a', {'title': 'See more release dates'}).text.strip()
creditSummary = bs.findAll('div', {'class': 'credit_summary_item'})
movieDetails['directors'] = [i.text for i in creditSummary[0].findAll('a')]
movieDetails['writers'] = [i.text for i in creditSummary[1].findAll('a') if 'name' in i.attrs['href']]
try:
movieDetails['cast'] = [i.text for i in creditSummary[2].findAll('a') if 'name' in i.attrs['href']]
except IndexError:
movieDetails['cast'] = movieDetails['writers']
movieDetails['writers'] = 'Not found'
html = requests.get(movielink + 'plotsummary')
bs = BeautifulSoup(html.text, 'html.parser')
movieDetails['plot'] = bs.find('li', {'class': 'ipl-zebra-list__item'}).p.text.strip()
return movieDetails
if __name__ == '__main__':
movieName = input('Enter the movie name whose details are to be fetched\n')
movieDetails = getMovieDetails(movieName)
if movieDetails is None:
print('No movie of this name found!!!!!')
quit()
print('\n{movie} ({year})'.format(movie = movieDetails['name'], year = movieDetails['year']))
print('Rating:', movieDetails['rating'])
print('Runtime:', movieDetails['runtime'])
print('Release Date:', movieDetails['release_date'])
print('Genres:', ', '.join(movieDetails['genres']))
print('Director:', ', '.join(movieDetails['directors']))
print('Writer:', ', '.join(movieDetails['writers']))
print('Cast:', ', '.join(movieDetails['cast']))
print('Plot Summary:\n', movieDetails['plot']) | 44.092308 | 108 | 0.622819 |
acf6149fe81ea3b1a00c99e48018e5e6f3eaa758 | 860 | py | Python | employees/admin.py | Atwinenickson/lendsuphumanresourcemanagement | b46df164d59a4e94300376d679e07bd9a60d6343 | [
"MIT",
"Unlicense"
] | 36 | 2019-11-26T11:46:32.000Z | 2022-02-17T13:18:18.000Z | employees/admin.py | Atwinenickson/lendsuphumanresourcemanagement | b46df164d59a4e94300376d679e07bd9a60d6343 | [
"MIT",
"Unlicense"
] | 13 | 2020-02-14T09:30:16.000Z | 2022-03-12T00:58:09.000Z | employees/admin.py | Atwinenickson/lendsuphumanresourcemanagement | b46df164d59a4e94300376d679e07bd9a60d6343 | [
"MIT",
"Unlicense"
] | 16 | 2019-06-14T12:11:29.000Z | 2022-02-14T15:16:07.000Z | from django.contrib import admin
# Register your models here.
from organisation_details.models import Team, Department, Position, OrganisationDetail
from .models import Employee, HomeAddress, Certification, EmergencyContact, Beneficiary, Spouse, Dependant, Deduction, \
BankDetail, Allowance, StatutoryDeduction
admin.site.site_header = "Soliton Telmec EMS Admin"
admin.site.register(Employee)
admin.site.register(HomeAddress)
admin.site.register(Certification)
admin.site.register(EmergencyContact)
admin.site.register(Beneficiary)
admin.site.register(Spouse)
admin.site.register(Dependant)
admin.site.register(Deduction)
admin.site.register(BankDetail)
admin.site.register(Team)
admin.site.register(Department)
admin.site.register(Position)
admin.site.register(OrganisationDetail)
admin.site.register(Allowance)
admin.site.register(StatutoryDeduction)
| 35.833333 | 120 | 0.834884 |
acf6150d200287be678c2747604f79306bf039dd | 2,708 | py | Python | doc/source/conf.py | lingxiankong/python-octaviaclient | 97604613f55e3c76a3625f7b34dbf1a8088c1999 | [
"Apache-2.0"
] | null | null | null | doc/source/conf.py | lingxiankong/python-octaviaclient | 97604613f55e3c76a3625f7b34dbf1a8088c1999 | [
"Apache-2.0"
] | null | null | null | doc/source/conf.py | lingxiankong/python-octaviaclient | 97604613f55e3c76a3625f7b34dbf1a8088c1999 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
sys.path.insert(0, os.path.abspath('../..'))
# -- General configuration ----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'sphinx.ext.autodoc',
'openstackdocstheme',
'cliff.sphinxext'
]
# autodoc generation is a bit aggressive and a nuisance when doing heavy
# text edit cycles.
# execute "export SPHINX_DEBUG=1" in your terminal to disable
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
copyright = u'2016, OpenStack Foundation'
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
# html_theme_path = ["."]
# html_theme = '_theme'
# html_static_path = ['static']
html_theme = 'openstackdocs'
html_theme_options = {
'show_other_versions': True
}
# Output file base name for HTML help builder.
htmlhelp_basename = 'python-octaviaclientdoc'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
('index',
'python-octaviaclient.tex',
u'python-octaviaclient Documentation',
u'OpenStack Foundation', 'manual'),
]
# Example configuration for intersphinx: refer to the Python standard library.
#intersphinx_mapping = {'http://docs.python.org/': None}
# openstackdocstheme options
repository_name = 'openstack/python-octaviaclient'
bug_project = '911'
bug_tag = 'doc'
autoprogram_cliff_application = 'openstack'
| 30.772727 | 79 | 0.710487 |
acf6151736e9f505a97ad058d91cf5f34d49cd89 | 6,360 | py | Python | venv/lib/python2.7/site-packages/nltk/parse/util.py | sravani-m/Web-Application-Security-Framework | d9f71538f5cba6fe1d8eabcb26c557565472f6a6 | [
"MIT"
] | 3 | 2019-04-09T22:59:33.000Z | 2019-06-14T09:23:24.000Z | nltk/parse/util.py | guker/nltk | 085399ea9d53318ae6e8568909fa55f0d905ad5a | [
"Apache-2.0"
] | null | null | null | nltk/parse/util.py | guker/nltk | 085399ea9d53318ae6e8568909fa55f0d905ad5a | [
"Apache-2.0"
] | null | null | null | # Natural Language Toolkit: Parser Utility Functions
#
# Author: Ewan Klein <ewan@inf.ed.ac.uk>
#
# Copyright (C) 2001-2014 NLTK Project
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT
"""
Utility functions for parsers.
"""
from __future__ import print_function
from nltk.grammar import CFG, FeatureGrammar, PCFG
from nltk.data import load
from nltk.parse.chart import Chart, ChartParser
from nltk.parse.pchart import InsideChartParser
from nltk.parse.featurechart import FeatureChart, FeatureChartParser
def load_parser(grammar_url, trace=0,
parser=None, chart_class=None,
beam_size=0, **load_args):
"""
Load a grammar from a file, and build a parser based on that grammar.
The parser depends on the grammar format, and might also depend
on properties of the grammar itself.
The following grammar formats are currently supported:
- ``'cfg'`` (CFGs: ``CFG``)
- ``'pcfg'`` (probabilistic CFGs: ``PCFG``)
- ``'fcfg'`` (feature-based CFGs: ``FeatureGrammar``)
:type grammar_url: str
:param grammar_url: A URL specifying where the grammar is located.
The default protocol is ``"nltk:"``, which searches for the file
in the the NLTK data package.
:type trace: int
:param trace: The level of tracing that should be used when
parsing a text. ``0`` will generate no tracing output;
and higher numbers will produce more verbose tracing output.
:param parser: The class used for parsing; should be ``ChartParser``
or a subclass.
If None, the class depends on the grammar format.
:param chart_class: The class used for storing the chart;
should be ``Chart`` or a subclass.
Only used for CFGs and feature CFGs.
If None, the chart class depends on the grammar format.
:type beam_size: int
:param beam_size: The maximum length for the parser's edge queue.
Only used for probabilistic CFGs.
:param load_args: Keyword parameters used when loading the grammar.
See ``data.load`` for more information.
"""
grammar = load(grammar_url, **load_args)
if not isinstance(grammar, CFG):
raise ValueError("The grammar must be a CFG, "
"or a subclass thereof.")
if isinstance(grammar, PCFG):
if parser is None:
parser = InsideChartParser
return parser(grammar, trace=trace, beam_size=beam_size)
elif isinstance(grammar, FeatureGrammar):
if parser is None:
parser = FeatureChartParser
if chart_class is None:
chart_class = FeatureChart
return parser(grammar, trace=trace, chart_class=chart_class)
else: # Plain CFG.
if parser is None:
parser = ChartParser
if chart_class is None:
chart_class = Chart
return parser(grammar, trace=trace, chart_class=chart_class)
######################################################################
#{ Test Suites
######################################################################
class TestGrammar(object):
"""
Unit tests for CFG.
"""
def __init__(self, grammar, suite, accept=None, reject=None):
self.test_grammar = grammar
self.cp = load_parser(grammar, trace=0)
self.suite = suite
self._accept = accept
self._reject = reject
def run(self, show_trees=False):
"""
Sentences in the test suite are divided into two classes:
- grammatical (``accept``) and
- ungrammatical (``reject``).
If a sentence should parse accordng to the grammar, the value of
``trees`` will be a non-empty list. If a sentence should be rejected
according to the grammar, then the value of ``trees`` will be None.
"""
for test in self.suite:
print(test['doc'] + ":", end=' ')
for key in ['accept', 'reject']:
for sent in test[key]:
tokens = sent.split()
trees = list(self.cp.parse(tokens))
if show_trees and trees:
print()
print(sent)
for tree in trees:
print(tree)
if key == 'accept':
if trees == []:
raise ValueError("Sentence '%s' failed to parse'" % sent)
else:
accepted = True
else:
if trees:
raise ValueError("Sentence '%s' received a parse'" % sent)
else:
rejected = True
if accepted and rejected:
print("All tests passed!")
def extract_test_sentences(string, comment_chars="#%;", encoding=None):
"""
Parses a string with one test sentence per line.
Lines can optionally begin with:
- a bool, saying if the sentence is grammatical or not, or
- an int, giving the number of parse trees is should have,
The result information is followed by a colon, and then the sentence.
Empty lines and lines beginning with a comment char are ignored.
:return: a list of tuple of sentences and expected results,
where a sentence is a list of str,
and a result is None, or bool, or int
:param comment_chars: ``str`` of possible comment characters.
:param encoding: the encoding of the string, if it is binary
"""
if encoding is not None:
string = string.decode(encoding)
sentences = []
for sentence in string.split('\n'):
if sentence == '' or sentence[0] in comment_chars:
continue
split_info = sentence.split(':', 1)
result = None
if len(split_info) == 2:
if split_info[0] in ['True','true','False','false']:
result = split_info[0] in ['True','true']
sentence = split_info[1]
else:
result = int(split_info[0])
sentence = split_info[1]
tokens = sentence.split()
if tokens == []:
continue
sentences += [(tokens, result)]
return sentences
# nose thinks it is a test
extract_test_sentences.__test__ = False
| 37.633136 | 86 | 0.584277 |
acf6151ca6d7c21d04d55e5252cb336d21bfd1b6 | 4,359 | py | Python | web_service/cnn_model.py | matheusbfernandes/projeto_4_sd | ad6756b93a269d4863b0b64eeb8c47cdc81ddbd1 | [
"MIT"
] | null | null | null | web_service/cnn_model.py | matheusbfernandes/projeto_4_sd | ad6756b93a269d4863b0b64eeb8c47cdc81ddbd1 | [
"MIT"
] | 5 | 2020-01-28T22:49:22.000Z | 2022-02-10T00:12:20.000Z | web_service/cnn_model.py | matheusbfernandes/Handwritten-Digit-Recognition-Demo | ad6756b93a269d4863b0b64eeb8c47cdc81ddbd1 | [
"MIT"
] | null | null | null | import numpy as np
import keras.backend as k
from keras.datasets import mnist
from keras.models import Sequential, model_from_json
from keras.losses import categorical_crossentropy
from keras.optimizers import Adam
from keras.utils import to_categorical
from keras.layers import Dense, Dropout, Flatten, Conv2D, MaxPooling2D
from PIL import Image, ImageOps
class Model(object):
def __init__(self, train_new_model=False):
self.x_train = None
self.y_train = None
self.x_test = None
self.y_test = None
if train_new_model:
self.model = self._new_model(128, 10)
self.compute_accuracy()
else:
with open('saved_weights/saved_model.json', 'r') as f:
self.model = model_from_json(f.read())
self.model.load_weights('saved_weights/model_weights.h5')
@staticmethod
def _load_dataset():
(x_train, y_train), (x_test, y_test) = mnist.load_data()
return x_train, y_train, x_test, y_test
def _pre_processing_data(self):
img_rows, img_cols = 28, 28
self.x_train = self.x_train.reshape(self.x_train.shape[0], img_rows, img_cols, 1)
self.x_test = self.x_test.reshape(self.x_test.shape[0], img_rows, img_cols, 1)
input_shape = (img_rows, img_cols, 1)
self.x_train = self.x_train.astype('float32')
self.x_test = self.x_test.astype('float32')
self.x_train /= 255
self.x_test /= 255
num_classes = len(np.unique(self.y_train))
self.y_train = to_categorical(self.y_train, num_classes)
self.y_test = to_categorical(self.y_test, num_classes)
return input_shape, num_classes
@staticmethod
def _initialize_model(input_shape, num_classes):
model = Sequential()
model.add(Conv2D(32, kernel_size=(3, 3), activation='relu', input_shape=input_shape))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(128, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(num_classes, activation='softmax'))
return model
def _new_model(self, batch_size, num_epoch):
self.x_train, self.y_train, self.x_test, self.y_test = self._load_dataset()
input_shape, num_classes = self._pre_processing_data()
new_model = self._initialize_model(input_shape, num_classes)
new_model.compile(loss=categorical_crossentropy, optimizer=Adam(), metrics=['accuracy'])
new_model.fit(self.x_train, self.y_train, batch_size=batch_size, epochs=num_epoch, verbose=1,
validation_data=(self.x_test, self.y_test))
return new_model
def compute_accuracy(self):
if self.x_train is None or self.y_train is None or self.x_test is None or self.y_test is None:
self.x_train, self.y_train, self.x_test, self.y_test = self._load_dataset()
_, _ = self._pre_processing_data()
self.model.compile(loss=categorical_crossentropy, optimizer=Adam(), metrics=['accuracy'])
score = self.model.evaluate(self.x_train, self.y_train, verbose=0)
print("using the MNIST Dataset")
print("Train loss:", score[0])
print("Train accuracy:", score[1])
print("-----------------------")
score = self.model.evaluate(self.x_test, self.y_test, verbose=0)
print("Test loss:", score[0])
print("Test accuracy:", score[1])
def inference(self):
img = Image.open("images/savedImage.png")
img.load()
background = Image.new("RGB", img.size, (255, 255, 255))
background.paste(img, mask=img.split()[3])
gray_image = background.convert('L')
gray_image = ImageOps.invert(gray_image)
gray_image = gray_image.resize((28, 28), Image.ANTIALIAS)
gray_image = np.asarray(gray_image, dtype='float32')
gray_image = np.expand_dims(gray_image, axis=0)
gray_image = np.expand_dims(gray_image, axis=-1)
gray_image = gray_image / 255
predicted_value = np.argmax(self.model.predict(gray_image))
k.clear_session()
return predicted_value
def main():
    """Entry point: build the model wrapper and print a prediction for the saved image."""
    app = Model()
    prediction = app.inference()
    print(prediction)
if __name__ == '__main__':
    main()
| 37.577586 | 102 | 0.653131 |
acf615b02eaeaf6b73f4616d7f1de4154ba0dd30 | 7,581 | py | Python | sync.py | molecular/coinflex-history-sync | 0f77e3f70b73b17fee39f5343dfcfef69d83d7e4 | [
"MIT"
] | 2 | 2021-12-21T16:03:55.000Z | 2022-03-27T02:25:48.000Z | sync.py | molecular/coinflex-history-sync | 0f77e3f70b73b17fee39f5343dfcfef69d83d7e4 | [
"MIT"
] | null | null | null | sync.py | molecular/coinflex-history-sync | 0f77e3f70b73b17fee39f5343dfcfef69d83d7e4 | [
"MIT"
] | null | null | null | import requests
import json
import datetime
import time
import traceback
from config import config
from coinflex import CoinFlex
class History:
	"""Incrementally syncs CoinFlex account history into a local JSON store.

	Endpoint descriptions are read from ``endpoints.json``. Synced records live
	in ``self.data`` keyed by endpoint name, each entry holding a ``latest_t``
	watermark (epoch milliseconds) used to resume the next incremental sync
	plus the accumulated ``data`` list.
	"""
	def __init__(self):
		# Authenticated REST client configured from the local config module.
		self.cf = CoinFlex(config['rest_url'], config['rest_path'], config['api_key'], config['api_secret'])
		self.data = {}
		# load endpoints.json
		with open('endpoints.json', 'r') as file:
			self.endpoints = json.load(file)
	def loadFromFile(self, filename):
		"""Load previously synced data from *filename*; start empty if missing or invalid."""
		try:
			with open(filename, 'r') as file:
				self.data = json.load(file)
				#print(f"data loaded from (unknown): ", json.dumps(self.data, indent=2))
		except (FileNotFoundError, json.decoder.JSONDecodeError):
			print(f"data file '(unknown)' not found or not valid JSON data, starting from empty data")
	def dumpToFile(self, filename):
		"""Write the in-memory data store to *filename* as indented JSON."""
		with open(filename, "w") as outfile:
			outfile.write(json.dumps(self.data, indent=1))
	# sync "accountinfo" endpoint to self.data['accountinfo']
	def sync_accountinfo(self):
		# request accountinfo and add to data
		print("requesting /v2/accountinfo...")
		r = self.cf.request('/v2/accountinfo', {})
		# print("accountinfo response", r)
		# print("accountinfo response.content", r.content)
		# print("accountinfo response.json", json.dumps(r.json(), indent=4))
		self.data['accountinfo'] = r.json()
	# sync data from enpoints given by enpoint_names to self.data
	def sync_endpoints(self, endpoint_names):
		# One shared "now" timestamp (ms) so every endpoint syncs up to the same cutoff.
		t_now = int(time.time() * 1000)
		for name in endpoint_names:
			endpoint = self.endpoints[name]
			self.sync_endpoint(endpoint, t_now)
	# sync data from given enpoints self.data
	def sync_endpoint(self, endpoint, t_now):
		"""Sync one endpoint: determine the resume watermark, then sync each item.

		The watermark (latest_t) is taken, in order of preference, from the
		stored ``latest_t``, from the max time field of already-stored records,
		or from the configured account start time.
		"""
		print(f"\n*** syncing endpoint {endpoint} ***\n")
		name = endpoint['name']
		time_field_name = endpoint['time_field_name']
		# determine latest_t
		if name not in self.data:
			self.data[name] = {
				'latest_t': None,
				'data': []
			}
		if 'latest_t' in self.data[name] and self.data[name]['latest_t']:
			latest_t = self.data[name]['latest_t']
			print(f"{name}: using latest_t {latest_t} from data")
		elif len(self.data[name]['data']) > 0 and time_field_name:
			print(f"{name}: using {time_field_name} for latest_t")
			latest_t = max(int(d[time_field_name]) for d in self.data[name]['data'])
		else:
			latest_t = config['t_account_start']
		if latest_t > t_now:
			latest_t = t_now
		# Endpoints may be parameterised by a list of items (e.g. markets);
		# '<all>' is a placeholder meaning "no per-item parameterisation".
		if 'items_config_var' not in endpoint:
			items = ['<all>']
		else:
			items = config[endpoint['items_config_var']]
		print(f"--- endpoint '{name}': syncing items {items} ---")
		for item in items:
			self.sync_endpoint_item(endpoint, item, t_now, latest_t)
	# sync data specified by 'item' from given endpoint to self.data
	def sync_endpoint_item(self, endpoint, item, t_now, latest_t):
		"""Page through [latest_t, t_now] in max_period windows, appending results.

		Handles rate limiting (HTTP 429), an API quirk that reports "no data"
		as error code 20001, and aborts (keeping data collected so far) on any
		other error or on Ctrl-C.
		"""
		name = endpoint['name']
		limit = endpoint['limit']
		path = endpoint['path'].format(name=name, item=item)
		time_field_name = endpoint['time_field_name']
		current_start_t = latest_t
		current_period = endpoint['max_period']
		print(f"\n--- syncing {name}, item {item}: latest_t = {latest_t} = {datetime.datetime.fromtimestamp(latest_t/1000)} ---\n")
		received_data = None
		finished = False
		while not finished:
			try:
				params = {
					'limit': limit,
					'startTime': int(current_start_t),
					'endTime': int(current_start_t + current_period)
				}
				if "params" in endpoint:
					params_to_add = endpoint['params']
					for key in params_to_add.keys():
						params[key] = params_to_add[key].format(name=name, item=item)
				if params['endTime'] > t_now:
					params['endTime'] = t_now
				# fire request
				print(f"requesting path {path} with params {params}")
				r = self.cf.request(path, params)
				#print("response", r)
				if r.status_code != 200:
					print(f"status_code {r.status_code}, content: {r.content}")
					if r.status_code == 429: # rate limit hit
						print(f" rate limit encountered, sleeping {endpoint['rate_limit_sleep_s']} seconds...")
						time.sleep(endpoint['rate_limit_sleep_s'])
					else:
						raise Exception(f"HTTP Status Code {r.status_code}, aborting (will store data)")
				else:
					received_json = r.json()
					# temporary hack to get around behaviour introduced 4/15 2022 that api throws error 20001 when there is no data
					if "success" in received_json and "code" in received_json and "message" in received_json:
						print('looks like error response')
						if received_json["success"] == False and received_json["code"] == "20001" and received_json["message"] == "result not found, please check your parameters":
							print('special hack to ignore error code 20001')
							received_json["data"] = []
					if "data" not in received_json:
						print("ERROR from api, response:")
						print(json.dumps(received_json, indent=2))
					else:
						received_data = received_json["data"]
						print(f" requested {path} with {params}...")
						#print("type(received_data): ", type(received_data))
						# work around issue A6 removing redeem operations still in progess. This can be removed when A6 is fixed by coinflex
						# NOTE: this introduces danger of missing a redeem that is still in progress in case startTime/endTime filter is on requestedAt.
						# In case filter is on redeemetAt it should be fine
						if name == 'redeem':
							received_data = [d for d in received_data if d["redeemedAt"] != d["requestedAt"]]
						if received_data == None:
							print("no data received (not even empty), probably error")
							print("response.json", json.dumps(r.json(), indent=4))
						else:
							print(f" received {len(received_data)}/{limit} items")
							# adjust time interval parameters
							if len(received_data) == limit: # limit was hit exactly
								raise Exception(f"limit hit, due to issue A5 we have to abort, consider reducing max_period in endpoints.json for endpoint named '{name}'")
								'''
								# latest_t is taken from received_data
								self.data[name]['latest_t'] = max(int(d[time_field_name]) for d in received_data)
								print(f"self.data[name]['latest_t'] = {self.data[name]['latest_t']}")
								# store all items except the ones with latest timestamp
								# there could be more non-delivered items with that timestamp,...
								for d in received_data:
									if True or int(d[time_field_name]) != self.data[name]['latest_t']:
										print(f"storing {d}")
										self.data[name]['data'].append(d)
									else:
										print(f"skipping storage of {d}")
								# so we need to include that timestamp as startTime in next request
								current_start_t = self.data[name]['latest_t']
								'''
							elif len(received_data) >= 0:
								# latest_t is set to endTime of request
								# is this problematic due to possible clock difference local vs. server (TODO)?
								self.data[name]['latest_t'] = params['endTime']
								# append data to storage
								self.data[name]['data'] += received_data
								# next request can used endTime + 1 as startTime
								current_start_t = self.data[name]['latest_t'] + 1
								#print(" new current_start_t: ", datetime.datetime.fromtimestamp(current_start_t/1000))
				if current_start_t >= t_now:
					finished = True
			except (KeyboardInterrupt, Exception) as ex:
				print("ABORT due to", ex)
				traceback.print_exc()
				finished = True
# instantiate history, load data from file, sync and dump back to same file
history = History()
history.loadFromFile(config['coinflex_data_filename'])
#history.sync_accountinfo()
# endpoints_to_sync is a comma-separated list of endpoint names from config.
history.sync_endpoints(config['endpoints_to_sync'].split(","))
history.dumpToFile(config['coinflex_data_filename'])
| 36.623188 | 161 | 0.674449 |
acf615e2f6f08c5b348ded0c7465e1d204ed33ad | 4,617 | py | Python | tests/unittest/utils/test_utils.py | cogitoergoread/muszi-macrohard.hu | e9bbd36b789e670f96622a3a2ba8327f0d897561 | [
"MIT"
] | 1 | 2021-05-27T03:40:37.000Z | 2021-05-27T03:40:37.000Z | tests/unittest/utils/test_utils.py | cogitoergoread/muszi-macrohard.hu | e9bbd36b789e670f96622a3a2ba8327f0d897561 | [
"MIT"
] | null | null | null | tests/unittest/utils/test_utils.py | cogitoergoread/muszi-macrohard.hu | e9bbd36b789e670f96622a3a2ba8327f0d897561 | [
"MIT"
] | 1 | 2022-02-24T11:25:22.000Z | 2022-02-24T11:25:22.000Z | import unittest
import numpy as np
from rlcard3.utils.utils import get_random_cards, init_54_deck, init_standard_deck, is_in_cards, is_pair, is_single, rank2int, take_out_cards, print_card, elegent_form, init_players, get_upstream_player_id, get_downstream_player_id, reorganize, set_global_seed, get_cards_from_ranks,tournament
import rlcard3
from rlcard3.agents.random_agent import RandomAgent
from rlcard3.core import Card, Player
class TestUtils(unittest.TestCase):
    """Unit tests for the helper functions in rlcard3.utils.utils."""
    def test_init_standard_deck(self):
        """A standard deck holds 52 cards."""
        self.assertEqual(len(init_standard_deck()), 52)
    def test_init_54_deck(self):
        """A 54-card deck adds the two jokers."""
        self.assertEqual(len(init_54_deck()), 54)
    def test_get_random_cards(self):
        """get_random_cards splits the hand and rejects out-of-range sizes."""
        hand = init_54_deck()
        num = 10
        chosen_cards, remained_cards = get_random_cards(hand, num)
        self.assertEqual(len(chosen_cards), num)
        self.assertEqual(len(remained_cards), len(hand) - num)
        with self.assertRaises(AssertionError):
            get_random_cards(hand, 1000)
        with self.assertRaises(AssertionError):
            get_random_cards(hand, -1)
    def test_is_pair(self):
        """A pair is exactly two cards of the same rank."""
        self.assertTrue(is_pair([Card('S', 'A'), Card('D', 'A')]))
        self.assertFalse(is_pair([Card('BJ', ''), Card('S', 'A'), Card('D', 'A')]))
    def test_is_single(self):
        """A single is exactly one card."""
        self.assertTrue(is_single([Card('S', 'A')]))
        self.assertFalse(is_single([Card('S', 'A'), Card('BJ', '')]))
    def test_rank2int(self):
        """rank2int maps rank strings to integers; unknown ranks yield None."""
        self.assertEqual(rank2int('A'), 14)
        self.assertEqual(rank2int(''), -1)
        self.assertEqual(rank2int('3'), 3)
        self.assertEqual(rank2int('T'), 10)
        self.assertEqual(rank2int('J'), 11)
        self.assertEqual(rank2int('Q'), 12)
        self.assertEqual(rank2int('1000'), None)
        self.assertEqual(rank2int('abc123'), None)
        self.assertEqual(rank2int('K'), 13)
    def test_get_cards_from_ranks(self):
        """Selecting 3 ranks from a 54-card deck yields 12 cards (jokers excluded)."""
        deck = init_54_deck()
        player = Player(0)
        player.hand = deck
        test_ranks = ['A', '2', '3']
        chosen_cards, remained_cards = get_cards_from_ranks(player, test_ranks)
        self.assertEqual(len(chosen_cards), 12)
        for card in chosen_cards:
            flag = True
            if card.rank in test_ranks:
                flag = False
            self.assertFalse(flag)
        self.assertEqual(len(remained_cards), len(deck) - 12)
        self.assertEqual(len(chosen_cards), 12)
    def test_take_out_cards(self):
        """take_out_cards removes the given cards from the hand in place."""
        cards = init_54_deck()
        remove_cards = [Card('S', 'A'), Card('BJ', '')]
        res = take_out_cards(cards, remove_cards)
        flag = False
        for card in res:
            if card.get_index() == 'SA' or card.get_index == 'BJ':
                flag = True
        self.assertFalse(flag)
        self.assertEqual(len(cards), len(init_54_deck()) - 2)
    def test_is_in_cards(self):
        """is_in_cards respects multiplicity: duplicates must exist in the pool."""
        deck54 = init_54_deck()
        deck_standard = init_standard_deck()
        deck54_plus_BJ = init_54_deck()
        deck54_plus_BJ.append(Card('BJ', ''))
        self.assertTrue(is_in_cards(deck54, deck_standard))
        self.assertTrue(is_in_cards(deck54, [Card('BJ', ''), Card('RJ', '')]))
        self.assertFalse(is_in_cards(deck54, [Card('BJ', ''), Card('BJ', '')]))
        self.assertFalse(is_in_cards(deck54, [Card('BJ', ''), Card('BJ', ''), Card('D', '3')]))
        self.assertTrue(is_in_cards(deck54_plus_BJ, [Card('BJ', ''), Card('BJ', ''), Card('D', '3')]))
    def test_print_cards(self):
        """elegent_form/print_card render card indices (smoke test, no assertions on output)."""
        self.assertEqual(len(elegent_form('S9')), 2)
        self.assertEqual(len(elegent_form('ST')), 3)
        print_card(None)
        print_card('S9')
        print_card('ST')
    def test_init_players(self):
        """init_players creates the requested number of players."""
        self.assertTrue(len(init_players(5)), 5)
    def test_get_upstream_player_id(self):
        """Upstream of player 0 in a 5-player ring is player 4."""
        players = init_players(5)
        self.assertEqual(get_upstream_player_id(players[0], players), 4)
    def test_get_downstream_player_id(self):
        """Downstream of player 4 in a 5-player ring wraps to player 0."""
        players = init_players(5)
        self.assertEqual(get_downstream_player_id(players[4], players), 0)
    def test_reorganize(self):
        """reorganize reshapes trajectories into per-transition 5-tuples."""
        trajectories = reorganize([[[1,2],1,[4,5]]], [1])
        self.assertEqual(np.array(trajectories).shape, (1, 1, 5))
    def test_set_global_seed(self):
        """set_global_seed seeds numpy's global RNG."""
        set_global_seed(0)
        self.assertEqual(np.random.get_state()[1][0], 0)
    def test_tournament(self):
        """tournament returns one payoff per agent."""
        env = rlcard3.make('leduc-holdem')
        env.set_agents([RandomAgent(env.action_num), RandomAgent(env.action_num)])
        payoffs = tournament(env,1000)
        self.assertEqual(len(payoffs), 2)
# Allow running this test module directly with ``python test_utils.py``.
if __name__ == '__main__':
    unittest.main()
| 37.844262 | 293 | 0.635261 |
acf6163d0dbcf331b18f3caa995ff0b6d87558a7 | 2,675 | py | Python | 3-ANALYZE/review_raw_data.py | OpenPropagandaFramework/opf | 6e4dc29bc8ae1c34b67ebc58d222ef21329a546a | [
"MIT"
] | 1 | 2019-08-12T23:00:11.000Z | 2019-08-12T23:00:11.000Z | 3-ANALYZE/review_raw_data.py | OpenPropagandaFramework/opf | 6e4dc29bc8ae1c34b67ebc58d222ef21329a546a | [
"MIT"
] | null | null | null | 3-ANALYZE/review_raw_data.py | OpenPropagandaFramework/opf | 6e4dc29bc8ae1c34b67ebc58d222ef21329a546a | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import psycopg2
import ujson as json
import datetime
def database_connection():
    """Open an autocommitting connection to the ``prop`` Postgres database.

    Credentials default to the historical "CHANGEME" placeholders but can now
    be supplied via the PROP_DB_USER / PROP_DB_PASSWORD / PROP_DB_HOST
    environment variables, so secrets no longer have to be edited into source.

    Returns:
        (cursor, connection) tuple; the caller is responsible for closing both.
    """
    import os
    user = os.environ.get("PROP_DB_USER", "CHANGEME")
    password = os.environ.get("PROP_DB_PASSWORD", "CHANGEME")
    host = os.environ.get("PROP_DB_HOST", "CHANGEME")
    postgresconn = psycopg2.connect(
        "dbname='prop' user='{}' password='{}' host='{}'".format(user, password, host)
    )
    # autocommit so each INSERT/SELECT is visible immediately without explicit commits
    postgresconn.autocommit = True
    postgrescur = postgresconn.cursor()
    return postgrescur, postgresconn
def hashtag_exists(hashtag, postgrescur):
    """Return True if *hashtag* is already recorded in the ioc_hashtags table."""
    query = "SELECT * FROM ioc_hashtags WHERE ioc_hashtag = %s"
    postgrescur.execute(query, (hashtag,))
    row = postgrescur.fetchone()
    return row is not None
def domain_exists(domain, postgrescur):
    """Return True if *domain* is already recorded in the ioc_domains table."""
    query = "SELECT * FROM ioc_domains WHERE ioc_domain = %s"
    postgrescur.execute(query, (domain,))
    row = postgrescur.fetchone()
    return row is not None
def url_exists(url, postgrescur):
    """Return True if *url* is already recorded in the ioc_urls table."""
    query = "SELECT * FROM ioc_urls WHERE ioc_url = %s"
    postgrescur.execute(query, (url,))
    row = postgrescur.fetchone()
    return row is not None
def review_raw_hashtags():
    """Print (as JSON) the top 500 raw hashtags not yet classified as IOCs."""
    postgrescur, postgresconn = database_connection()
    hashtag_analysis_sql = 'SELECT hashtag, count(*) FROM hashtags GROUP BY hashtag ORDER BY count(*) DESC LIMIT 500'
    postgrescur.execute(hashtag_analysis_sql)
    top_hashtags = postgrescur.fetchall()
    # "class" is left as a placeholder for a human reviewer to fill in.
    hashtags_list = [
        {"hashtag": tag, "sightings": count, "class": "CHANGEME"}
        for tag, count in top_hashtags
        if not hashtag_exists(tag, postgrescur)
    ]
    print(json.dumps(hashtags_list))
    postgrescur.close()
    postgresconn.close()
def review_raw_domains():
    """Print (as JSON) the top 500 raw link domains not yet classified as IOCs."""
    postgrescur, postgresconn = database_connection()
    domain_analysis_sql = 'SELECT link_domain, count(*) FROM links GROUP BY link_domain ORDER BY count(*) DESC LIMIT 500'
    postgrescur.execute(domain_analysis_sql)
    top_domains = postgrescur.fetchall()
    # "class" is left as a placeholder for a human reviewer to fill in.
    domains_list = [
        {"domain": dom, "sightings": count, "class": "CHANGEME"}
        for dom, count in top_domains
        if not domain_exists(dom, postgrescur)
    ]
    print(json.dumps(domains_list))
    postgrescur.close()
    postgresconn.close()
def review_raw_urls():
    """Print (as JSON) the top 500 raw link URLs not yet classified as IOCs."""
    postgrescur, postgresconn = database_connection()
    url_analysis_sql = 'SELECT link_url, count(*) FROM links GROUP BY link_url ORDER BY count(*) DESC LIMIT 500'
    postgrescur.execute(url_analysis_sql)
    top_urls = postgrescur.fetchall()
    # "class" is left as a placeholder for a human reviewer to fill in.
    urls_list = [
        {"url": link, "sightings": count, "class": "CHANGEME"}
        for link, count in top_urls
        if not url_exists(link, postgrescur)
    ]
    print(json.dumps(urls_list))
    postgrescur.close()
    postgresconn.close()
# Only the hashtag review runs by default; uncomment the others as needed.
review_raw_hashtags()
#review_raw_domains()
#review_raw_urls()
| 35.666667 | 118 | 0.748785 |
acf6165078e6fb49fbaf43f1e59668d0c7cd9c87 | 9,899 | py | Python | ppdet/data/source/coco.py | violetweir/PaddleDetection | ae219e643cae9d016d7a66a49d79f63e6c7c93e5 | [
"Apache-2.0"
] | 3 | 2021-07-31T11:52:11.000Z | 2022-01-06T13:32:07.000Z | ppdet/data/source/coco.py | siqi-yang/PaddleDetection | d7383ad99c69e03f984ead52cc645d17f4729837 | [
"Apache-2.0"
] | 1 | 2021-12-22T02:00:50.000Z | 2021-12-22T02:00:50.000Z | ppdet/data/source/coco.py | siqi-yang/PaddleDetection | d7383ad99c69e03f984ead52cc645d17f4729837 | [
"Apache-2.0"
] | 1 | 2021-12-21T12:55:44.000Z | 2021-12-21T12:55:44.000Z | # Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import numpy as np
from ppdet.core.workspace import register, serializable
from .dataset import DetDataset
from ppdet.utils.logger import setup_logger
logger = setup_logger(__name__)
@register
@serializable
class COCODataSet(DetDataset):
"""
Load dataset with COCO format.
Args:
dataset_dir (str): root directory for dataset.
image_dir (str): directory for images.
anno_path (str): coco annotation file path.
data_fields (list): key name of data dictionary, at least have 'image'.
sample_num (int): number of samples to load, -1 means all.
load_crowd (bool): whether to load crowded ground-truth.
False as default
allow_empty (bool): whether to load empty entry. False as default
empty_ratio (float): the ratio of empty record number to total
record's, if empty_ratio is out of [0. ,1.), do not sample the
records. 1. as default
"""
def __init__(self,
dataset_dir=None,
image_dir=None,
anno_path=None,
data_fields=['image'],
sample_num=-1,
load_crowd=False,
allow_empty=False,
empty_ratio=1.):
super(COCODataSet, self).__init__(dataset_dir, image_dir, anno_path,
data_fields, sample_num)
self.load_image_only = False
self.load_semantic = False
self.load_crowd = load_crowd
self.allow_empty = allow_empty
self.empty_ratio = empty_ratio
def _sample_empty(self, records, num):
# if empty_ratio is out of [0. ,1.), do not sample the records
if self.empty_ratio < 0. or self.empty_ratio >= 1.:
return records
import random
sample_num = int(num * self.empty_ratio / (1 - self.empty_ratio))
records = random.sample(records, sample_num)
return records
def parse_dataset(self):
anno_path = os.path.join(self.dataset_dir, self.anno_path)
image_dir = os.path.join(self.dataset_dir, self.image_dir)
assert anno_path.endswith('.json'), \
'invalid coco annotation file: ' + anno_path
from pycocotools.coco import COCO
coco = COCO(anno_path)
img_ids = coco.getImgIds()
img_ids.sort()
cat_ids = coco.getCatIds()
records = []
empty_records = []
ct = 0
self.catid2clsid = dict({catid: i for i, catid in enumerate(cat_ids)})
self.cname2cid = dict({
coco.loadCats(catid)[0]['name']: clsid
for catid, clsid in self.catid2clsid.items()
})
if 'annotations' not in coco.dataset:
self.load_image_only = True
logger.warning('Annotation file: {} does not contains ground truth '
'and load image information only.'.format(anno_path))
for img_id in img_ids:
img_anno = coco.loadImgs([img_id])[0]
im_fname = img_anno['file_name']
im_w = float(img_anno['width'])
im_h = float(img_anno['height'])
im_path = os.path.join(image_dir,
im_fname) if image_dir else im_fname
is_empty = False
if not os.path.exists(im_path):
logger.warning('Illegal image file: {}, and it will be '
'ignored'.format(im_path))
continue
if im_w < 0 or im_h < 0:
logger.warning('Illegal width: {} or height: {} in annotation, '
'and im_id: {} will be ignored'.format(
im_w, im_h, img_id))
continue
coco_rec = {
'im_file': im_path,
'im_id': np.array([img_id]),
'h': im_h,
'w': im_w,
} if 'image' in self.data_fields else {}
if not self.load_image_only:
ins_anno_ids = coco.getAnnIds(
imgIds=[img_id], iscrowd=None if self.load_crowd else False)
instances = coco.loadAnns(ins_anno_ids)
bboxes = []
is_rbox_anno = False
for inst in instances:
# check gt bbox
if inst.get('ignore', False):
continue
if 'bbox' not in inst.keys():
continue
else:
if not any(np.array(inst['bbox'])):
continue
# read rbox anno or not
is_rbox_anno = True if len(inst['bbox']) == 5 else False
if is_rbox_anno:
xc, yc, box_w, box_h, angle = inst['bbox']
x1 = xc - box_w / 2.0
y1 = yc - box_h / 2.0
x2 = x1 + box_w
y2 = y1 + box_h
else:
x1, y1, box_w, box_h = inst['bbox']
x2 = x1 + box_w
y2 = y1 + box_h
eps = 1e-5
if inst['area'] > 0 and x2 - x1 > eps and y2 - y1 > eps:
inst['clean_bbox'] = [
round(float(x), 3) for x in [x1, y1, x2, y2]
]
if is_rbox_anno:
inst['clean_rbox'] = [xc, yc, box_w, box_h, angle]
bboxes.append(inst)
else:
logger.warning(
'Found an invalid bbox in annotations: im_id: {}, '
'area: {} x1: {}, y1: {}, x2: {}, y2: {}.'.format(
img_id, float(inst['area']), x1, y1, x2, y2))
num_bbox = len(bboxes)
if num_bbox <= 0 and not self.allow_empty:
continue
elif num_bbox <= 0:
is_empty = True
gt_bbox = np.zeros((num_bbox, 4), dtype=np.float32)
if is_rbox_anno:
gt_rbox = np.zeros((num_bbox, 5), dtype=np.float32)
gt_theta = np.zeros((num_bbox, 1), dtype=np.int32)
gt_class = np.zeros((num_bbox, 1), dtype=np.int32)
is_crowd = np.zeros((num_bbox, 1), dtype=np.int32)
difficult = np.zeros((num_bbox, 1), dtype=np.int32)
gt_poly = [None] * num_bbox
has_segmentation = False
for i, box in enumerate(bboxes):
catid = box['category_id']
gt_class[i][0] = self.catid2clsid[catid]
gt_bbox[i, :] = box['clean_bbox']
# xc, yc, w, h, theta
if is_rbox_anno:
gt_rbox[i, :] = box['clean_rbox']
is_crowd[i][0] = box['iscrowd']
# check RLE format
if 'segmentation' in box and box['iscrowd'] == 1:
gt_poly[i] = [[0.0, 0.0], ]
elif 'segmentation' in box and box['segmentation']:
gt_poly[i] = box['segmentation']
has_segmentation = True
if has_segmentation and not any(
gt_poly) and not self.allow_empty:
continue
if is_rbox_anno:
gt_rec = {
'is_crowd': is_crowd,
'gt_class': gt_class,
'gt_bbox': gt_bbox,
'gt_rbox': gt_rbox,
'gt_poly': gt_poly,
}
else:
gt_rec = {
'is_crowd': is_crowd,
'gt_class': gt_class,
'gt_bbox': gt_bbox,
'gt_poly': gt_poly,
}
for k, v in gt_rec.items():
if k in self.data_fields:
coco_rec[k] = v
# TODO: remove load_semantic
if self.load_semantic and 'semantic' in self.data_fields:
seg_path = os.path.join(self.dataset_dir, 'stuffthingmaps',
'train2017', im_fname[:-3] + 'png')
coco_rec.update({'semantic': seg_path})
logger.debug('Load file: {}, im_id: {}, h: {}, w: {}.'.format(
im_path, img_id, im_h, im_w))
if is_empty:
empty_records.append(coco_rec)
else:
records.append(coco_rec)
ct += 1
if self.sample_num > 0 and ct >= self.sample_num:
break
assert ct > 0, 'not found any coco record in %s' % (anno_path)
logger.debug('{} samples in file {}'.format(ct, anno_path))
if len(empty_records) > 0:
empty_records = self._sample_empty(empty_records, len(records))
records += empty_records
self.roidbs = records
| 40.904959 | 80 | 0.490454 |
acf6171477370ad8148cb6cad3c1d6185ef4a85d | 537 | py | Python | blog/migrations/0001_initial.py | nimapmi/Django_try001 | bd607745e4a261ceb466c67959f79d01a8b1d386 | [
"MIT"
] | null | null | null | blog/migrations/0001_initial.py | nimapmi/Django_try001 | bd607745e4a261ceb466c67959f79d01a8b1d386 | [
"MIT"
] | null | null | null | blog/migrations/0001_initial.py | nimapmi/Django_try001 | bd607745e4a261ceb466c67959f79d01a8b1d386 | [
"MIT"
] | null | null | null | # Generated by Django 3.2.9 on 2021-11-06 14:07
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Post',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=250)),
('content', models.TextField()),
],
),
]
| 23.347826 | 117 | 0.564246 |
acf617b2f644c8587985b74fb5317894da344ab6 | 6,680 | py | Python | server/routes.py | Taru-garg/my-Phone | 161891afcb60f7a7f93593371f1837bbd647735d | [
"MIT"
] | 2 | 2020-11-23T06:13:11.000Z | 2021-09-21T19:39:33.000Z | server/routes.py | Taru-garg/my-Phone | 161891afcb60f7a7f93593371f1837bbd647735d | [
"MIT"
] | 1 | 2020-11-23T06:10:23.000Z | 2020-11-23T06:11:00.000Z | server/routes.py | Taru-garg/my-Phone | 161891afcb60f7a7f93593371f1837bbd647735d | [
"MIT"
] | 3 | 2020-11-16T14:21:57.000Z | 2021-06-09T05:31:04.000Z | from imports import *
from flask import make_response, jsonify, flash, send_from_directory
from werkzeug.utils import secure_filename
# Serve the Android shared-storage root directly so media URLs map to files on disk.
app = Flask(
    __name__, static_url_path="/storage/emulated/0", static_folder="/storage/emulated/0"
)
UPLOAD_FOLDER = "/storage/emulated/0"
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
# NOTE(review): hard-coded secret key -- acceptable for a LAN toy server, not production.
app.secret_key = "banana"
# File extensions accepted by the upload endpoint.
ALLOWED_EXTENSIONS = {'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'}
def allowed_file(filename):
    """Return True if *filename* has an extension listed in ALLOWED_EXTENSIONS."""
    _, dot, extension = filename.rpartition('.')
    return bool(dot) and extension.lower() in ALLOWED_EXTENSIONS
@app.route("/")
@app.route("/home")
def home():
    """Render the landing page."""
    return render_template("home_1.html", title="Home")
@app.errorhandler(404)
def not_found(e):
    """Render the custom 404 page for unknown URLs."""
    return render_template("404.html")
@app.route("/photos", methods=["POST", "GET"])
def photos():
    """List every image found under the configured photo directories."""
    all_files = [f for directory in photos_dir for f in list_files(directory)]
    # Keep only files whose extension is a known photo format.
    images = [f for f in all_files if f.rpartition(".")[-1] in photolst]
    return render_template(
        "photos.html",
        title="Photos",
        photos=images,
        length=len(images),
        len_dec=int(len(images) / 100),
    )
@app.route("/documents")
def documents():
    """List documents (path plus display name) whose extension is in doclst."""
    paths = []
    names = []
    for directory in document_dir:
        paths.extend(list_files(directory))
        names.extend(list_files_name(directory))
    # paths and names are produced by parallel directory scans, so pair them up.
    pairs = [(p, n) for p, n in zip(paths, names) if p.rpartition(".")[2] in doclst]
    doc_paths = [p for p, _ in pairs]
    doc_names = [n for _, n in pairs]
    return render_template(
        "document.html",
        title="Document",
        documents=doc_paths,
        len=len(doc_paths),
        document_name=doc_names,
    )
@app.route("/music")
def music():
    """List audio files with playable extensions, plus display names and 1-based ids."""
    paths = []
    names = []
    for directory in music_dir:
        paths.extend(list_files(directory))
        names.extend(list_files_name_shortened(directory))
    # paths and names come from parallel directory scans, so pair them up.
    pairs = [(p, n) for p, n in zip(paths, names) if p.rpartition(".")[2] in musiclst]
    tracks = [p for p, _ in pairs]
    track_names = [n for _, n in pairs]
    ids = list(range(1, len(tracks) + 1))
    return render_template(
        "music.html",
        title="Music",
        music=tracks,
        len=len(tracks),
        music_name=track_names,
        ids=ids,
    )
@app.route("/video")
def video():
    """List video files (path plus display name) whose extension is in videolst."""
    paths = []
    names = []
    for directory in video_dir:
        paths.extend(list_files(directory))
        names.extend(list_files_name(directory))
    # paths and names come from parallel directory scans, so pair them up.
    pairs = [(p, n) for p, n in zip(paths, names) if p.rpartition(".")[2] in videolst]
    clips = [p for p, _ in pairs]
    clip_names = [n for _, n in pairs]
    return render_template(
        "video(1).html",
        title="Video",
        videos=clips,
        len=len(clips),
        video_names=clip_names,
    )
@app.route('/upload', methods=['GET', 'POST'])
def upload_file():
    """Handle uploads: save an allowed file into UPLOAD_FOLDER, else show the form."""
    if request.method == 'POST':
        # check if the post request has the file part
        if 'file' not in request.files:
            flash('No file part')
            return redirect(request.url)
        file = request.files['file']
        # if user does not select file, browser also
        # submit an empty part without filename
        if file.filename == '':
            flash('No selected file')
            return redirect(request.url)
        if file and allowed_file(file.filename):
            # secure_filename strips path components so uploads stay inside UPLOAD_FOLDER
            filename = secure_filename(file.filename)
            file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
            return redirect(url_for('uploaded_file',
                                    filename=filename))
    # GET (or a rejected upload) renders a minimal inline upload form.
    return '''
    <!doctype html>
    <title>Upload new File</title>
    <h1>Upload new File</h1>
    <form method=post enctype=multipart/form-data>
      <input type=file name=file>
      <input type=submit value=Upload>
    </form>
    '''
@app.route('/uploads/<filename>')
def uploaded_file(filename):
    """Serve a previously uploaded file from the upload folder."""
    upload_dir = app.config['UPLOAD_FOLDER']
    return send_from_directory(upload_dir, filename)
@app.route("/findPhone", methods=["GET", "POST"])
def findPhone():
    """Play or stop a loud ringtone on the phone so it can be located.

    POST form field ``data``: "Play" starts playback of the bundled ringtone,
    any other value stops the media player. A GET request (or a failed shell
    call) falls through to a redirect back to the home page.
    """
    if request.method == "POST":
        passed = request.form["data"]
        if passed == "Play":
            try:
                os.system("termux-media-player play /storage/emulated/0/SoftwareEngineeringProject/server/iphone_6-30.ogg")
                return {"Message": "Playing"}
            # Best-effort: narrowed from a bare except (which also swallowed
            # SystemExit/KeyboardInterrupt); failures just redirect home.
            except Exception:
                pass
        else:
            try:
                os.system("termux-media-player stop")
                return {"Message": "Stopped"}
            except Exception:
                pass
    return redirect("/home")
@app.route("/notification", methods=["GET", "POST"])
def notif():
    """Show the phone's notifications; POST returns the raw termux JSON text."""
    raw = subprocess.check_output("termux-notification-list")
    notifs = str(raw.decode("utf8"))
    if request.method == "POST":
        return notifs
    return render_template("notif.html", title="Notifications", notifs=notifs)
@app.route("/getBattery", methods=["GET", "POST"])
def getBattery():
    """Return the battery status as JSON on POST; otherwise redirect home."""
    if request.method != "POST":
        return redirect("/home")
    return jsonify({"Message": battery()})
@app.route("/contact")
def contact():
    """Render the contact list fetched from the phone via termux."""
    raw = subprocess.check_output("termux-contact-list")
    contact_json = str(raw.decode("utf8"))
    return render_template("contact.html", title="Contacts", contacts=contact_json)
@app.route("/call", methods=["POST", "GET"])
def call():
    """Start a phone call to the number submitted in the ``phone`` form field.

    Security fix: the previous ``os.system("termux-telephony-call " + to_call)``
    concatenated an attacker-controlled form value into a shell command line,
    allowing arbitrary command injection. ``subprocess.run`` with an argument
    list never invokes a shell, so the value is passed as a single argument.
    """
    to_call = request.form["phone"]
    try:
        subprocess.run(["termux-telephony-call", to_call])
    except Exception:
        # best-effort: if the dialer command fails we still redirect home
        pass
    return redirect("/home")
@app.route("/clipboard", methods=["GET", "POST"])
def get_clipboard():
    """Return the phone clipboard contents as JSON on GET; otherwise redirect home."""
    if request.method != "GET":
        return redirect("/home")
    raw = subprocess.check_output("termux-clipboard-get")
    return jsonify({"Message": str(raw.decode("utf8"))})
| 28.305085 | 123 | 0.591168 |
acf617e7dfd50036eef691eeb4087c10c4a60d14 | 123 | py | Python | custom_components/iphonedetect/const.py | systemcrash/iphonedetect | b2a4a94b3ef6842b2980cd1b7fd01c1e43345474 | [
"MIT"
] | 189 | 2019-07-28T04:12:42.000Z | 2022-03-25T02:41:05.000Z | custom_components/iphonedetect/const.py | systemcrash/iphonedetect | b2a4a94b3ef6842b2980cd1b7fd01c1e43345474 | [
"MIT"
] | 60 | 2019-07-02T03:30:02.000Z | 2022-03-23T22:08:59.000Z | custom_components/iphonedetect/const.py | shlomifgm/HomeAssistant | 6b86b81c33cdd9b0d6a9af0dc74b163f0aeb2112 | [
"MIT"
] | 29 | 2019-07-28T15:55:24.000Z | 2022-03-31T20:38:18.000Z | HOME_STATES={
2: "REACHABLE",
8: "DELAY",
# 4: "STALE",
}
# UDP probe payload sent to the device; 5353 is the mDNS port.
# NOTE(review): presumably the probe elicits an ARP/neighbour-table entry -- confirm against the sender.
CONST_MESSAGE = b"Marco"
CONST_MESSAGE_PORT = 5353
| 13.666667 | 25 | 0.601626 |
acf618297f86394d2227a57f84a81e1ffdc86d96 | 11,933 | py | Python | model_zoo/wide_and_deep/src/process_data.py | ZephyrChenzf/mindspore | 8f191847cf71e12715ced96bc3575914f980127a | [
"Apache-2.0"
] | 2 | 2020-08-12T16:14:40.000Z | 2020-12-04T03:05:57.000Z | model_zoo/official/recommend/wide_and_deep/src/process_data.py | dilingsong/mindspore | 4276050f2494cfbf8682560a1647576f859991e8 | [
"Apache-2.0"
] | null | null | null | model_zoo/official/recommend/wide_and_deep/src/process_data.py | dilingsong/mindspore | 4276050f2494cfbf8682560a1647576f859991e8 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
Criteo data process
"""
import os
import pickle
import collections
import argparse
import numpy as np
import pandas as pd
# Total line counts of the raw Criteo dataset files (train / test splits).
TRAIN_LINE_COUNT = 45840617
TEST_LINE_COUNT = 6042135
class CriteoStatsDict():
    """Accumulate statistics over the Criteo dataset and build feature-id mappings.

    Tracks min/max for the 13 numeric columns (``val_1``..``val_13``), category
    frequencies for the 26 categorical columns (``cat_1``..``cat_26``), and a
    ``cat2id_dict`` mapping every retained feature (plus one out-of-vocabulary
    bucket per categorical column) to a dense integer id.
    """

    def __init__(self):
        self.field_size = 39  # value_1-13; cat_1-26;
        self.val_cols = ["val_{}".format(i + 1) for i in range(13)]
        self.cat_cols = ["cat_{}".format(i + 1) for i in range(26)]
        # Per-column running min/max for the numeric features.
        # NOTE(review): initialised to 0, so a column whose values are all
        # positive keeps min == 0 — presumably intentional for this dataset.
        self.val_min_dict = {col: 0 for col in self.val_cols}
        self.val_max_dict = {col: 0 for col in self.val_cols}
        self.cat_count_dict = {col: collections.defaultdict(int) for col in self.cat_cols}
        # Feature-name -> dense id. Numeric columns occupy ids 0..12, then one
        # OOV slot per categorical column occupies ids 13..38.
        self.oov_prefix = "OOV_"
        self.cat2id_dict = {}
        self.cat2id_dict.update({col: i for i, col in enumerate(self.val_cols)})
        self.cat2id_dict.update({self.oov_prefix + col: i + len(self.val_cols)
                                 for i, col in enumerate(self.cat_cols)})

    def stats_vals(self, val_list):
        """Update per-column min/max with one row of 13 numeric values (strings)."""
        assert len(val_list) == len(self.val_cols)

        def map_max_min(i, val):
            key = self.val_cols[i]
            if val != "":  # empty string means the value is missing
                if float(val) > self.val_max_dict[key]:
                    self.val_max_dict[key] = float(val)
                if float(val) < self.val_min_dict[key]:
                    self.val_min_dict[key] = float(val)

        for i, val in enumerate(val_list):
            map_max_min(i, val)

    def stats_cats(self, cat_list):
        """Update per-column category counts with one row of 26 categorical values."""
        assert len(cat_list) == len(self.cat_cols)

        def map_cat_count(i, cat):
            key = self.cat_cols[i]
            self.cat_count_dict[key][cat] += 1

        for i, cat in enumerate(cat_list):
            map_cat_count(i, cat)

    def save_dict(self, output_path, prefix=""):
        """Pickle the accumulated statistics dictionaries into ``output_path``."""
        with open(os.path.join(output_path, "{}val_max_dict.pkl".format(prefix)), "wb") as file_wrt:
            pickle.dump(self.val_max_dict, file_wrt)
        with open(os.path.join(output_path, "{}val_min_dict.pkl".format(prefix)), "wb") as file_wrt:
            pickle.dump(self.val_min_dict, file_wrt)
        with open(os.path.join(output_path, "{}cat_count_dict.pkl".format(prefix)), "wb") as file_wrt:
            pickle.dump(self.cat_count_dict, file_wrt)

    def load_dict(self, dict_path, prefix=""):
        """Load previously pickled statistics dictionaries from ``dict_path``."""
        with open(os.path.join(dict_path, "{}val_max_dict.pkl".format(prefix)), "rb") as file_wrt:
            self.val_max_dict = pickle.load(file_wrt)
        with open(os.path.join(dict_path, "{}val_min_dict.pkl".format(prefix)), "rb") as file_wrt:
            self.val_min_dict = pickle.load(file_wrt)
        with open(os.path.join(dict_path, "{}cat_count_dict.pkl".format(prefix)), "rb") as file_wrt:
            self.cat_count_dict = pickle.load(file_wrt)
        print("val_max_dict.items()[:50]: {}".format(list(self.val_max_dict.items())))
        print("val_min_dict.items()[:50]: {}".format(list(self.val_min_dict.items())))

    def get_cat2id(self, threshold=100):
        """Assign dense ids to every category seen strictly more than ``threshold`` times.

        Categories at or below the threshold keep falling back to the
        per-column OOV id during :meth:`map_cat2id`.
        """
        for key, cat_count_d in self.cat_count_dict.items():
            new_cat_count_d = dict(filter(lambda x: x[1] > threshold, cat_count_d.items()))
            for cat_str, _ in new_cat_count_d.items():
                self.cat2id_dict[key + "_" + cat_str] = len(self.cat2id_dict)
        print("cat2id_dict.size: {}".format(len(self.cat2id_dict)))
        # BUG FIX: dict.items() is not subscriptable in Python 3; wrap it in
        # list() before slicing, otherwise this line raises TypeError.
        print("cat2id_dict.items()[:50]: {}".format(list(self.cat2id_dict.items())[:50]))

    def map_cat2id(self, values, cats):
        """Map one row (13 numeric strings, 26 category strings) to ``(ids, weights)``.

        Numeric features use their column id with the max-scaled value as the
        weight; categorical features use the learned id (or the per-column OOV
        id) with weight 1.0.
        """
        def minmax_sclae_value(i, val):
            # Scale by the column max only; the min is deliberately not subtracted.
            max_v = float(self.val_max_dict["val_{}".format(i + 1)])
            return float(val) * 1.0 / max_v

        id_list = []
        weight_list = []
        for i, val in enumerate(values):
            if val == "":
                # Missing numeric value: weight 0 so it does not contribute.
                id_list.append(i)
                weight_list.append(0)
            else:
                key = "val_{}".format(i + 1)
                id_list.append(self.cat2id_dict[key])
                weight_list.append(minmax_sclae_value(i, float(val)))

        for i, cat_str in enumerate(cats):
            key = "cat_{}".format(i + 1) + "_" + cat_str
            if key in self.cat2id_dict:
                id_list.append(self.cat2id_dict[key])
            else:
                id_list.append(self.cat2id_dict[self.oov_prefix + "cat_{}".format(i + 1)])
            weight_list.append(1.0)
        return id_list, weight_list
#
def mkdir_path(file_path):
    """Create directory ``file_path`` (including parents) if it does not exist.

    Safe to call repeatedly.
    """
    # exist_ok avoids the check-then-create (TOCTOU) race of the original
    # os.path.exists() / os.makedirs() pair.
    os.makedirs(file_path, exist_ok=True)
#
def statsdata(data_file_path, output_path, criteo_stats):
    """Scan the raw TSV dataset once, accumulating stats into ``criteo_stats``.

    Each line is ``label \\t 13 numeric values \\t 26 categorical values``.
    Malformed lines (not exactly 40 tab-separated fields) are reported and
    skipped. The accumulated dictionaries are pickled into ``output_path``.

    :param data_file_path: path to the raw tab-separated data file.
    :param output_path: directory receiving the pickled statistics.
    :param criteo_stats: object providing ``stats_vals``/``stats_cats``/``save_dict``.
    """
    with open(data_file_path, encoding="utf-8") as file_in:
        errorline_list = []
        count = 0
        for line in file_in:
            count += 1
            line = line.strip("\n")
            # BUG FIX: the original used line.strip("\t"), which returns a
            # string, so len(items) counted *characters* instead of fields and
            # almost every line was rejected. Split on tabs, exactly like
            # random_split_trans2h5() does.
            items = line.split("\t")
            if len(items) != 40:
                errorline_list.append(count)
                print("line: {}".format(line))
                continue
            if count % 1000000 == 0:
                print("Have handle {}w lines.".format(count // 10000))
            values = items[1:14]  # items[0] is the label, unused here
            cats = items[14:]
            assert len(values) == 13, "value.size: {}".format(len(values))
            assert len(cats) == 26, "cat.size: {}".format(len(cats))
            criteo_stats.stats_vals(values)
            criteo_stats.stats_cats(cats)
    criteo_stats.save_dict(output_path)
#
def add_write(file_path, wr_str):
    """Append ``wr_str`` to ``file_path`` as a single UTF-8 line."""
    out = open(file_path, "a", encoding="utf-8")
    try:
        out.write("{}\n".format(wr_str))
    finally:
        out.close()
#
def random_split_trans2h5(in_file_path, output_path, criteo_stats, part_rows=2000000, test_size=0.1, seed=2020):
    """Shuffle-split the raw dataset into train/test and write HDF5 part files.

    A random ``test_size`` fraction of the ``TRAIN_LINE_COUNT`` lines is held
    out as the test set. Features (ids + weights from
    ``criteo_stats.map_cat2id``) and labels are flushed to ``output_path``
    every ``part_rows`` rows as ``{train,test}_{input,output}_part_<n>.h5``.

    :param in_file_path: raw tab-separated input file.
    :param output_path: directory receiving the ``.h5`` part files.
    :param criteo_stats: fitted stats object providing ``map_cat2id``.
    :param part_rows: number of rows per output part file.
    :param test_size: fraction (0..1) of lines held out for the test split.
    :param seed: RNG seed for the shuffle, for reproducibility.
    """
    test_size = int(TRAIN_LINE_COUNT * test_size)
    all_indices = [i for i in range(TRAIN_LINE_COUNT)]
    np.random.seed(seed)
    np.random.shuffle(all_indices)
    print("all_indices.size: {}".format(len(all_indices)))
    test_indices_set = set(all_indices[:test_size])
    print("test_indices_set.size: {}".format(len(test_indices_set)))
    print("------" * 10 + "\n" * 2)

    train_feature_file_name = os.path.join(output_path, "train_input_part_{}.h5")
    train_label_file_name = os.path.join(output_path, "train_output_part_{}.h5")
    test_feature_file_name = os.path.join(output_path, "test_input_part_{}.h5")
    # BUG FIX: the test label files previously reused the "test_input" pattern,
    # so the labels overwrote the test feature files on disk. Use
    # "test_output" to mirror the train split's naming.
    test_label_file_name = os.path.join(output_path, "test_output_part_{}.h5")

    def _flush(features, labels, feature_name, label_name, part_number):
        # Write one (features, labels) part pair; the caller resets the lists.
        pd.DataFrame(np.asarray(features)).to_hdf(feature_name.format(part_number), key="fixed")
        pd.DataFrame(np.asarray(labels)).to_hdf(label_name.format(part_number), key="fixed")

    train_feature_list = []
    train_label_list = []
    test_feature_list = []
    test_label_list = []
    with open(in_file_path, encoding="utf-8") as file_in:
        count = 0
        train_part_number = 0
        test_part_number = 0
        for i, line in enumerate(file_in):
            count += 1
            if count % 1000000 == 0:
                print("Have handle {}w lines.".format(count // 10000))
            line = line.strip("\n")
            items = line.split("\t")
            if len(items) != 40:
                continue
            label = float(items[0])
            values = items[1:14]
            cats = items[14:]
            assert len(values) == 13, "value.size: {}".format(len(values))
            assert len(cats) == 26, "cat.size: {}".format(len(cats))
            ids, wts = criteo_stats.map_cat2id(values, cats)
            if i not in test_indices_set:
                train_feature_list.append(ids + wts)
                train_label_list.append(label)
            else:
                test_feature_list.append(ids + wts)
                test_label_list.append(label)
            if train_label_list and (len(train_label_list) % part_rows == 0):
                _flush(train_feature_list, train_label_list,
                       train_feature_file_name, train_label_file_name, train_part_number)
                train_feature_list = []
                train_label_list = []
                train_part_number += 1
            if test_label_list and (len(test_label_list) % part_rows == 0):
                _flush(test_feature_list, test_label_list,
                       test_feature_file_name, test_label_file_name, test_part_number)
                test_feature_list = []
                test_label_list = []
                test_part_number += 1
    # Flush any remaining partial part.
    if train_label_list:
        _flush(train_feature_list, train_label_list,
               train_feature_file_name, train_label_file_name, train_part_number)
    if test_label_list:
        _flush(test_feature_list, test_label_list,
               test_feature_file_name, test_label_file_name, test_part_number)
#
def _main():
    """CLI entry point: gather Criteo stats, then convert the dataset to HDF5 parts."""
    parser = argparse.ArgumentParser(description="Get and Process datasets")
    parser.add_argument("--raw_data_path", default="./raw_data", help="The path to save dataset")
    parser.add_argument("--output_path", default="./output",
                        help="The path to save dataset")
    args, _ = parser.parse_known_args()

    base_path = args.raw_data_path
    criteo_stat = CriteoStatsDict()

    # step 1, stats the vocab and normalize value
    # NOTE(review): paths are built by plain string concatenation, so
    # --raw_data_path is presumably expected to end with "/" — confirm.
    datafile_path = base_path + "train_small.txt"
    stats_out_path = base_path + "stats_dict/"
    mkdir_path(stats_out_path)
    statsdata(datafile_path, stats_out_path, criteo_stat)
    print("------" * 10)
    criteo_stat.load_dict(dict_path=stats_out_path, prefix="")
    criteo_stat.get_cat2id(threshold=100)

    # step 2, transform data trans2h5; version 2: np.random.shuffle
    infile_path = base_path + "train_small.txt"
    mkdir_path(args.output_path)
    random_split_trans2h5(infile_path, args.output_path, criteo_stat, part_rows=2000000, test_size=0.1, seed=2020)


if __name__ == "__main__":
    _main()
| 44.360595 | 119 | 0.588704 |
acf61838407dad63511b021786c2f95a1e0176e1 | 24,270 | py | Python | whey/builder.py | Sjord/whey | f06477f32820679bc9e860d77a0f1d46c7adade4 | [
"MIT"
] | null | null | null | whey/builder.py | Sjord/whey | f06477f32820679bc9e860d77a0f1d46c7adade4 | [
"MIT"
] | null | null | null | whey/builder.py | Sjord/whey | f06477f32820679bc9e860d77a0f1d46c7adade4 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
#
# builder.py
"""
The actual wheel builder.
"""
#
# Copyright © 2020-2021 Dominic Davis-Foster <dominic@davis-foster.co.uk>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
#
# stdlib
import os
import pathlib
import posixpath
import re
import shutil
import tarfile
import warnings
from abc import ABC, abstractmethod
from datetime import datetime
from email.headerregistry import Address
from functools import partial
from typing import Any, Dict, Iterator, Mapping, Optional
# 3rd party
import click
import dom_toml
import handy_archives
from consolekit.terminal_colours import ColourTrilean, Fore, resolve_color_default
from dist_meta import entry_points, metadata, wheel
from dist_meta.metadata_mapping import MetadataMapping
from domdf_python_tools.paths import PathPlus, sort_paths, traverse_to_file
from domdf_python_tools.typing import PathLike
from domdf_python_tools.words import word_join
from first import first
from shippinglabel.checksum import get_record_entry
from shippinglabel.requirements import ComparableRequirement, combine_requirements
__all__ = ["AbstractBuilder", "SDistBuilder", "WheelBuilder"]
archive_name_sub_re = re.compile(
r"[^\w\d.]+",
re.UNICODE,
)
class AbstractBuilder(ABC):
"""
Abstract base class for builders of Python distributions using metadata read from ``pyproject.toml``.
:param project_dir: The project to build the distribution for.
:param build_dir: The (temporary) build directory.
:default build_dir: :file:`{<project_dir>}/build/`
:param out_dir: The output directory.
:default out_dir: :file:`{<project_dir>}/dist/`
:param verbose: Whether to enable verbose output.
:param colour: Whether to use coloured output.
.. autosummary-widths:: 1/2
:html: 3/10
.. autoclasssumm:: AbstractBuilder
:autosummary-sections: Attributes
.. latex:clearpage::
.. autosummary-widths:: 7/16
:html: 4/10
.. autoclasssumm:: AbstractBuilder
:autosummary-sections: Methods
"""
def __init__(
self,
project_dir: PathPlus,
config: Mapping[str, Any],
build_dir: Optional[PathLike] = None,
out_dir: Optional[PathLike] = None,
*,
verbose: bool = False,
colour: ColourTrilean = None,
):
# Walk up the tree until a "pyproject.toml" file is found.
#: The pyproject.toml directory
self.project_dir: PathPlus = traverse_to_file(PathPlus(project_dir), "pyproject.toml")
#: Configuration parsed from ``pyproject.toml``.
self.config: Dict[str, Any] = dict(config)
#: The archive name, without the tag
self.archive_name = archive_name_sub_re.sub(
'_',
self.config["name"],
) + f"-{self.config['version']}"
#: The (temporary) build directory.
self.build_dir = PathPlus(build_dir or self.default_build_dir)
self.clear_build_dir()
#: The output directory.
self.out_dir = PathPlus(out_dir or self.default_out_dir)
self.out_dir.maybe_make(parents=True)
#: Whether to enable verbose output.
self.verbose = verbose
#: Whether to use coloured output.
self.colour = resolve_color_default(colour)
self._echo = partial(click.echo, color=self.colour)
@property
def default_build_dir(self) -> PathPlus: # pragma: no cover
"""
Provides a default for the ``build_dir`` argument.
"""
return self.project_dir / "build"
@property
def default_out_dir(self) -> PathPlus: # pragma: no cover
"""
Provides a default for the ``out_dir`` argument.
"""
return self.project_dir / "dist"
@property
def code_directory(self) -> str:
"""
The directory containing the code in the build directory.
"""
return self.config["source-dir"]
def clear_build_dir(self) -> None:
"""
Clear the build directory of any residue from previous builds.
"""
if self.build_dir.is_dir():
shutil.rmtree(self.build_dir)
self.build_dir.maybe_make(parents=True)
def iter_source_files(self) -> Iterator[PathPlus]:
"""
Iterate over the files in the source directory.
"""
pkgdir = self.project_dir / self.config["source-dir"] / self.config["package"]
if not pkgdir.is_dir():
message = f"Package directory {self.config['package']!r} not found"
if self.config["source-dir"]:
raise FileNotFoundError(f"{message} in {self.config['source-dir']!r}.")
else:
raise FileNotFoundError(f"{message}.")
found_file = False
for py_pattern in {"**/*.py", "**/*.pyi", "**/*.pyx", "**/py.typed"}:
for py_file in pkgdir.rglob(py_pattern):
if "__pycache__" not in py_file.parts:
found_file = True
yield py_file
if not found_file:
raise FileNotFoundError(f"No Python source files found in {pkgdir}")
def copy_source(self) -> None:
"""
Copy source files into the build directory.
"""
for py_file in self.iter_source_files():
target = self.build_dir / py_file.relative_to(self.project_dir / self.code_directory)
target.parent.maybe_make(parents=True)
target.write_clean(py_file.read_text())
shutil.copystat(py_file, target)
self.report_copied(py_file, target)
def _echo_if_v(self, *args, **kwargs):
if self.verbose:
self._echo(*args, **kwargs)
def report_copied(self, source: pathlib.Path, target: pathlib.Path) -> None:
"""
Report that a file has been copied into the build directory.
The format is::
Copying {source} -> {target.relative_to(self.build_dir)}
.. latex:vspace:: -5px
:param source: The source file
:param target: The file in the build directory.
"""
self._echo_if_v(
f"Copying {source.resolve().as_posix()} -> {target.relative_to(self.build_dir).as_posix()}"
)
def report_removed(self, removed_file: pathlib.Path) -> None:
"""
Reports the removal of a file from the build directory.
The format is::
Removing {removed_file.relative_to(self.build_dir)}
.. latex:vspace:: -5px
:param removed_file:
"""
self._echo_if_v(f"Removing {removed_file.relative_to(self.build_dir).as_posix()}")
def report_written(self, written_file: pathlib.Path) -> None:
"""
Report that a file has been written to the build directory.
The format is::
Writing {written_file.relative_to(self.build_dir)}
.. latex:vspace:: -5px
:param written_file:
"""
self._echo_if_v(f"Writing {written_file.relative_to(self.build_dir).as_posix()}")
def copy_additional_files(self) -> None:
"""
Copy additional files to the build directory, as specified in the ``additional-files`` key.
"""
self.parse_additional_files(*self.config["additional-files"])
def parse_additional_files(self, *entries: str) -> None: # pylint: disable=useless-return
r"""
Copy additional files to the build directory, by parsing `MANIFEST.in`_-style entries.
.. _MANIFEST.in: https://packaging.python.org/guides/using-manifest-in/
:param \*entries:
"""
def copy_file(filename):
target = self.build_dir / filename.relative_to(self.project_dir / self.code_directory)
target.parent.maybe_make(parents=True)
shutil.copy2(src=filename, dst=target)
self.report_copied(filename, target)
for entry in entries:
parts = entry.split(' ')
if parts[0] == "include":
for include_pat in parts[1:]:
for include_file in sorted(self.project_dir.glob(include_pat)):
if include_file.is_file():
copy_file(filename=include_file)
elif parts[0] == "exclude":
for exclude_pat in parts[1:]:
for exclude_file in sorted(self.build_dir.glob(exclude_pat)):
if exclude_file.is_file():
exclude_file.unlink()
self.report_removed(exclude_file)
elif parts[0] == "recursive-include":
for include_file in sort_paths(*(self.project_dir / parts[1]).rglob(parts[2])):
if "__pycache__" in include_file.parts:
continue
if include_file.is_file():
copy_file(filename=include_file)
elif parts[0] == "recursive-exclude":
for exclude_file in sort_paths(*(self.build_dir / parts[1]).rglob(parts[2])):
if exclude_file.is_file():
exclude_file.unlink()
self.report_removed(exclude_file)
else: # pragma: no cover
warnings.warn(f"Unsupported command in 'additional-files': {entry}")
#
# elif parts[0] == "global-include":
# for include_pat in parts[1:]:
# for include_file in self.project_dir.rglob(include_pat):
# if include_file.is_file():
# copy_file(filename=include_file)
#
# elif parts[0] == "global-exclude":
# for exclude_pat in parts[1:]:
# for exclude_file in self.project_dir.rglob(exclude_pat):
# if exclude_file.is_file():
# exclude_file.unlink()
# self.report_removed(exclude_file)
#
# elif parts[0] == "graft":
# for graft_dir in self.project_dir.rglob(parts[1]):
# for graft_file in graft_dir.rglob("*.*"):
# if graft_file.is_file():
# copy_file(graft_file)
#
# elif parts[0] == "prune":
# for prune_dir in self.project_dir.rglob(parts[1]):
# for prune_file in prune_dir.rglob("*.*"):
# if prune_file.is_file():
# prune_file.unlink()
# self.report_removed(exclude_file)
return
def write_license(self, dest_dir: PathPlus, dest_filename: str = "LICENSE"):
    """
    Write the ``LICENSE`` file.

    Does nothing when no licence text is configured.

    :param dest_dir: The directory to write the file into.
    :param dest_filename: The name of the file to write in ``dest_dir``.
    """

    licence = self.config.get("license", None)
    if licence is None:
        return

    target = dest_dir / dest_filename
    target.parent.maybe_make(parents=True)
    target.write_clean(licence.text)
    self.report_written(target)
def parse_authors(self) -> Dict[str, str]:
    """
    Parse the :tconf:`project.authors` and :tconf:`~project.maintainers` fields into :core-meta:`Author`,
    :core-meta:`Maintainer-Email` etc.

    :return: A mapping of field names to values.

        Possible field names are ``Author``, ``Author-Email``, ``Maintainer``, and ``Maintainer-Email``.
    """  # noqa: D400

    author = []
    author_email = []
    maintainer = []
    maintainer_email = []

    for entry in self.config["authors"]:
        if entry["name"] and entry["email"]:
            # Both parts present: format as a single RFC 5322 address.
            address = Address(entry["name"], addr_spec=entry["email"])
            author_email.append(str(address))
        elif entry["email"]:
            author_email.append(entry["email"])
        elif entry["name"]:
            author.append(entry["name"])

    for entry in self.config["maintainers"]:
        if entry["name"] and entry["email"]:
            address = Address(entry["name"], addr_spec=entry["email"])
            # BUG FIX: a maintainer with both name and email was previously
            # appended to author_email, misfiling them under Author-email and
            # dropping them from Maintainer-email entirely.
            maintainer_email.append(str(address))
        elif entry["email"]:
            maintainer_email.append(entry["email"])
        elif entry["name"]:
            maintainer.append(entry["name"])

    # TODO: I'm not quite sure how PEP 621 expects a name for one author and the email for another to be handled.
    output = {}

    # Email form takes precedence over the bare-name form for each role.
    if author_email:
        output["Author-email"] = ", ".join(author_email)
    elif author:
        output["Author"] = word_join(author)

    if maintainer_email:
        output["Maintainer-email"] = ", ".join(maintainer_email)
    elif maintainer:
        output["Maintainer"] = word_join(maintainer)

    return output
def get_metadata_map(self) -> MetadataMapping:
"""
Generate the content of the ``METADATA`` / ``PKG-INFo`` file.
"""
metadata_mapping = MetadataMapping()
# TODO: metadata 2.2
# Need to translate pep621 dynamic into core metadata field names
metadata_mapping["Metadata-Version"] = "2.1"
metadata_mapping["Name"] = self.config["name"]
metadata_mapping["Version"] = str(self.config["version"])
def add_not_none(key: str, field: str):
if self.config[key] is not None:
metadata_mapping[field] = self.config[key]
def add_multiple(key: str, field: str):
for value in self.config[key]:
metadata_mapping[field] = str(value)
metadata_mapping.update(self.parse_authors())
add_not_none("description", "Summary")
add_not_none("license-key", "License")
add_multiple("classifiers", "Classifier")
add_multiple("dependencies", "Requires-Dist")
if self.config["keywords"]:
metadata_mapping["Keywords"] = ','.join(self.config["keywords"])
seen_hp = False
for category, url in self.config["urls"].items():
if category.lower() in {"homepage", "home page"} and not seen_hp:
metadata_mapping["Home-page"] = url
seen_hp = True
else:
metadata_mapping["Project-URL"] = f"{category}, {url}"
for platform in (self.config.get("platforms", None) or ()):
metadata_mapping["Platform"] = platform
if self.config["requires-python"]:
metadata_mapping["Requires-Python"] = str(self.config["requires-python"])
for extra, requirements in self.config["optional-dependencies"].items():
metadata_mapping["Provides-Extra"] = extra
for requirement in requirements:
requirement = ComparableRequirement(str(requirement))
if requirement.marker:
requirement.marker = f"({requirement.marker!s}) and extra == {extra!r}"
else:
requirement.marker = f"extra == {extra!r}"
metadata_mapping["Requires-Dist"] = str(requirement)
# TODO:
# https://packaging.python.org/specifications/core-metadata/#requires-external-multiple-use
# https://packaging.python.org/specifications/core-metadata/#provides-dist-multiple-use
# https://packaging.python.org/specifications/core-metadata/#obsoletes-dist-multiple-use
if self.config["readme"] is not None:
metadata_mapping["Description"] = self.config["readme"].text
metadata_mapping["Description-Content-Type"] = self.config["readme"].content_type
return metadata_mapping
def write_metadata(self, metadata_file: PathPlus, metadata_mapping: MetadataMapping):
"""
Write `Core Metadata`_ to the given file.
.. _Core Metadata: https://packaging.python.org/specifications/core-metadata
:param metadata_file:
"""
metadata_file.write_text(metadata.dumps(metadata_mapping))
self.report_written(metadata_file)
def call_additional_hooks(self):
"""
Subclasses may call this method to give *their* subclasses an opportunity to run custom code.
For example, the wheel builder calls this as the final step before adding files to the archive,
giving an opportunity for subclasses of :class:`~.WheelBuilder` to include additional steps
without having to override the entire :meth:`~.WheelBuilder.build_wheel` method.
"""
@abstractmethod
def build(self):
"""
Build the distribution.
:returns: The filename of the created archive.
"""
raise NotImplementedError
class SDistBuilder(AbstractBuilder):
"""
Builds source distributions using metadata read from ``pyproject.toml``.
:param project_dir: The project to build the distribution for.
:param build_dir: The (temporary) build directory.
:default build_dir: :file:`{<project_dir>}/build/sdist`
:param out_dir: The output directory.
:default out_dir: :file:`{<project_dir>}/dist`
:param verbose: Enable verbose output.
"""
@property
def default_build_dir(self) -> PathPlus: # pragma: no cover
"""
Provides a default for the ``build_dir`` argument.
"""
return self.project_dir / "build" / "sdist"
@property
def code_directory(self) -> str:
"""
The directory containing the code in the build and project directories.
"""
return ''
def create_sdist_archive(self) -> str:
"""
Create the sdist archive.
:return: The filename of the created archive.
"""
self.out_dir.maybe_make(parents=True)
sdist_filename = self.out_dir / f"{self.archive_name}.tar.gz"
with tarfile.open(sdist_filename, mode="w:gz", format=tarfile.PAX_FORMAT) as sdist_archive:
for file in self.build_dir.rglob('*'):
if file.is_file():
arcname = posixpath.join(self.archive_name, file.relative_to(self.build_dir).as_posix())
sdist_archive.add(str(file), arcname=arcname)
self._echo(Fore.GREEN(f"Source distribution created at {sdist_filename.resolve().as_posix()}"))
return os.path.basename(sdist_filename)
def write_pyproject_toml(self):
"""
Write the ``pyproject.toml`` file.
"""
# Copy pyproject.toml
pp_toml = dom_toml.load(self.project_dir / "pyproject.toml")
# Ensure whey is the build backend and a requirement
pp_toml.setdefault("build-system", {})
current_requires = map(ComparableRequirement, pp_toml["build-system"].get("requires", ()))
new_requirements = combine_requirements(ComparableRequirement("whey"), *current_requires)
pp_toml["build-system"]["requires"] = list(map(str, sorted(new_requirements)))
pp_toml["build-system"]["build-backend"] = "whey"
dynamic = set(pp_toml["project"].get("dynamic", ()))
# Make the "dependencies" static
if "dependencies" in dynamic:
dynamic.remove("dependencies")
pp_toml["project"]["dependencies"] = list(map(str, sorted(self.config["dependencies"])))
# Make the "classifiers" static
if "classifiers" in dynamic:
dynamic.remove("classifiers")
pp_toml["project"]["classifiers"] = list(map(str, sorted(self.config["classifiers"])))
# Make "requires-python" static
if "requires-python" in dynamic:
dynamic.remove("requires-python")
pp_toml["project"]["requires-python"] = str(self.config["requires-python"])
# Set the new value for "dynamic"
pp_toml["project"]["dynamic"] = dynamic
dom_toml.dump(pp_toml, self.build_dir / "pyproject.toml", encoder=dom_toml.TomlEncoder)
self.report_copied(self.project_dir / "pyproject.toml", self.build_dir / "pyproject.toml")
def build_sdist(self) -> str:
"""
Build the source distribution.
:return: The filename of the created archive.
"""
if self.build_dir.is_dir():
shutil.rmtree(self.build_dir)
self.build_dir.maybe_make(parents=True)
self.copy_source()
self.copy_additional_files()
self.write_license(self.build_dir)
self.write_pyproject_toml()
for filename in ["requirements.txt"]:
source = self.project_dir / filename
if source.is_file():
dest = self.build_dir / filename
dest.write_clean(source.read_text())
self.report_copied(source, dest)
self.write_readme()
self.write_metadata(self.build_dir / "PKG-INFO", self.get_metadata_map())
self.call_additional_hooks()
return self.create_sdist_archive()
def write_readme(self):
"""
Write the ``README.*`` file.
"""
if self.config["readme"] is None:
return
if self.config["readme"].content_type == "text/x-rst":
target = self.build_dir / "README.rst"
elif self.config["readme"].content_type == "text/markdown":
target = self.build_dir / "README.md"
else:
target = self.build_dir / "README"
target.parent.maybe_make(parents=True)
target.write_clean(self.config["readme"].text)
self.report_written(target)
build = build_sdist
class WheelBuilder(AbstractBuilder):
"""
Builds wheel binary distributions using metadata read from ``pyproject.toml``.
:param project_dir: The project to build the distribution for.
:param build_dir: The (temporary) build directory.
:default build_dir: :file:`{<project_dir>}/build/wheel`
:param out_dir: The output directory.
:default out_dir: :file:`{<project_dir>}/dist`
:param verbose: Enable verbose output.
.. autosummary-widths:: 11/32
:html: 4/10
.. latex:vspace:: -10px
"""
@property
def default_build_dir(self) -> PathPlus: # pragma: no cover
"""
Provides a default for the ``build_dir`` argument.
"""
return self.project_dir / "build" / "wheel"
@property
def dist_info(self) -> PathPlus:
"""
The ``*.dist-info`` directory in the build directory.
"""
dist_info = self.build_dir / f"{self.archive_name}.dist-info"
dist_info.maybe_make(parents=True)
return dist_info
@property
def tag(self) -> str:
"""
The tag for the wheel.
"""
return "py3-none-any"
@property
def generator(self) -> str:
"""
The value for the ``Generator`` field in ``*.dist-info/WHEEL``.
"""
# this package
from whey import __version__
return f"whey ({__version__})"
def write_entry_points(self) -> None:
"""
Write the list of entry points to the wheel, as specified in
``[project.scripts]``, ``[project.gui-scripts]`` and ``[project.entry-points]``
""" # noqa: D400
ep_dict = {}
if self.config["scripts"]:
ep_dict["console_scripts"] = self.config["scripts"]
if self.config["gui-scripts"]:
ep_dict["gui_scripts"] = self.config["gui-scripts"]
ep_dict.update(self.config["entry-points"])
entry_points_file = self.dist_info / "entry_points.txt"
entry_points.dump(ep_dict, entry_points_file)
self.report_written(entry_points_file)
def write_wheel(self) -> None:
"""
Write the metadata to the ``WHEEL`` file.
"""
wheel_file = self.dist_info / "WHEEL"
wheel_file.write_clean(
wheel.dumps({
"Wheel-Version": "1.0",
"Generator": self.generator,
"Root-Is-Purelib": True,
"Tag": [self.tag],
})
)
self.report_written(wheel_file)
@staticmethod
def get_source_epoch() -> Optional[datetime]:
    """
    Returns the parsed value of the :envvar:`SOURCE_DATE_EPOCH` environment variable, or :py:obj:`None` if unset.

    See https://reproducible-builds.org/specs/source-date-epoch/ for the specification.

    :raises ValueError: if the value is in an invalid format.
    """

    # If SOURCE_DATE_EPOCH is set (e.g. by Debian), it's used for timestamps inside the wheel.
    epoch: Optional[str] = os.environ.get("SOURCE_DATE_EPOCH")

    if epoch is None:
        return None

    if not epoch.isdigit():
        raise ValueError(f"'SOURCE_DATE_EPOCH' must be an integer with no fractional component, not {epoch!r}")

    return datetime.utcfromtimestamp(int(epoch))
def create_wheel_archive(self) -> str:
"""
Create the wheel archive.
:return: The filename of the created archive.
"""
wheel_filename = self.out_dir / f"{self.archive_name}-{self.tag}.whl"
self.out_dir.maybe_make(parents=True)
mtime = self.get_source_epoch()
non_record_filenames = []
record_filenames = []
for file in self.build_dir.rglob('*'):
if not file.is_file():
continue
if "RECORD" in file.name and self.dist_info.name in file.parts:
record_filenames.append(file)
continue
non_record_filenames.append(file)
record_filenames = sort_paths(*record_filenames, self.dist_info / "RECORD")
with handy_archives.ZipFile(wheel_filename, mode='w') as wheel_archive:
with (self.dist_info / "RECORD").open('w') as fp:
for file in sort_paths(*non_record_filenames):
fp.write(f"{get_record_entry(file, relative_to=self.build_dir)}\n")
wheel_archive.write_file(
file,
arcname=file.relative_to(self.build_dir),
mtime=mtime,
)
for file in record_filenames:
fp.write(f"{file.relative_to(self.build_dir).as_posix()},,\n")
for file in record_filenames:
wheel_archive.write_file(
file,
arcname=file.relative_to(self.build_dir),
mtime=mtime,
)
self.report_written(file)
self._echo(Fore.GREEN(f"Wheel created at {wheel_filename.resolve().as_posix()}"))
return wheel_filename.name
def build_wheel(self) -> str:
"""
Build the binary wheel distribution.
:return: The filename of the created archive.
"""
if self.build_dir.is_dir():
shutil.rmtree(self.build_dir)
self.build_dir.maybe_make(parents=True)
self.copy_source()
self.copy_additional_files()
self.write_license(self.dist_info)
self.write_entry_points()
self.write_metadata(self.dist_info / "METADATA", self.get_metadata_map())
self.write_wheel()
self.call_additional_hooks()
return self.create_wheel_archive()
build = build_wheel
| 29.135654 | 111 | 0.708982 |
acf618731b0454a88cac4d3772d7f609ad47b58a | 1,061 | py | Python | setup.py | giordyb/python-keycloak | 67798d4738ff1016a7ff31e763d7badd4daa7dbc | [
"MIT"
] | null | null | null | setup.py | giordyb/python-keycloak | 67798d4738ff1016a7ff31e763d7badd4daa7dbc | [
"MIT"
] | null | null | null | setup.py | giordyb/python-keycloak | 67798d4738ff1016a7ff31e763d7badd4daa7dbc | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from setuptools import setup

# Read the long description from the README.  Request UTF-8 explicitly so
# the build does not depend on the platform's default locale encoding
# (the README contains non-ASCII author names / markdown punctuation).
with open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()

setup(
    name='python-keycloak',
    version='0.21.0',
    url='https://github.com/marcospereirampj/python-keycloak',
    license='The MIT License',
    author='Marcos Pereira',
    author_email='marcospereira.mpj@gmail.com',
    keywords='keycloak openid',
    description='python-keycloak is a Python package providing access to the Keycloak API.',
    long_description=long_description,
    long_description_content_type="text/markdown",
    packages=['keycloak', 'keycloak.authorization', 'keycloak.tests'],
    install_requires=['requests>=2.20.0', 'python-jose>=1.4.0'],
    tests_require=['httmock>=1.2.5'],
    classifiers=[
        'Programming Language :: Python :: 3',
        'License :: OSI Approved :: MIT License',
        'Development Status :: 3 - Alpha',
        'Operating System :: MacOS',
        'Operating System :: Unix',
        'Operating System :: Microsoft :: Windows',
        'Topic :: Utilities'
    ]
)
acf61a33bc176430dfd98b4dd8ae1541762d04a3 | 48,891 | py | Python | nfv/nfv-plugins/nfv_plugins/nfvi_plugins/nfvi_guest_api.py | riddopic/nfv | e5ced4ade4916910646bcf8018dfabadef447fc2 | [
"Apache-2.0"
] | 2 | 2020-02-07T19:01:36.000Z | 2022-02-23T01:41:46.000Z | nfv/nfv-plugins/nfv_plugins/nfvi_plugins/nfvi_guest_api.py | riddopic/nfv | e5ced4ade4916910646bcf8018dfabadef447fc2 | [
"Apache-2.0"
] | 1 | 2021-01-14T12:02:25.000Z | 2021-01-14T12:02:25.000Z | nfv/nfv-plugins/nfv_plugins/nfvi_plugins/nfvi_guest_api.py | riddopic/nfv | e5ced4ade4916910646bcf8018dfabadef447fc2 | [
"Apache-2.0"
] | 2 | 2021-01-13T08:39:21.000Z | 2022-02-09T00:21:55.000Z | #
# Copyright (c) 2015-2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import json
from six.moves import http_client as httplib
from nfv_common import debug
from nfv_vim import nfvi
from nfv_plugins.nfvi_plugins import config
from nfv_plugins.nfvi_plugins.openstack import exceptions
from nfv_plugins.nfvi_plugins.openstack import guest
from nfv_plugins.nfvi_plugins.openstack import openstack
from nfv_plugins.nfvi_plugins.openstack import rest_api
from nfv_plugins.nfvi_plugins.openstack.objects import OPENSTACK_SERVICE
DLOG = debug.debug_get_logger('nfv_plugins.nfvi_plugins.guest_api')
def guest_service_get_name(name):
    """Translate an nfvi guest service name into the VIM service name.

    Anything other than the heartbeat service maps to UNKNOWN.
    """
    if name == guest.GUEST_SERVICE_NAME.HEARTBEAT:
        return nfvi.objects.v1.GUEST_SERVICE_NAME.HEARTBEAT
    return nfvi.objects.v1.GUEST_SERVICE_NAME.UNKNOWN
def guest_service_get_admin_state(state):
    """Translate an nfvi guest service state into an administrative state.

    An enabled service is considered unlocked; everything else is locked.
    """
    return (nfvi.objects.v1.GUEST_SERVICE_ADMIN_STATE.UNLOCKED
            if state == guest.GUEST_SERVICE_STATE.ENABLED
            else nfvi.objects.v1.GUEST_SERVICE_ADMIN_STATE.LOCKED)
def guest_service_get_service_state(state):
    """Translate a guest service administrative state into the nfvi state.

    Inverse of guest_service_get_admin_state: unlocked -> enabled,
    anything else -> disabled.
    """
    return (guest.GUEST_SERVICE_STATE.ENABLED
            if state == nfvi.objects.v1.GUEST_SERVICE_ADMIN_STATE.UNLOCKED
            else guest.GUEST_SERVICE_STATE.DISABLED)
def guest_service_get_oper_state(status):
    """Translate an nfvi guest service status into an operational state.

    An enabled status maps to the enabled operational state; everything
    else is reported as disabled.
    """
    if status == guest.GUEST_SERVICE_STATUS.ENABLED:
        return nfvi.objects.v1.GUEST_SERVICE_OPER_STATE.ENABLED
    return nfvi.objects.v1.GUEST_SERVICE_OPER_STATE.DISABLED
def instance_get_event(action_type, pre_notification):
    """Map an NFVI instance action-type to the guest event to deliver.

    Migrate and resize actions have distinct begin/end events selected by
    ``pre_notification``; confirm-resize and revert-resize always map to
    the resize-end event.  Unrecognized action-types are logged and map
    to the UNKNOWN event.
    """
    _ACTION = nfvi.objects.v1.INSTANCE_ACTION_TYPE
    _EVENT = guest.GUEST_EVENT

    # Actions whose event does not depend on pre/post notification.
    simple_events = {
        _ACTION.PAUSE: _EVENT.PAUSE,
        _ACTION.UNPAUSE: _EVENT.UNPAUSE,
        _ACTION.SUSPEND: _EVENT.SUSPEND,
        _ACTION.RESUME: _EVENT.RESUME,
        _ACTION.REBOOT: _EVENT.REBOOT,
        _ACTION.STOP: _EVENT.STOP,
        _ACTION.CONFIRM_RESIZE: _EVENT.RESIZE_END,
        _ACTION.REVERT_RESIZE: _EVENT.RESIZE_END,
    }
    # Actions with distinct (pre-event, post-event) pairs.
    phased_events = {
        _ACTION.LIVE_MIGRATE: (_EVENT.LIVE_MIGRATE_BEGIN,
                               _EVENT.LIVE_MIGRATE_END),
        _ACTION.COLD_MIGRATE: (_EVENT.COLD_MIGRATE_BEGIN,
                               _EVENT.COLD_MIGRATE_END),
        _ACTION.RESIZE: (_EVENT.RESIZE_BEGIN, _EVENT.RESIZE_END),
    }

    if action_type in simple_events:
        return simple_events[action_type]
    if action_type in phased_events:
        begin_event, end_event = phased_events[action_type]
        return begin_event if pre_notification else end_event
    DLOG.error("Unsupported action-type %s" % action_type)
    return _EVENT.UNKNOWN
def instance_get_action_type(event):
    """Map a guest instance event back to the NFVI action-type.

    Begin/end event pairs collapse onto the same action-type.
    Unrecognized events are logged and map to UNKNOWN.
    """
    _ACTION = nfvi.objects.v1.INSTANCE_ACTION_TYPE
    _EVENT = guest.GUEST_EVENT

    event_to_action = {
        _EVENT.PAUSE: _ACTION.PAUSE,
        _EVENT.UNPAUSE: _ACTION.UNPAUSE,
        _EVENT.SUSPEND: _ACTION.SUSPEND,
        _EVENT.RESUME: _ACTION.RESUME,
        _EVENT.REBOOT: _ACTION.REBOOT,
        _EVENT.STOP: _ACTION.STOP,
        _EVENT.LIVE_MIGRATE_BEGIN: _ACTION.LIVE_MIGRATE,
        _EVENT.LIVE_MIGRATE_END: _ACTION.LIVE_MIGRATE,
        _EVENT.COLD_MIGRATE_BEGIN: _ACTION.COLD_MIGRATE,
        _EVENT.COLD_MIGRATE_END: _ACTION.COLD_MIGRATE,
        _EVENT.RESIZE_BEGIN: _ACTION.RESIZE,
        _EVENT.RESIZE_END: _ACTION.RESIZE,
    }

    if event in event_to_action:
        return event_to_action[event]
    DLOG.error("Unsupported event %s" % event)
    return _ACTION.UNKNOWN
def get_services_with_guest_service_state(services):
    """Return service records with 'admin_state' translated to the nfvi
    guest-service 'state'.

    Builds new dicts; the input records are not modified.
    """
    return [
        {'service': entry['service'],
         'state': guest_service_get_service_state(entry['admin_state'])}
        for entry in services
    ]
class NFVIGuestAPI(nfvi.api.v1.NFVIGuestAPI):
    """
    NFVI Guest API Class Definition

    Plugin implementing the VIM's guest-services interface: it talks to
    the guest REST service on behalf of the VIM and exposes a small
    REST-API surface of its own for callbacks from the guest agent.
    """
    # Plugin identification reported to the NFVI plugin framework.
    _name = 'Guest-API'
    _version = '1.0.0'
    _provider = 'Wind River'
    _signature = '22b3dbf6-e4ba-441b-8797-fb8a51210a43'

    @property
    def name(self):
        # Plugin name (read-only identity attribute).
        return self._name

    @property
    def version(self):
        return self._version

    @property
    def provider(self):
        return self._provider

    @property
    def signature(self):
        return self._signature
def __init__(self):
    """Initialize plugin state; real setup happens in initialize()."""
    super(NFVIGuestAPI, self).__init__()
    self._token = None  # cached platform keystone token
    self._directory = None  # platform service directory
    self._openstack_directory = None  # openstack service directory
    self._rest_api_server = None  # REST server for guest callbacks
    # Query callbacks: a single registration each (last one wins).
    self._host_services_query_callback = None
    self._guest_services_query_callback = None
    # Notify callbacks: multiple listeners supported per event type.
    self._guest_services_state_notify_callbacks = list()
    self._guest_services_alarm_notify_callbacks = list()
    self._guest_services_action_notify_callbacks = list()
def _host_supports_nova_compute(self, personality):
return (('worker' in personality) and
(self._openstack_directory.get_service_info(
OPENSTACK_SERVICE.NOVA) is not None))
def guest_services_create(self, future, instance_uuid, host_name,
                          services, callback):
    """
    Guest Services Create

    Coroutine (driven by ``future``): refreshes the platform token if
    needed, then asks the guest service to create the given services
    for the instance.  The outcome is reported through ``callback`` as
    a dict with 'completed'/'reason' keys ('error-code' is added when
    the token has expired).
    """
    response = dict()
    response['completed'] = False
    response['reason'] = ''
    try:
        future.set_timeouts(config.CONF.get('nfvi-timeouts', None))
        # Lazily (re-)fetch the keystone token when missing or expired.
        if self._token is None or self._token.is_expired():
            future.work(openstack.get_token, self._directory)
            future.result = (yield)
            if not future.result.is_complete() or \
                    future.result.data is None:
                DLOG.error("OpenStack get-token did not complete, "
                           "instance_uuid=%s." % instance_uuid)
                return
            self._token = future.result.data
        future.work(guest.guest_services_create, self._token,
                    instance_uuid, host_name, services)
        future.result = (yield)
        if not future.result.is_complete():
            return
        response['completed'] = True
    except exceptions.OpenStackRestAPIException as e:
        if httplib.UNAUTHORIZED == e.http_status_code:
            # Token rejected: flag it expired so the next call fetches
            # a fresh one.
            response['error-code'] = nfvi.NFVI_ERROR_CODE.TOKEN_EXPIRED
            if self._token is not None:
                self._token.set_expired()
        else:
            DLOG.exception("Caught exception while trying to create "
                           "guest services, error=%s." % e)
    except Exception as e:
        DLOG.exception("Caught exception while trying to create "
                       "guest services, instance_uuid=%s, error=%s."
                       % (instance_uuid, e))
    finally:
        # Always report the (possibly failed) outcome to the caller.
        callback.send(response)
        callback.close()
def guest_services_set(self, future, instance_uuid, host_name,
                       services, callback):
    """
    Guest Services Set

    Coroutine (driven by ``future``): updates the guest services for an
    instance and returns the resulting service states to the caller via
    ``callback`` under 'result-data' (instance uuid, host name and a
    list of GuestService objects).
    """
    response = dict()
    response['completed'] = False
    response['reason'] = ''
    try:
        future.set_timeouts(config.CONF.get('nfvi-timeouts', None))
        # Lazily (re-)fetch the keystone token when missing or expired.
        if self._token is None or self._token.is_expired():
            future.work(openstack.get_token, self._directory)
            future.result = (yield)
            if not future.result.is_complete() or \
                    future.result.data is None:
                DLOG.error("OpenStack get-token did not complete, "
                           "instance_uuid=%s." % instance_uuid)
                return
            self._token = future.result.data
        # Translate admin-state into the wire-format guest state.
        services_data = get_services_with_guest_service_state(services)
        future.work(guest.guest_services_set, self._token,
                    instance_uuid, host_name, services_data)
        future.result = (yield)
        if not future.result.is_complete():
            return
        set_data = future.result.data
        result_data = dict()
        result_data['instance_uuid'] = set_data['uuid']
        result_data['host_name'] = set_data['hostname']
        # Convert each returned service record into a GuestService object.
        service_objs = list()
        for service in set_data.get('services', list()):
            service_name = guest_service_get_name(service['service'])
            admin_state = guest_service_get_admin_state(service['state'])
            oper_state = guest_service_get_oper_state(service['status'])
            restart_timeout = service.get('restart-timeout', None)
            if restart_timeout is not None:
                restart_timeout = int(restart_timeout)
            service_obj = nfvi.objects.v1.GuestService(
                service_name, admin_state, oper_state, restart_timeout)
            service_objs.append(service_obj)
        result_data['services'] = service_objs
        response['result-data'] = result_data
        response['completed'] = True
    except exceptions.OpenStackRestAPIException as e:
        if httplib.UNAUTHORIZED == e.http_status_code:
            # Token rejected: flag it expired for the next attempt.
            response['error-code'] = nfvi.NFVI_ERROR_CODE.TOKEN_EXPIRED
            if self._token is not None:
                self._token.set_expired()
        else:
            DLOG.exception("Caught exception while trying to set "
                           "guest services, error=%s." % e)
    except Exception as e:
        DLOG.exception("Caught exception while trying to set "
                       "guest services, instance_uuid=%s, error=%s."
                       % (instance_uuid, e))
    finally:
        callback.send(response)
        callback.close()
def guest_services_delete(self, future, instance_uuid, callback):
    """
    Guest Services Delete

    Coroutine (driven by ``future``): deletes the guest services for an
    instance.  A 404 from the guest service is treated as success (the
    services are already gone).
    """
    response = dict()
    response['completed'] = False
    response['reason'] = ''
    try:
        future.set_timeouts(config.CONF.get('nfvi-timeouts', None))
        # Lazily (re-)fetch the keystone token when missing or expired.
        if self._token is None or self._token.is_expired():
            future.work(openstack.get_token, self._directory)
            future.result = (yield)
            if not future.result.is_complete() or \
                    future.result.data is None:
                DLOG.error("OpenStack get-token did not complete, "
                           "instance_uuid=%s." % instance_uuid)
                return
            self._token = future.result.data
        future.work(guest.guest_services_delete, self._token,
                    instance_uuid)
        try:
            future.result = (yield)
            if not future.result.is_complete():
                return
        except exceptions.OpenStackRestAPIException as e:
            # NOT_FOUND means already deleted; anything else propagates.
            if httplib.NOT_FOUND != e.http_status_code:
                raise
        response['completed'] = True
    except exceptions.OpenStackRestAPIException as e:
        if httplib.UNAUTHORIZED == e.http_status_code:
            # Token rejected: flag it expired for the next attempt.
            response['error-code'] = nfvi.NFVI_ERROR_CODE.TOKEN_EXPIRED
            if self._token is not None:
                self._token.set_expired()
        else:
            DLOG.exception("Caught exception while trying to delete "
                           "guest services, error=%s." % e)
    except Exception as e:
        DLOG.exception("Caught exception while trying to delete "
                       "guest services, instance_uuid=%s, error=%s."
                       % (instance_uuid, e))
    finally:
        callback.send(response)
        callback.close()
def guest_services_query(self, future, instance_uuid, callback):
    """
    Guest Services Query

    Coroutine (driven by ``future``): queries the guest services for an
    instance and returns instance uuid, host name and a list of
    GuestService objects via ``callback`` under 'result-data'.
    """
    response = dict()
    response['completed'] = False
    response['reason'] = ''
    try:
        future.set_timeouts(config.CONF.get('nfvi-timeouts', None))
        # Lazily (re-)fetch the keystone token when missing or expired.
        if self._token is None or self._token.is_expired():
            future.work(openstack.get_token, self._directory)
            future.result = (yield)
            if not future.result.is_complete() or \
                    future.result.data is None:
                DLOG.error("OpenStack get-token did not complete, "
                           "instance_uuid=%s." % instance_uuid)
                return
            self._token = future.result.data
        future.work(guest.guest_services_query, self._token, instance_uuid)
        future.result = (yield)
        if not future.result.is_complete():
            DLOG.error("Guest-Services query failed, operation "
                       "did not complete, instance_uuid=%s" %
                       instance_uuid)
            return
        query_data = future.result.data
        result_data = dict()
        result_data['instance_uuid'] = query_data['uuid']
        result_data['host_name'] = query_data['hostname']
        # Convert each returned service record into a GuestService object.
        service_objs = list()
        for service in query_data.get('services', list()):
            service_name = guest_service_get_name(service['service'])
            admin_state = guest_service_get_admin_state(service['state'])
            oper_state = guest_service_get_oper_state(service['status'])
            restart_timeout = service.get('restart-timeout', None)
            if restart_timeout is not None:
                restart_timeout = int(restart_timeout)
            service_obj = nfvi.objects.v1.GuestService(
                service_name, admin_state, oper_state, restart_timeout)
            service_objs.append(service_obj)
        result_data['services'] = service_objs
        response['result-data'] = result_data
        response['completed'] = True
    except exceptions.OpenStackRestAPIException as e:
        if httplib.UNAUTHORIZED == e.http_status_code:
            # Token rejected: flag it expired for the next attempt.
            response['error-code'] = nfvi.NFVI_ERROR_CODE.TOKEN_EXPIRED
            if self._token is not None:
                self._token.set_expired()
        else:
            DLOG.exception("Caught exception while trying to query "
                           "guest services, error=%s." % e)
    except Exception as e:
        DLOG.exception("Caught exception while trying to query "
                       "guest services, instance_uuid=%s, error=%s."
                       % (instance_uuid, e))
    finally:
        callback.send(response)
        callback.close()
def guest_services_vote(self, future, instance_uuid, host_name,
                        action_type, callback):
    """
    Guest Services Vote

    Coroutine (driven by ``future``): asks the guest running in the
    instance to vote on the pending action (always delivered as the
    pre-notification/"begin" event).  On success the response carries
    the guest's uuid, host-name, action-type and vote timeout.
    """
    response = dict()
    response['completed'] = False
    response['reason'] = ''
    try:
        future.set_timeouts(config.CONF.get('nfvi-timeouts', None))
        # Lazily (re-)fetch the keystone token when missing or expired.
        if self._token is None or self._token.is_expired():
            future.work(openstack.get_token, self._directory)
            future.result = (yield)
            if not future.result.is_complete() or \
                    future.result.data is None:
                DLOG.error("OpenStack get-token did not complete, "
                           "instance_uuid=%s." % instance_uuid)
                return
            self._token = future.result.data
        # Votes always use the "begin" form of the event.
        future.work(guest.guest_services_vote, self._token, instance_uuid,
                    host_name, instance_get_event(action_type,
                                                  pre_notification=True))
        future.result = (yield)
        if not future.result.is_complete():
            DLOG.error("Guest-Services vote failed, action-type %s "
                       "did not complete, instance_uuid=%s" %
                       (action_type, instance_uuid))
            return
        vote_data = future.result.data
        # Echo back the identifiers, falling back to the request values.
        response['uuid'] = vote_data.get('uuid', instance_uuid)
        response['host-name'] = vote_data.get('hostname', host_name)
        response['action-type'] = vote_data.get('action', action_type)
        response['timeout'] = int(vote_data.get('timeout', '15'))
        response['completed'] = True
    except exceptions.OpenStackRestAPIException as e:
        if httplib.UNAUTHORIZED == e.http_status_code:
            # Token rejected: flag it expired for the next attempt.
            response['error-code'] = nfvi.NFVI_ERROR_CODE.TOKEN_EXPIRED
            if self._token is not None:
                self._token.set_expired()
        else:
            DLOG.exception("Caught exception while trying to vote,"
                           "action_type=%s error=%s." % (action_type, e))
    except Exception as e:
        DLOG.exception("Caught exception while trying to vote "
                       "guest services, instance_uuid=%s, error=%s."
                       % (instance_uuid, e))
    finally:
        callback.send(response)
        callback.close()
def guest_services_notify(self, future, instance_uuid, host_name,
                          action_type, pre_notification, callback):
    """
    Guest Services Notify

    Coroutine (driven by ``future``): notifies the guest running in the
    instance that an action is about to start (``pre_notification``
    True) or has finished (False).  On success the response carries the
    guest's uuid, host-name, action-type and acknowledgement timeout.
    """
    response = dict()
    response['completed'] = False
    response['reason'] = ''
    try:
        future.set_timeouts(config.CONF.get('nfvi-timeouts', None))
        # Lazily (re-)fetch the keystone token when missing or expired.
        if self._token is None or self._token.is_expired():
            future.work(openstack.get_token, self._directory)
            future.result = (yield)
            if not future.result.is_complete() or \
                    future.result.data is None:
                DLOG.error("OpenStack get-token did not complete, "
                           "instance_uuid=%s." % instance_uuid)
                return
            self._token = future.result.data
        future.work(guest.guest_services_notify, self._token,
                    instance_uuid, host_name,
                    instance_get_event(action_type, pre_notification))
        future.result = (yield)
        if not future.result.is_complete():
            DLOG.error("Guest-Services_notify failed, action-type %s "
                       "did not complete, instance_uuid=%s" %
                       (action_type, instance_uuid))
            return
        notify_data = future.result.data
        # Echo back the identifiers, falling back to the request values.
        response['uuid'] = notify_data.get('uuid', instance_uuid)
        response['host-name'] = notify_data.get('hostname', host_name)
        response['action-type'] = notify_data.get('action', action_type)
        response['timeout'] = int(notify_data.get('timeout', '15'))
        response['completed'] = True
    except exceptions.OpenStackRestAPIException as e:
        if httplib.UNAUTHORIZED == e.http_status_code:
            # Token rejected: flag it expired for the next attempt.
            response['error-code'] = nfvi.NFVI_ERROR_CODE.TOKEN_EXPIRED
            if self._token is not None:
                self._token.set_expired()
        else:
            DLOG.exception("Caught exception while trying to notify,"
                           "action_type=%s error=%s." % (action_type, e))
    except Exception as e:
        DLOG.exception("Caught exception while trying to notify "
                       "guest services, instance_uuid=%s, error=%s."
                       % (instance_uuid, e))
    finally:
        callback.send(response)
        callback.close()
def create_host_services(self, future, host_uuid, host_name,
                         host_personality, callback):
    """
    Create Host Services, notify Guest to create services for a host

    Coroutine (driven by ``future``): no-op for hosts that do not run
    nova-compute.  Otherwise creates the host services in the guest
    service and immediately disables them (they are enabled later when
    the host goes in-service).  ``response['reason']`` is staged before
    each step so a failure reports which step failed.
    """
    response = dict()
    response['completed'] = False
    response['reason'] = ''
    try:
        future.set_timeouts(config.CONF.get('nfvi-timeouts', None))
        if self._host_supports_nova_compute(host_personality):
            response['reason'] = 'failed to get platform token from ' \
                                 'keystone'
            if self._token is None or \
                    self._token.is_expired():
                future.work(openstack.get_token, self._directory)
                future.result = (yield)
                if not future.result.is_complete() or \
                        future.result.data is None:
                    DLOG.error("OpenStack get-token did not complete, "
                               "host_uuid=%s, host_name=%s." % (host_uuid,
                                                                host_name))
                    return
                self._token = future.result.data
            response['reason'] = 'failed to create guest services'
            try:
                # Send the create request to Guest.
                future.work(guest.host_services_create,
                            self._token,
                            host_uuid, host_name)
                future.result = (yield)
                if not future.result.is_complete():
                    DLOG.error("Guest host-services-create failed, "
                               "operation did not complete, host_uuid=%s, "
                               "host_name=%s."
                               % (host_uuid, host_name))
                    return
                response['reason'] = 'failed to disable guest services'
                # Send the disable request to Guest
                future.work(guest.host_services_disable,
                            self._token,
                            host_uuid, host_name)
                future.result = (yield)
                if not future.result.is_complete():
                    # do not return since the disable will be retried
                    # by audit
                    DLOG.error("Guest host-services-disable failed, "
                               "operation did not complete, host_uuid=%s, "
                               "host_name=%s."
                               % (host_uuid, host_name))
            except exceptions.OpenStackRestAPIException as e:
                # Guest can send a 404 if it hasn't got the host
                # inventory yet.
                # Guest will catch up later, no need to fail here.
                if httplib.NOT_FOUND != e.http_status_code:
                    raise
        response['completed'] = True
        response['reason'] = ''
    except exceptions.OpenStackRestAPIException as e:
        if httplib.UNAUTHORIZED == e.http_status_code:
            # Token rejected: flag it expired for the next attempt.
            response['error-code'] = nfvi.NFVI_ERROR_CODE.TOKEN_EXPIRED
            if self._token is not None:
                self._token.set_expired()
        else:
            DLOG.exception("Caught exception while trying to create "
                           "guest services on host, error=%s." % e)
    except Exception as e:
        DLOG.exception("Caught exception while trying to create %s "
                       "guest services, error=%s." % (host_name, e))
    finally:
        callback.send(response)
        callback.close()
def delete_host_services(self, future, host_uuid, host_name,
                         host_personality, callback):
    """
    Delete Host Services, notify Guest to delete services for a host

    Coroutine (driven by ``future``): no-op for hosts that do not run
    nova-compute.  A 404 from the guest service means the host services
    are already gone and is treated as success.
    """
    response = dict()
    response['completed'] = False
    response['reason'] = ''
    try:
        future.set_timeouts(config.CONF.get('nfvi-timeouts', None))
        if self._host_supports_nova_compute(host_personality):
            response['reason'] = 'failed to get platform token from ' \
                                 'keystone'
            if self._token is None or \
                    self._token.is_expired():
                future.work(openstack.get_token, self._directory)
                future.result = (yield)
                if not future.result.is_complete() or \
                        future.result.data is None:
                    DLOG.error("OpenStack get-token did not complete, "
                               "host_uuid=%s, host_name=%s." % (host_uuid,
                                                                host_name))
                    return
                self._token = future.result.data
            response['reason'] = 'failed to delete guest services'
            # Send the delete request to Guest.
            future.work(guest.host_services_delete, self._token,
                        host_uuid)
            try:
                future.result = (yield)
                if not future.result.is_complete():
                    DLOG.error("Guest host-services-delete for host "
                               "failed, operation did not complete, "
                               "host_uuid=%s, host_name=%s."
                               % (host_uuid, host_name))
                    return
            except exceptions.OpenStackRestAPIException as e:
                # NOT_FOUND means already deleted; treat as success.
                if httplib.NOT_FOUND != e.http_status_code:
                    raise
        response['completed'] = True
        response['reason'] = ''
    except exceptions.OpenStackRestAPIException as e:
        if httplib.UNAUTHORIZED == e.http_status_code:
            # Token rejected: flag it expired for the next attempt.
            response['error-code'] = nfvi.NFVI_ERROR_CODE.TOKEN_EXPIRED
            if self._token is not None:
                self._token.set_expired()
        else:
            DLOG.exception("Caught exception while trying to delete "
                           "host services on host, error=%s." % e)
    except Exception as e:
        DLOG.exception("Caught exception while trying to delete %s "
                       "guest services, error=%s."
                       % (host_name, e))
    finally:
        callback.send(response)
        callback.close()
def enable_host_services(self, future, host_uuid, host_name,
                         host_personality, callback):
    """
    Enable Host Services, notify Guest to enable services for a host.

    Coroutine (driven by ``future``): no-op for hosts that do not run
    nova-compute.  ``response['reason']`` is staged before each step so
    a failure reports which step failed.
    """
    response = dict()
    response['completed'] = False
    response['reason'] = ''
    try:
        future.set_timeouts(config.CONF.get('nfvi-timeouts', None))
        if self._host_supports_nova_compute(host_personality):
            response['reason'] = 'failed to get platform token from ' \
                                 'keystone'
            if self._token is None or \
                    self._token.is_expired():
                future.work(openstack.get_token, self._directory)
                future.result = (yield)
                if not future.result.is_complete() or \
                        future.result.data is None:
                    DLOG.error("OpenStack get-token did not complete, "
                               "host_uuid=%s, host_name=%s." % (host_uuid,
                                                                host_name))
                    return
                self._token = future.result.data
            response['reason'] = 'failed to enable guest services'
            # Send the Enable request to Guest
            future.work(guest.host_services_enable, self._token,
                        host_uuid, host_name)
            future.result = (yield)
            if not future.result.is_complete():
                DLOG.error("Guest host-services-enable failed, operation "
                           "did not complete, host_uuid=%s, host_name=%s."
                           % (host_uuid, host_name))
                return
        response['completed'] = True
        response['reason'] = ''
    except exceptions.OpenStackRestAPIException as e:
        if httplib.UNAUTHORIZED == e.http_status_code:
            # Token rejected: flag it expired for the next attempt.
            response['error-code'] = nfvi.NFVI_ERROR_CODE.TOKEN_EXPIRED
            if self._token is not None:
                self._token.set_expired()
        else:
            DLOG.exception("Caught exception while trying to enable "
                           "guest services on host, error=%s." % e)
    except Exception as e:
        DLOG.exception("Caught exception while trying to enable %s "
                       "guest services, error=%s."
                       % (host_name, e))
    finally:
        callback.send(response)
        callback.close()
def disable_host_services(self, future, host_uuid, host_name,
                          host_personality, callback):
    """
    Notifies Guest to disable their services for the specified
    host (as applicable)

    Coroutine (driven by ``future``): no-op for hosts that do not run
    nova-compute.  An incomplete disable or a 404 from the guest
    service is tolerated; the disable is retried later by audit.
    """
    response = dict()
    response['completed'] = False
    response['reason'] = ''
    try:
        future.set_timeouts(config.CONF.get('nfvi-timeouts', None))
        # The following only applies to worker hosts
        if self._host_supports_nova_compute(host_personality):
            response['reason'] = 'failed to get platform token from ' \
                                 'keystone'
            if self._token is None or \
                    self._token.is_expired():
                future.work(openstack.get_token, self._directory)
                future.result = (yield)
                if not future.result.is_complete() or \
                        future.result.data is None:
                    DLOG.error("OpenStack get-token did not complete, "
                               "host_uuid=%s, host_name=%s." % (host_uuid,
                                                                host_name))
                    return
                self._token = future.result.data
            response['reason'] = 'failed to disable guest services'
            # Send the Disable request to Guest.
            future.work(guest.host_services_disable, self._token,
                        host_uuid, host_name)
            try:
                future.result = (yield)
                if not future.result.is_complete():
                    # Do not return since the disable will be retried
                    # by audit
                    DLOG.error("Guest host-services-disable failed, "
                               "operation did not complete, host_uuid=%s, "
                               "host_name=%s."
                               % (host_uuid, host_name))
            except exceptions.OpenStackRestAPIException as e:
                # NOT_FOUND means there is nothing to disable.
                if httplib.NOT_FOUND != e.http_status_code:
                    raise
        response['completed'] = True
        response['reason'] = ''
    except exceptions.OpenStackRestAPIException as e:
        if httplib.UNAUTHORIZED == e.http_status_code:
            # Token rejected: flag it expired for the next attempt.
            response['error-code'] = nfvi.NFVI_ERROR_CODE.TOKEN_EXPIRED
            if self._token is not None:
                self._token.set_expired()
        else:
            DLOG.exception("Caught exception while trying to disable "
                           "guest services, error=%s." % e)
    except Exception as e:
        DLOG.exception("Caught exception while trying to disable %s "
                       "guest services, error=%s."
                       % (host_name, e))
    finally:
        callback.send(response)
        callback.close()
def query_host_services(self, future, host_uuid, host_name,
                        host_personality, callback):
    """
    Query Host Services, return state of Guest services for a host.

    Coroutine (driven by ``future``): for hosts without nova-compute
    the state defaults to 'enabled'.  A failed query leaves the default
    'enabled' state but still completes.
    """
    response = dict()
    response['completed'] = False
    # Default for hosts with no guest services (and on query failure).
    response['result-data'] = 'enabled'
    response['reason'] = ''
    try:
        future.set_timeouts(config.CONF.get('nfvi-timeouts', None))
        if self._host_supports_nova_compute(host_personality):
            if self._token is None or \
                    self._token.is_expired():
                future.work(openstack.get_token, self._directory)
                future.result = (yield)
                if not future.result.is_complete() or \
                        future.result.data is None:
                    DLOG.error("OpenStack get-token did not complete, "
                               "host_uuid=%s, host_name=%s." % (host_uuid,
                                                                host_name))
                    return
                self._token = future.result.data
            # Send Query request to Guest
            future.work(guest.host_services_query, self._token,
                        host_uuid, host_name)
            future.result = (yield)
            if not future.result.is_complete():
                DLOG.error("Guest query-host-services failed, operation "
                           "did not complete, host_uuid=%s, host_name=%s."
                           % (host_uuid, host_name))
            else:
                result_data = future.result.data
                response['result-data'] = result_data['state']
        response['completed'] = True
    except exceptions.OpenStackRestAPIException as e:
        if httplib.UNAUTHORIZED == e.http_status_code:
            # Token rejected: flag it expired for the next attempt.
            response['error-code'] = nfvi.NFVI_ERROR_CODE.TOKEN_EXPIRED
            if self._token is not None:
                self._token.set_expired()
        else:
            DLOG.exception("Caught exception while trying to query "
                           "host services, error=%s." % e)
    except Exception as e:
        DLOG.exception("Caught exception while trying to query %s "
                       "nova or neutron openstack services, error=%s."
                       % (host_name, e))
    finally:
        callback.send(response)
        callback.close()
def host_services_rest_api_get_handler(self, request_dispatch):
    """
    Host-Services Rest-API GET handler

    Parses the host uuid/hostname from the request body, asks the
    registered host-services query callback for the host state, and
    replies with a JSON payload on success (BAD_REQUEST on invalid
    input or callback failure, NO_CONTENT on an empty body).
    """
    content_len = int(request_dispatch.headers.get('content-length', 0))
    content = request_dispatch.rfile.read(content_len)
    DLOG.info("Content=%s, len=%s" % (content, content_len))
    http_payload = None
    http_response = httplib.OK
    if content:
        host_data = json.loads(content)
        if host_data['uuid'] is None:
            DLOG.error("Invalid host uuid received")
            http_response = httplib.BAD_REQUEST
        elif host_data['hostname'] is None:
            # Consistency fix: log at error severity like the uuid case
            # (was DLOG.info).
            DLOG.error("Invalid host name received")
            http_response = httplib.BAD_REQUEST
        else:
            (success, host_state) = \
                self._host_services_query_callback(host_data['hostname'])
            if not success:
                http_response = httplib.BAD_REQUEST
            else:
                http_payload = dict()
                http_payload['uuid'] = host_data['uuid']
                http_payload['hostname'] = host_data['hostname']
                http_payload['state'] = host_state
    else:
        http_response = httplib.NO_CONTENT
    DLOG.debug("Host rest-api get path: %s." % request_dispatch.path)
    request_dispatch.send_response(http_response)
    if http_payload is not None:
        request_dispatch.send_header('Content-Type', 'application/json')
        request_dispatch.end_headers()
        request_dispatch.wfile.write(json.dumps(http_payload).encode())
    request_dispatch.done()
def guest_services_rest_api_get_handler(self, request_dispatch):
    """
    Guest-Services Rest-API GET handler

    Supports two URL forms: a per-instance query and a per-host query
    (returning all instances on the host).  Service records in the
    result are translated from admin-state to guest-service state
    before being returned as JSON.
    """
    # /nfvi-plugins/v1/instances/<instance-uuid>
    # /nfvi-plugins/v1/instances/?host_uuid=<host-uuid>
    host_uuid = None
    instance_uuid = None
    http_payload = None
    path = request_dispatch.path
    host_prefix = "host_uuid"
    if host_prefix in path:
        # Host-scoped query: the uuid follows the '=' in the query arg.
        host_uuid = path.split('=')[-1]
    else:
        # Instance-scoped query: the uuid is the last path component.
        instance_uuid = path.split('/')[-1]
    DLOG.debug("Guest-Services rest-api path=%s, host_uuid=%s, "
               "instance_uuid=%s" % (path, host_uuid, instance_uuid))
    http_response = httplib.OK
    (success, result) = \
        self._guest_services_query_callback(host_uuid, instance_uuid)
    if not success:
        http_response = httplib.BAD_REQUEST
    else:
        if instance_uuid:
            result['services'] = \
                get_services_with_guest_service_state(result['services'])
        else:
            for r in result['instances']:
                r['services'] = \
                    get_services_with_guest_service_state(r['services'])
        http_payload = result
    DLOG.debug("Guest-Services rest-api get path: %s." %
               request_dispatch.path)
    request_dispatch.send_response(http_response)
    if http_payload is not None:
        request_dispatch.send_header('Content-Type', 'application/json')
        request_dispatch.end_headers()
        request_dispatch.wfile.write(json.dumps(http_payload).encode())
    request_dispatch.done()
def guest_services_rest_api_patch_handler(self, request_dispatch):
    """
    Guest-Services Rest-API PATCH handler callback

    Dispatches guest-originated events to the registered callbacks:
      * 'service' - service state changes, forwarded as GuestService
        objects to the state-notify callbacks.
      * 'alarm'   - heartbeat failure reports, translated to NFVI
        availability status and recovery action for the alarm-notify
        callbacks.
      * 'action'  - guest vote/notify responses, translated to an NFVI
        action state for the action-notify callbacks.
    Replies BAD_REQUEST on missing fields or callback failure and
    NO_CONTENT on an empty body.
    """
    content_len = int(request_dispatch.headers.get('content-length', 0))
    content = request_dispatch.rfile.read(content_len)
    http_payload = None
    http_response = httplib.OK
    if content:
        instance_data = json.loads(content)
        instance_uuid = instance_data.get('uuid', None)
        host_name = instance_data.get('hostname', None)
        event_type = instance_data.get('event-type', None)
        event_data = instance_data.get('event-data', None)
        result = None
        if instance_uuid is None:
            DLOG.info("Invalid instance uuid received")
            http_response = httplib.BAD_REQUEST
        elif event_type is None:
            DLOG.info("Invalid event-type received")
            http_response = httplib.BAD_REQUEST
        elif event_data is None:
            DLOG.info("Invalid event-data received")
            http_response = httplib.BAD_REQUEST
        else:
            if 'service' == event_type:
                services = event_data.get('services', list())
                service_objs = list()
                for service in services:
                    restart_timeout = service.get('restart-timeout', None)
                    if restart_timeout is not None:
                        restart_timeout = int(restart_timeout)
                    service_obj = nfvi.objects.v1.GuestService(
                        guest_service_get_name(service['service']),
                        guest_service_get_admin_state(service['state']),
                        guest_service_get_oper_state(service['status']),
                        restart_timeout)
                    service_objs.append(service_obj)
                for callback in self._guest_services_state_notify_callbacks:
                    callback(instance_uuid, host_name, service_objs)
            elif 'alarm' == event_type:
                services = event_data.get('services', list())
                for service_data in services:
                    # Only heartbeat alarms are handled.
                    if guest.GUEST_SERVICE_NAME.HEARTBEAT == \
                            service_data['service']:
                        avail_status = service_data.get('state', None)
                        recovery_action = \
                            service_data.get('repair-action', None)
                        # Translate the guest repair-action into an
                        # NFVI instance action-type.
                        if 'reboot' == recovery_action:
                            recovery_action = \
                                nfvi.objects.v1.INSTANCE_ACTION_TYPE.REBOOT
                        elif 'stop' == recovery_action:
                            recovery_action = \
                                nfvi.objects.v1.INSTANCE_ACTION_TYPE.STOP
                        elif 'log' == recovery_action:
                            recovery_action = \
                                nfvi.objects.v1.INSTANCE_ACTION_TYPE.LOG
                        if 'failed' == avail_status:
                            avail_status = \
                                nfvi.objects.v1.INSTANCE_AVAIL_STATUS.FAILED
                        elif 'unhealthy' == avail_status:
                            avail_status = \
                                nfvi.objects.v1.INSTANCE_AVAIL_STATUS.UNHEALTHY
                        for callback in \
                                self._guest_services_alarm_notify_callbacks:
                            (success, result) = callback(instance_uuid,
                                                         avail_status,
                                                         recovery_action)
                            if not success:
                                DLOG.error("Callback failed for "
                                           "instance_uuid=%s" % instance_uuid)
                                http_response = httplib.BAD_REQUEST
            elif 'action' == event_type:
                action_type = \
                    instance_get_action_type(event_data.get('action'))
                guest_response = event_data.get('guest-response')
                if guest.GUEST_VOTE_STATE.REJECT == guest_response:
                    action_state = \
                        nfvi.objects.v1.INSTANCE_ACTION_STATE.REJECTED
                elif guest.GUEST_VOTE_STATE.ALLOW == guest_response:
                    action_state = \
                        nfvi.objects.v1.INSTANCE_ACTION_STATE.ALLOWED
                elif guest.GUEST_VOTE_STATE.PROCEED == guest_response:
                    action_state = \
                        nfvi.objects.v1.INSTANCE_ACTION_STATE.PROCEED
                else:
                    # Bug fix: the default previously assigned the
                    # guest-domain constant GUEST_VOTE_STATE.PROCEED,
                    # while every other branch assigns an NFVI
                    # INSTANCE_ACTION_STATE value; callbacks expect the
                    # latter.  Default to the NFVI proceed state.
                    action_state = \
                        nfvi.objects.v1.INSTANCE_ACTION_STATE.PROCEED
                    DLOG.info("Invalid guest-response received, "
                              "defaulting to proceed, response=%s."
                              % guest_response)
                reason = event_data.get('reason')
                for callback in \
                        self._guest_services_action_notify_callbacks:
                    result = None
                    success = callback(instance_uuid, action_type,
                                       action_state, reason)
                    if not success:
                        DLOG.error("callback failed for instance_uuid=%s"
                                   % instance_uuid)
                        http_response = httplib.BAD_REQUEST
            else:
                DLOG.info("Invalid event-type %s received" % event_type)
                http_response = httplib.BAD_REQUEST
        if httplib.OK == http_response:
            http_payload = result
    else:
        http_response = httplib.NO_CONTENT
    DLOG.debug("Guest-Services rest-api patch path: %s."
               % request_dispatch.path)
    request_dispatch.send_response(http_response)
    if http_payload is not None:
        request_dispatch.send_header('Content-Type', 'application/json')
        request_dispatch.end_headers()
        request_dispatch.wfile.write(json.dumps(http_payload).encode())
    request_dispatch.done()
    def register_host_services_query_callback(self, callback):
        """
        Register for Host Services query

        Only a single callback is held: registering again replaces any
        previously registered callback (the *_notify_* registrations
        append instead).
        """
        self._host_services_query_callback = callback
    def register_guest_services_query_callback(self, callback):
        """
        Register for Guest Services query

        Only a single callback is held: registering again replaces any
        previously registered callback.
        """
        self._guest_services_query_callback = callback
    def register_guest_services_state_notify_callback(self, callback):
        """
        Register for Guest Services notify service type event

        Multiple callbacks may be registered; each is appended to the
        callback list rather than replacing earlier registrations.
        """
        self._guest_services_state_notify_callbacks.append(callback)
    def register_guest_services_alarm_notify_callback(self, callback):
        """
        Register for Guest Services notify for alarm type event

        Multiple callbacks may be registered; each is appended to the
        callback list rather than replacing earlier registrations.
        """
        self._guest_services_alarm_notify_callbacks.append(callback)
    def register_guest_services_action_notify_callback(self, callback):
        """
        Register for Guest Services notify for action type event

        Multiple callbacks may be registered; each is appended to the
        callback list rather than replacing earlier registrations.
        """
        self._guest_services_action_notify_callbacks.append(callback)
    def initialize(self, config_file):
        """
        Initialize the plugin

        Loads configuration, resolves the platform and openstack service
        directories, and starts a REST API server with GET handlers for
        host and instance queries plus a PATCH handler for instance
        events.
        """
        config.load(config_file)
        # Service directories used to locate platform and openstack
        # endpoints.
        self._directory = openstack.get_directory(
            config, openstack.SERVICE_CATEGORY.PLATFORM)
        self._openstack_directory = openstack.get_directory(
            config, openstack.SERVICE_CATEGORY.OPENSTACK)
        # REST server bound to the host/port from the 'guest-rest-api'
        # configuration section.
        self._rest_api_server = rest_api.rest_api_get_server(
            config.CONF['guest-rest-api']['host'],
            config.CONF['guest-rest-api']['port'])
        self._rest_api_server.add_handler(
            'GET', '/nfvi-plugins/v1/hosts*',
            self.host_services_rest_api_get_handler)
        self._rest_api_server.add_handler(
            'GET', '/nfvi-plugins/v1/instances*',
            self.guest_services_rest_api_get_handler)
        self._rest_api_server.add_handler(
            'PATCH', '/nfvi-plugins/v1/instances*',
            self.guest_services_rest_api_patch_handler)
    def finalize(self):
        """
        Finalize the plugin

        No explicit teardown is performed; present to satisfy the plugin
        interface.
        """
        return
| 39.081535 | 83 | 0.546992 |
acf61ad98332167a157612cf92436eae780cf6f5 | 995 | py | Python | winapi__windows__ctypes/windows__toast_balloontip_notifications/run_notify.py | gil9red/SimplePyScripts | c191ce08fbdeb29377639184579e392057945154 | [
"CC-BY-4.0"
] | 117 | 2015-12-18T07:18:27.000Z | 2022-03-28T00:25:54.000Z | winapi__windows__ctypes/windows__toast_balloontip_notifications/run_notify.py | gil9red/SimplePyScripts | c191ce08fbdeb29377639184579e392057945154 | [
"CC-BY-4.0"
] | 8 | 2018-10-03T09:38:46.000Z | 2021-12-13T19:51:09.000Z | winapi__windows__ctypes/windows__toast_balloontip_notifications/run_notify.py | gil9red/SimplePyScripts | c191ce08fbdeb29377639184579e392057945154 | [
"CC-BY-4.0"
] | 28 | 2016-08-02T17:43:47.000Z | 2022-03-21T08:31:12.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'ipetrash'
# from multiprocessing import Process
from threading import Thread
from notifications import WindowsBalloonTip
def run(title: str, text: str, duration: int = 20):
    # Synchronous call that shows a Windows balloon-tip notification.
    # `duration` is presumably in seconds — confirm against
    # notifications.WindowsBalloonTip.
    WindowsBalloonTip.balloon_tip(title, text, duration)
# def run_in_process(title: str, text: str, duration: int = 20):
# Process(target=run, args=(title, text, duration)).start()
def run_in_thread(title: str, text: str, duration: int = 20):
    """Show the balloon notification on a background thread (non-blocking)."""
    worker = Thread(target=run, args=(title, text, duration))
    worker.start()
if __name__ == '__main__':
    # Demo: fire three notifications concurrently, one background thread
    # each (strings are intentionally kept in Russian — they are runtime
    # payload, not comments).
    run_in_thread('Уведомление1', 'Проверь!!!!')
    run_in_thread('Уведомление2', 'Проверь!!!!')
    run_in_thread('Уведомление3', 'Проверь!!!!')
    # Earlier process-based variants, kept for reference:
    # Process(target=run, args=('Уведомление', 'Проверь!!!!'), daemon=True).start()
    # run_in_process('Уведомление', 'Проверь 2!!!!')
    # Process(target=run, args=('Уведомление', 'Проверь 3!!!!'), daemon=True).start()
    #
    # import time
    # time.sleep(5)
acf61ae1b6f5d51c32f5bcc350387e65e2f96515 | 7,835 | py | Python | test/api/test_team.py | mbovo/grafana_api | 791f7b77d60bc806049853f1e5229f45735bd921 | [
"MIT"
] | null | null | null | test/api/test_team.py | mbovo/grafana_api | 791f7b77d60bc806049853f1e5229f45735bd921 | [
"MIT"
] | null | null | null | test/api/test_team.py | mbovo/grafana_api | 791f7b77d60bc806049853f1e5229f45735bd921 | [
"MIT"
] | 1 | 2019-07-04T09:53:06.000Z | 2019-07-04T09:53:06.000Z | import unittest
import requests_mock
from grafana_api.grafana_face import GrafanaFace
class TeamsTestCase(unittest.TestCase):
    """Tests for the Teams endpoints of GrafanaFace.

    Every test mocks the Grafana HTTP API with requests_mock, so no real
    Grafana server is required.
    """

    def setUp(self):
        # Client under test; the Mocker decorator on each test intercepts
        # all HTTP traffic issued by this client.
        self.cli = GrafanaFace(
            ("admin", "admin"), host="localhost", url_path_prefix="", protocol="http"
        )

    @requests_mock.Mocker()
    def test_search_teams_url_encodes_query(self, m):
        """Spaces in the search query must be URL-encoded in the request."""
        m.get(
            "http://localhost/api/teams/search?query=my%20team&page=1",
            json={
                "totalCount": 1,
                "teams": [
                    {
                        "id": 1,
                        "orgId": 1,
                        "name": "MyTestTeam",
                        "email": "",
                        "avatarUrl": "/avatar/3f49c15916554246daa714b9bd0ee398",
                        "memberCount": 1,
                    }
                ],
                "page": 1,
                "perPage": 1000,
            },
        )
        teams = self.cli.teams.search_teams("my team")
        self.assertEqual(teams[0]["name"], "MyTestTeam")
        self.assertEqual(len(teams), 1)

    @requests_mock.Mocker()
    def test_search_teams_loads_all_pages(self, m):
        """When no page is given, all result pages are fetched and merged."""
        m.get(
            "http://localhost/api/teams/search?query=team&page=1",
            json={
                "totalCount": 2,
                "teams": [
                    {
                        "id": 1,
                        "orgId": 1,
                        "name": "MyTestTeam",
                        "email": "",
                        "avatarUrl": "/avatar/3f49c15916554246daa714b9bd0ee398",
                        "memberCount": 1,
                    }
                ],
                "page": 1,
                "perPage": 1,
            },
        )
        m.get(
            "http://localhost/api/teams/search?query=team&page=2",
            json={
                "totalCount": 2,
                "teams": [
                    {
                        "id": 2,
                        "orgId": 1,
                        "name": "SecondTeam",
                        "email": "",
                        "avatarUrl": "/avatar/3f49c15916554246daa714b9bd0ee398",
                        "memberCount": 23,
                    }
                ],
                "page": 2,
                "perPage": 1,
            },
        )
        teams = self.cli.teams.search_teams("team")
        self.assertEqual(teams[0]["name"], "MyTestTeam")
        self.assertEqual(teams[1]["name"], "SecondTeam")
        self.assertEqual(len(teams), 2)

    @requests_mock.Mocker()
    def test_search_teams_only_loads_requested_page(self, m):
        """An explicit page argument fetches exactly that page."""
        m.get(
            "http://localhost/api/teams/search?query=my%20team&page=2",
            json={
                "totalCount": 10,
                "teams": [
                    {
                        "id": 2,
                        "orgId": 1,
                        "name": "MyTestTeam",
                        "email": "",
                        "avatarUrl": "/avatar/3f49c15916554246daa714b9bd0ee398",
                        "memberCount": 1,
                    }
                ],
                "page": 1,
                "perPage": 1,
            },
        )
        teams = self.cli.teams.search_teams("my team", 2)
        self.assertEqual(teams[0]["name"], "MyTestTeam")
        self.assertEqual(len(teams), 1)

    @requests_mock.Mocker()
    def test_get_team_by_name(self, m):
        """Lookup by exact name uses the ?name= query parameter."""
        m.get(
            "http://localhost/api/teams/search?name=my%20team",
            json={
                "totalCount": 1,
                "teams": [
                    {
                        "id": 2,
                        "orgId": 1,
                        "name": "my team",
                        "email": "",
                        "avatarUrl": "/avatar/3f49c15916554246daa714b9bd0ee398",
                        "memberCount": 1,
                    }
                ],
                "page": 1,
                "perPage": 1000,
            },
        )
        teams = self.cli.teams.get_team_by_name("my team")
        self.assertEqual(teams[0]["name"], "my team")
        self.assertEqual(len(teams), 1)

    @requests_mock.Mocker()
    def test_get_team(self, m):
        """Fetching a single team by id parses the team payload."""
        m.get(
            "http://localhost/api/teams/1",
            json={
                "id": 1,
                "orgId": 1,
                "name": "MyTestTeam",
                "email": "",
                "created": "2017-12-15T10:40:45+01:00",
                "updated": "2017-12-15T10:40:45+01:00",
            },
        )
        team = self.cli.teams.get_team("1")
        self.assertEqual(team["name"], "MyTestTeam")

    @requests_mock.Mocker()
    def test_add_team(self, m):
        """Creating a team returns the new team id from the API."""
        m.post(
            "http://localhost/api/teams", json={"message": "Team created", "teamId": 2}
        )
        team = {"name": "MySecondTestTeam", "email": "email@test.com"}
        new_team = self.cli.teams.add_team(team)
        self.assertEqual(new_team["teamId"], 2)

    @requests_mock.Mocker()
    def test_update_team(self, m):
        """Updating a team PUTs to /api/teams/<id>."""
        m.put("http://localhost/api/teams/3", json={"message": "Team updated"})
        team = {"name": "MyThirdTestTeam", "email": "email@test.com"}
        response = self.cli.teams.update_team(3, team)
        self.assertEqual(response["message"], "Team updated")

    @requests_mock.Mocker()
    def test_delete_team(self, m):
        """Deleting a team returns True on success."""
        m.delete("http://localhost/api/teams/3", json={"message": "Team deleted"})
        response = self.cli.teams.delete_team(3)
        self.assertEqual(response, True)

    @requests_mock.Mocker()
    def test_get_team_members(self, m):
        """Member listing parses each member record."""
        m.get(
            "http://localhost/api/teams/1/members",
            json=[
                {
                    "orgId": 1,
                    "teamId": 1,
                    "userId": 3,
                    "email": "user1@email.com",
                    "login": "user1",
                    "avatarUrl": "/avatar/1b3c32f6386b0185c40d359cdc733a79",
                }
            ],
        )
        members = self.cli.teams.get_team_members("1")
        self.assertEqual(members[0]["login"], "user1")

    @requests_mock.Mocker()
    def test_add_team_member(self, m):
        """Adding a member POSTs the user id in the JSON body."""
        m.post(
            "http://localhost/api/teams/1/members",
            json={"message": "Member added to Team"},
        )
        add_res = self.cli.teams.add_team_member("1", "3")
        # BUG FIX: request_history returns a fresh snapshot list on every
        # access, so it must be read AFTER the request has been issued —
        # reading it before the call yields an empty list.
        history = m.request_history
        self.assertEqual(history[0].json()["userId"], "3")
        self.assertEqual(add_res["message"], "Member added to Team")

    @requests_mock.Mocker()
    def test_remove_team_member(self, m):
        """Removing a member DELETEs /api/teams/<tid>/members/<uid>."""
        m.delete(
            "http://localhost/api/teams/13/members/2",
            json={"message": "Team member removed"},
        )
        remove_res = self.cli.teams.remove_team_member("13", "2")
        self.assertEqual(remove_res["message"], "Team member removed")

    @requests_mock.Mocker()
    def test_get_team_preferences(self, m):
        """Preference fetch parses the preferences payload."""
        m.get(
            "http://localhost/api/teams/1/preferences",
            json={"theme": "", "homeDashboardId": 0, "timezone": ""},
        )
        prefs = self.cli.teams.get_team_preferences("1")
        self.assertEqual(prefs["homeDashboardId"], 0)

    @requests_mock.Mocker()
    def test_update_team_preferences(self, m):
        """Preference update PUTs the preference dict as JSON."""
        m.put(
            "http://localhost/api/teams/1/preferences",
            json={"message": "Preferences updated"},
        )
        prefs = {"theme": "light", "homeDashboardId": 0, "timezone": ""}
        updates = self.cli.teams.update_team_preferences("1", prefs)
        history = m.request_history
        json_payload = history[0].json()
        self.assertEqual(json_payload["theme"], "light")
        self.assertEqual(updates["message"], "Preferences updated")
| 34.364035 | 87 | 0.47007 |
acf61de4623293d5561d37b631cf175e80b25750 | 35,733 | py | Python | cinder/volume/drivers/hpe/hpe_3par_iscsi.py | elastx/cinder | 00519aef92258275b68b78355734cda31c354839 | [
"Apache-2.0"
] | 3 | 2015-04-02T21:44:36.000Z | 2016-04-29T21:19:04.000Z | cinder/volume/drivers/hpe/hpe_3par_iscsi.py | elastx/cinder | 00519aef92258275b68b78355734cda31c354839 | [
"Apache-2.0"
] | 3 | 2016-04-29T21:45:26.000Z | 2016-05-04T19:41:23.000Z | cinder/volume/drivers/hpe/hpe_3par_iscsi.py | elastx/cinder | 00519aef92258275b68b78355734cda31c354839 | [
"Apache-2.0"
] | 4 | 2016-01-27T00:25:52.000Z | 2021-03-25T19:54:08.000Z | # (c) Copyright 2012-2015 Hewlett Packard Enterprise Development LP
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""
Volume driver for HPE 3PAR Storage array.
This driver requires 3.1.3 or later firmware on the 3PAR array, using
the 4.x version of the hpe3parclient.
You will need to install the python hpe3parclient.
sudo pip install --upgrade "hpe3parclient>=4.0"
Set the following in the cinder.conf file to enable the
3PAR iSCSI Driver along with the required flags:
volume_driver=cinder.volume.drivers.hpe.hpe_3par_iscsi.HPE3PARISCSIDriver
"""
import re
import sys
try:
from hpe3parclient import exceptions as hpeexceptions
except ImportError:
hpeexceptions = None
from oslo_log import log as logging
from oslo_utils.excutils import save_and_reraise_exception
from cinder import coordination
from cinder import exception
from cinder.i18n import _
from cinder import interface
from cinder import utils
from cinder.volume.drivers.hpe import hpe_3par_base as hpebasedriver
from cinder.volume import volume_utils
LOG = logging.getLogger(__name__)
# EXISTENT_PATH error code returned from hpe3parclient when a host
# WWN/iSCSI name is already used by another host.
EXISTENT_PATH = 73
# Port appended to hpe3par_iscsi_ips entries that do not specify one.
DEFAULT_ISCSI_PORT = 3260
# Volume-metadata keys under which per-volume CHAP credentials are stored
# on the array.
CHAP_USER_KEY = "HPQ-cinder-CHAP-name"
CHAP_PASS_KEY = "HPQ-cinder-CHAP-secret"
@interface.volumedriver
class HPE3PARISCSIDriver(hpebasedriver.HPE3PARDriverBase):
"""OpenStack iSCSI driver to enable 3PAR storage array.
Version history:
.. code-block:: none
1.0 - Initial driver
1.1 - QoS, extend volume, multiple iscsi ports, remove domain,
session changes, faster clone, requires 3.1.2 MU2 firmware.
1.2.0 - Updated the use of the hp3parclient to 2.0.0 and refactored
the drivers to use the new APIs.
1.2.1 - Synchronized extend_volume method.
1.2.2 - Added try/finally around client login/logout.
1.2.3 - log exceptions before raising
1.2.4 - Fixed iSCSI active path bug #1224594
1.2.5 - Added metadata during attach/detach bug #1258033
1.2.6 - Use least-used iscsi n:s:p for iscsi volume attach bug #1269515
This update now requires 3.1.2 MU3 firmware
1.3.0 - Removed all SSH code. We rely on the hp3parclient now.
2.0.0 - Update hp3parclient API uses 3.0.x
2.0.2 - Add back-end assisted volume migrate
2.0.3 - Added support for managing/unmanaging of volumes
2.0.4 - Added support for volume retype
2.0.5 - Added CHAP support, requires 3.1.3 MU1 firmware
and hp3parclient 3.1.0.
2.0.6 - Fixing missing login/logout around attach/detach bug #1367429
2.0.7 - Add support for pools with model update
2.0.8 - Migrate without losing type settings bug #1356608
2.0.9 - Removing locks bug #1381190
2.0.10 - Add call to queryHost instead SSH based findHost #1398206
2.0.11 - Added missing host name during attach fix #1398206
2.0.12 - Removed usage of host name cache #1398914
2.0.13 - Update LOG usage to fix translations. bug #1384312
2.0.14 - Do not allow a different iSCSI IP (hp3par_iscsi_ips) to be
used during live-migration. bug #1423958
2.0.15 - Added support for updated detach_volume attachment.
2.0.16 - Added encrypted property to initialize_connection #1439917
2.0.17 - Python 3 fixes
2.0.18 - Improved VLUN creation and deletion logic. #1469816
2.0.19 - Changed initialize_connection to use getHostVLUNs. #1475064
2.0.20 - Adding changes to support 3PAR iSCSI multipath.
2.0.21 - Adds consistency group support
2.0.22 - Update driver to use ABC metaclasses
2.0.23 - Added update_migrated_volume. bug # 1492023
3.0.0 - Rebranded HP to HPE.
3.0.1 - Python 3 support
3.0.2 - Remove db access for consistency groups
3.0.3 - Fix multipath dictionary key error. bug #1522062
3.0.4 - Adds v2 managed replication support
3.0.5 - Adds v2 unmanaged replication support
3.0.6 - Adding manage/unmanage snapshot support
3.0.7 - Optimize array ID retrieval
3.0.8 - Update replication to version 2.1
3.0.9 - Use same LUN ID for each VLUN path #1551994
3.0.10 - Remove metadata that tracks the instance ID. bug #1572665
3.0.11 - _create_3par_iscsi_host() now accepts iscsi_iqn as list only.
Bug #1590180
3.0.12 - Added entry point tracing
3.0.13 - Handling HTTP conflict 409, host WWN/iSCSI name already used
by another host, while creating 3PAR iSCSI Host. bug #1642945
3.0.14 - Handle manage and unmanage hosts present. bug #1648067
3.0.15 - Adds consistency group capability in generic volume groups.
3.0.16 - Get host from os-brick connector. bug #1690244
4.0.0 - Adds base class.
4.0.1 - Update CHAP on host record when volume is migrated
to new compute host. bug # 1737181
4.0.2 - Handle force detach case. bug #1686745
4.0.3 - Set proper backend on subsequent operation, after group
failover. bug #1773069
4.0.4 - Added Peer Persistence feature
4.0.5 - Added Primera array check. bug #1849525
"""
VERSION = "4.0.5"
# The name of the CI wiki page.
CI_WIKI_NAME = "HPE_Storage_CI"
    def __init__(self, *args, **kwargs):
        super(HPE3PARISCSIDriver, self).__init__(*args, **kwargs)
        # Transport protocol advertised by this driver; the shared base
        # class is protocol-agnostic.
        self.protocol = 'iSCSI'
    def _do_setup(self, common):
        """Protocol-specific setup: reject Primera arrays, discover ports.

        Primera arrays are FC-only for this driver, so setup aborts with
        NotImplementedError on them. Otherwise the per-backend iSCSI port
        cache is initialized from the array.
        """
        client_obj = common.client
        is_primera = client_obj.is_primera_array()
        if is_primera:
            LOG.error("For Primera, only FC is supported. "
                      "iSCSI cannot be used")
            raise NotImplementedError()
        # Maps backend API URL -> {iscsi_ip: {'ip_port', 'nsp', 'iqn'}}.
        self.iscsi_ips = {}
        common.client_login()
        try:
            self.initialize_iscsi_ports(common)
        finally:
            # Always release the array session, even on discovery failure.
            self._logout(common)
    def initialize_iscsi_ports(self, common,
                               remote_target=None, remote_client=None):
        """Discover usable iSCSI target ports and cache them per backend.

        Cross-references the configured IP list (hpe3par_iscsi_ips plus the
        single iscsi_ip_address option) against the array's active iSCSI
        target ports and stores the valid intersection in self.iscsi_ips,
        keyed by the backend's API URL. Raises InvalidInput when no
        configured IP matches an active target port.
        """
        # map iscsi_ip-> ip_port
        #         -> iqn
        #         -> nsp
        iscsi_ip_list = {}
        temp_iscsi_ip = {}
        if remote_target:
            backend_conf = remote_target
        else:
            backend_conf = common._client_conf
        # use the 3PAR ip_addr list for iSCSI configuration
        if len(backend_conf['hpe3par_iscsi_ips']) > 0:
            # add port values to ip_addr, if necessary
            for ip_addr in backend_conf['hpe3par_iscsi_ips']:
                ip = ip_addr.split(':')
                if len(ip) == 1:
                    temp_iscsi_ip[ip_addr] = {'ip_port': DEFAULT_ISCSI_PORT}
                elif len(ip) == 2:
                    temp_iscsi_ip[ip[0]] = {'ip_port': ip[1]}
                else:
                    LOG.warning("Invalid IP address format '%s'", ip_addr)
        # add the single value iscsi_ip_address option to the IP dictionary.
        # This way we can see if it's a valid iSCSI IP. If it's not valid,
        # we won't use it and won't bother to report it, see below
        if 'iscsi_ip_address' in backend_conf:
            if (backend_conf['iscsi_ip_address'] not in temp_iscsi_ip):
                ip = backend_conf['iscsi_ip_address']
                ip_port = backend_conf['iscsi_port']
                temp_iscsi_ip[ip] = {'ip_port': ip_port}
        # get all the valid iSCSI ports from 3PAR
        # when found, add the valid iSCSI ip, ip port, iqn and nsp
        # to the iSCSI IP dictionary
        iscsi_ports = common.get_active_iscsi_target_ports(remote_client)
        for port in iscsi_ports:
            ip = port['IPAddr']
            if ip in temp_iscsi_ip:
                ip_port = temp_iscsi_ip[ip]['ip_port']
                iscsi_ip_list[ip] = {'ip_port': ip_port,
                                     'nsp': port['nsp'],
                                     'iqn': port['iSCSIName']}
                del temp_iscsi_ip[ip]
        # if the single value iscsi_ip_address option is still in the
        # temp dictionary it's because it defaults to $my_ip which doesn't
        # make sense in this context. So, if present, remove it and move on.
        if 'iscsi_ip_address' in backend_conf:
            if backend_conf['iscsi_ip_address'] in temp_iscsi_ip:
                del temp_iscsi_ip[backend_conf['iscsi_ip_address']]
        # lets see if there are invalid iSCSI IPs left in the temp dict
        if len(temp_iscsi_ip) > 0:
            LOG.warning("Found invalid iSCSI IP address(s) in "
                        "configuration option(s) hpe3par_iscsi_ips or "
                        "target_ip_address '%s.'",
                        (", ".join(temp_iscsi_ip)))
        if not len(iscsi_ip_list):
            msg = _('At least one valid iSCSI IP address must be set.')
            LOG.error(msg)
            raise exception.InvalidInput(reason=msg)
        if remote_target:
            self.iscsi_ips[remote_target['hpe3par_api_url']] = iscsi_ip_list
        else:
            self.iscsi_ips[common._client_conf['hpe3par_api_url']] = (
                iscsi_ip_list)
    def _initialize_connection_common(self, volume, connector, common,
                                      host, iscsi_ips, ready_ports,
                                      target_portals, target_iqns, target_luns,
                                      remote_client=None):
        """Create/reuse one VLUN per ready iSCSI port and record the paths.

        Appends the resulting portal addresses, target IQNs and LUN ids to
        the caller-supplied target_portals/target_iqns/target_luns lists
        (mutated in place). The same LUN id is reused on every path.
        """
        # Target portal ips are defined in cinder.conf.
        target_portal_ips = iscsi_ips.keys()
        # Collect all existing VLUNs for this volume/host combination.
        existing_vluns = common.find_existing_vluns(volume, host,
                                                    remote_client)
        # Cycle through each ready iSCSI port and determine if a new
        # VLUN should be created or an existing one used.
        lun_id = None
        for port in ready_ports:
            iscsi_ip = port['IPAddr']
            if iscsi_ip in target_portal_ips:
                vlun = None
                # check for an already existing VLUN matching the
                # nsp for this iSCSI IP. If one is found, use it
                # instead of creating a new VLUN.
                for v in existing_vluns:
                    portPos = common.build_portPos(
                        iscsi_ips[iscsi_ip]['nsp'])
                    if v['portPos'] == portPos:
                        vlun = v
                        break
                else:
                    vlun = common.create_vlun(
                        volume, host, iscsi_ips[iscsi_ip]['nsp'],
                        lun_id=lun_id, remote_client=remote_client)
                # We want to use the same LUN ID for every port
                if lun_id is None:
                    lun_id = vlun['lun']
                iscsi_ip_port = "%s:%s" % (
                    iscsi_ip, iscsi_ips[iscsi_ip]['ip_port'])
                target_portals.append(iscsi_ip_port)
                target_iqns.append(port['iSCSIName'])
                target_luns.append(vlun['lun'])
            else:
                LOG.warning("iSCSI IP: '%s' was not found in "
                            "hpe3par_iscsi_ips list defined in "
                            "cinder.conf.", iscsi_ip)
    @utils.trace
    @coordination.synchronized('3par-{volume.id}')
    def initialize_connection(self, volume, connector):
        """Assigns the volume to a server.

        Assign any created volume to a compute node/host so that it can be
        used from that host.

        This driver returns a driver_volume_type of 'iscsi'.
        The format of the driver data is defined in _get_iscsi_properties.
        Example return value:

        .. code-block:: default

            {
                'driver_volume_type': 'iscsi',
                'data': {
                    'encrypted': False,
                    'target_discovered': True,
                    'target_iqn': 'iqn.2010-10.org.openstack:volume-00000001',
                    'target_portal': '127.0.0.1:3260',
                    'volume_id': 1,
                }
            }

        Steps to export a volume on 3PAR
        * Get the 3PAR iSCSI iqn
        * Create a host on the 3par
        * create vlun on the 3par
        """
        LOG.debug("volume id: %(volume_id)s",
                  {'volume_id': volume['id']})
        array_id = self.get_volume_replication_driver_data(volume)
        common = self._login(array_id=array_id)
        try:
            # If the volume has been failed over, we need to reinitialize
            # iSCSI ports so they represent the new array.
            if volume.get('replication_status') == 'failed-over' and (
                common._client_conf['hpe3par_api_url'] not in self.iscsi_ips):
                self.initialize_iscsi_ports(common)
            # Grab the correct iSCSI ports
            iscsi_ips = self.iscsi_ips[common._client_conf['hpe3par_api_url']]
            # we have to make sure we have a host
            host, username, password, cpg = self._create_host(
                common,
                volume,
                connector)
            multipath = connector.get('multipath')
            LOG.debug("multipath: %(multipath)s",
                      {'multipath': multipath})
            if multipath:
                # Multipath attach: export one path per ready iSCSI port.
                ready_ports = common.client.getiSCSIPorts(
                    state=common.client.PORT_STATE_READY)
                target_portals = []
                target_iqns = []
                target_luns = []
                self._initialize_connection_common(
                    volume, connector, common,
                    host, iscsi_ips, ready_ports,
                    target_portals, target_iqns, target_luns)
                if volume.get('replication_status') == 'enabled':
                    LOG.debug('This is a replication setup')
                    remote_target = common._replication_targets[0]
                    replication_mode = remote_target['replication_mode']
                    quorum_witness_ip = (
                        remote_target.get('quorum_witness_ip'))
                    if replication_mode == 1:
                        LOG.debug('replication_mode is sync')
                        if quorum_witness_ip:
                            LOG.debug('quorum_witness_ip is present')
                            LOG.debug('Peer Persistence has been configured')
                        else:
                            LOG.debug('Since quorum_witness_ip is absent, '
                                      'considering this as Active/Passive '
                                      'replication')
                    else:
                        LOG.debug('Active/Passive replication has been '
                                  'configured')
                    # Peer Persistence only: also export paths from the
                    # remote array so both arrays serve the volume.
                    if replication_mode == 1 and quorum_witness_ip:
                        remote_client = (
                            common._create_replication_client(remote_target))
                        self.initialize_iscsi_ports(
                            common, remote_target, remote_client)
                        remote_iscsi_ips = (
                            self.iscsi_ips[remote_target['hpe3par_api_url']])
                        # we have to make sure we have a host
                        host, username, password, cpg = (
                            self._create_host(
                                common, volume, connector,
                                remote_target, cpg, remote_client))
                        ready_ports = remote_client.getiSCSIPorts(
                            state=remote_client.PORT_STATE_READY)
                        self._initialize_connection_common(
                            volume, connector, common,
                            host, remote_iscsi_ips, ready_ports,
                            target_portals, target_iqns, target_luns,
                            remote_client)
                        common._destroy_replication_client(remote_client)
                info = {'driver_volume_type': 'iscsi',
                        'data': {'target_portals': target_portals,
                                 'target_iqns': target_iqns,
                                 'target_luns': target_luns,
                                 'target_discovered': True
                                 }
                        }
            else:
                # Single-path attach: export the volume out one NSP only.
                least_used_nsp = None
                # check if a VLUN already exists for this host
                existing_vlun = common.find_existing_vlun(volume, host)
                if existing_vlun:
                    # We override the nsp here on purpose to force the
                    # volume to be exported out the same IP as it already is.
                    # This happens during nova live-migration, we want to
                    # disable the picking of a different IP that we export
                    # the volume to, or nova complains.
                    least_used_nsp = common.build_nsp(existing_vlun['portPos'])
                if not least_used_nsp:
                    least_used_nsp = self._get_least_used_nsp_for_host(
                        common,
                        host['name'])
                vlun = None
                if existing_vlun is None:
                    # now that we have a host, create the VLUN
                    vlun = common.create_vlun(volume, host, least_used_nsp)
                else:
                    vlun = existing_vlun
                if least_used_nsp is None:
                    LOG.warning("Least busy iSCSI port not found, "
                                "using first iSCSI port in list.")
                    iscsi_ip = list(iscsi_ips)[0]
                else:
                    iscsi_ip = self._get_ip_using_nsp(least_used_nsp, common)
                iscsi_ip_port = iscsi_ips[iscsi_ip]['ip_port']
                iscsi_target_iqn = iscsi_ips[iscsi_ip]['iqn']
                info = {'driver_volume_type': 'iscsi',
                        'data': {'target_portal': "%s:%s" %
                                 (iscsi_ip, iscsi_ip_port),
                                 'target_iqn': iscsi_target_iqn,
                                 'target_lun': vlun['lun'],
                                 'target_discovered': True
                                 }
                        }
            if common._client_conf['hpe3par_iscsi_chap_enabled']:
                info['data']['auth_method'] = 'CHAP'
                info['data']['auth_username'] = username
                info['data']['auth_password'] = password
            encryption_key_id = volume.get('encryption_key_id', None)
            info['data']['encrypted'] = encryption_key_id is not None
            return info
        finally:
            self._logout(common)
    @utils.trace
    @coordination.synchronized('3par-{volume.id}')
    def terminate_connection(self, volume, connector, **kwargs):
        """Driver entry point to detach a volume from an instance.

        A None connector means a force-detach: the volume is unexported
        without a specific host/initiator. For Peer Persistence (sync
        replication with a quorum witness) the detach is also driven
        through a client for the remote array.
        """
        array_id = self.get_volume_replication_driver_data(volume)
        common = self._login(array_id=array_id)
        try:
            is_force_detach = connector is None
            remote_client = None
            multipath = False
            if connector:
                multipath = connector.get('multipath')
            LOG.debug("multipath: %(multipath)s",
                      {'multipath': multipath})
            if multipath:
                if volume.get('replication_status') == 'enabled':
                    LOG.debug('This is a replication setup')
                    remote_target = common._replication_targets[0]
                    replication_mode = remote_target['replication_mode']
                    quorum_witness_ip = (
                        remote_target.get('quorum_witness_ip'))
                    if replication_mode == 1:
                        LOG.debug('replication_mode is sync')
                        if quorum_witness_ip:
                            LOG.debug('quorum_witness_ip is present')
                            LOG.debug('Peer Persistence has been configured')
                        else:
                            LOG.debug('Since quorum_witness_ip is absent, '
                                      'considering this as Active/Passive '
                                      'replication')
                    else:
                        LOG.debug('Active/Passive replication has been '
                                  'configured')
                    if replication_mode == 1 and quorum_witness_ip:
                        remote_client = (
                            common._create_replication_client(remote_target))
            if is_force_detach:
                common.terminate_connection(volume, None, None)
            else:
                hostname = common._safe_hostname(connector['host'])
                common.terminate_connection(
                    volume,
                    hostname,
                    iqn=connector['initiator'],
                    remote_client=remote_client)
            # Drop the per-volume CHAP metadata now that it is unexported.
            self._clear_chap_3par(common, volume)
        finally:
            self._logout(common)
def _clear_chap_3par(self, common, volume):
"""Clears CHAP credentials on a 3par volume.
Ignore exceptions caused by the keys not being present on a volume.
"""
vol_name = common._get_3par_vol_name(volume['id'])
try:
common.client.removeVolumeMetaData(vol_name, CHAP_USER_KEY)
except hpeexceptions.HTTPNotFound:
pass
except Exception:
raise
try:
common.client.removeVolumeMetaData(vol_name, CHAP_PASS_KEY)
except hpeexceptions.HTTPNotFound:
pass
except Exception:
raise
def _create_3par_iscsi_host(self, common, hostname, iscsi_iqn, domain,
persona_id, remote_client=None):
"""Create a 3PAR host.
Create a 3PAR host, if there is already a host on the 3par using
the same iqn but with a different hostname, return the hostname
used by 3PAR.
"""
# first search for an existing host
host_found = None
if remote_client:
client_obj = remote_client
else:
client_obj = common.client
hosts = client_obj.queryHost(iqns=iscsi_iqn)
if hosts and hosts['members'] and 'name' in hosts['members'][0]:
host_found = hosts['members'][0]['name']
if host_found is not None:
return host_found
else:
persona_id = int(persona_id)
try:
client_obj.createHost(hostname, iscsiNames=iscsi_iqn,
optional={'domain': domain,
'persona': persona_id})
except hpeexceptions.HTTPConflict as path_conflict:
msg = "Create iSCSI host caught HTTP conflict code: %s"
with save_and_reraise_exception(reraise=False) as ctxt:
if path_conflict.get_code() is EXISTENT_PATH:
# Handle exception : EXISTENT_PATH - host WWN/iSCSI
# name already used by another host
hosts = client_obj.queryHost(iqns=iscsi_iqn)
if hosts and hosts['members'] and (
'name' in hosts['members'][0]):
hostname = hosts['members'][0]['name']
else:
# re-raise last caught exception
ctxt.reraise = True
LOG.exception(msg, path_conflict.get_code())
else:
# re-raise last caught exception
# for other HTTP conflict
ctxt.reraise = True
LOG.exception(msg, path_conflict.get_code())
return hostname
def _modify_3par_iscsi_host(self, common, hostname, iscsi_iqn):
mod_request = {'pathOperation': common.client.HOST_EDIT_ADD,
'iSCSINames': [iscsi_iqn]}
common.client.modifyHost(hostname, mod_request)
def _set_3par_chaps(self, common, hostname, volume, username, password):
"""Sets a 3PAR host's CHAP credentials."""
if not common._client_conf['hpe3par_iscsi_chap_enabled']:
return
mod_request = {'chapOperation': common.client.HOST_EDIT_ADD,
'chapOperationMode': common.client.CHAP_INITIATOR,
'chapName': username,
'chapSecret': password}
common.client.modifyHost(hostname, mod_request)
    def _create_host(self, common, volume, connector,
                     remote_target=None, src_cpg=None, remote_client=None):
        """Creates or modifies existing 3PAR host.

        Returns a (host, chap_username, chap_password, cpg) tuple. The
        CHAP fields stay None when CHAP is disabled or when operating
        against a remote (replication) target.
        """
        # make sure we don't have the host already
        host = None
        domain = None
        username = None
        password = None
        hostname = common._safe_hostname(connector['host'])
        if remote_target:
            # Remote array: map the source CPG through the replication
            # cpg_map to pick the remote CPG/domain.
            cpg = common._get_cpg_from_cpg_map(
                remote_target['cpg_map'], src_cpg)
            cpg_obj = remote_client.getCPG(cpg)
            if 'domain' in cpg_obj:
                domain = cpg_obj['domain']
        else:
            cpg = common.get_cpg(volume, allowSnap=True)
            domain = common.get_domain(cpg)
        if not remote_target:
            # Get the CHAP secret if CHAP is enabled
            if common._client_conf['hpe3par_iscsi_chap_enabled']:
                vol_name = common._get_3par_vol_name(volume['id'])
                username = common.client.getVolumeMetaData(
                    vol_name, CHAP_USER_KEY)['value']
                password = common.client.getVolumeMetaData(
                    vol_name, CHAP_PASS_KEY)['value']
        try:
            if remote_target:
                host = remote_client.getHost(hostname)
            else:
                host = common._get_3par_host(hostname)
                # Check whether host with iqn of initiator present on 3par
                hosts = common.client.queryHost(iqns=[connector['initiator']])
                host, hostname = (
                    common._get_prioritized_host_on_3par(
                        host, hosts, hostname))
        except hpeexceptions.HTTPNotFound:
            # get persona from the volume type extra specs
            persona_id = common.get_persona_type(volume)
            # host doesn't exist, we have to create it
            hostname = self._create_3par_iscsi_host(common,
                                                    hostname,
                                                    [connector['initiator']],
                                                    domain,
                                                    persona_id,
                                                    remote_client)
        else:
            if not remote_target:
                if 'iSCSIPaths' not in host or len(host['iSCSIPaths']) < 1:
                    self._modify_3par_iscsi_host(
                        common, hostname,
                        connector['initiator'])
                elif (not host['initiatorChapEnabled'] and
                        common._client_conf['hpe3par_iscsi_chap_enabled']):
                    LOG.warning("Host exists without CHAP credentials set and "
                                "has iSCSI attachments but CHAP is enabled. "
                                "Updating host with new CHAP credentials.")
        if remote_target:
            host = remote_client.getHost(hostname)
        else:
            # set/update the chap details for the host
            self._set_3par_chaps(common, hostname, volume, username, password)
            host = common._get_3par_host(hostname)
        return host, username, password, cpg
    def _do_export(self, common, volume, connector):
        """Gets the associated account, generates CHAP info and updates.

        Returns a model_update dict whose 'provider_auth' is either None
        (CHAP disabled) or the 'CHAP <user> <password>' string that was
        also stored in the volume's metadata on the array.
        """
        model_update = {}
        if not common._client_conf['hpe3par_iscsi_chap_enabled']:
            model_update['provider_auth'] = None
            return model_update
        # CHAP username will be the hostname
        chap_username = connector['host']
        chap_password = None
        try:
            # Get all active VLUNs for the host
            vluns = common.client.getHostVLUNs(chap_username)
            # Host has active VLUNs... is CHAP enabled on host?
            host_info = common.client.getHost(chap_username)
            if not host_info['initiatorChapEnabled']:
                LOG.warning("Host has no CHAP key, but CHAP is enabled.")
        except hpeexceptions.HTTPNotFound:
            chap_password = volume_utils.generate_password(16)
            LOG.warning("No host or VLUNs exist. Generating new "
                        "CHAP key.")
        else:
            # Get a list of all iSCSI VLUNs and see if there is already a CHAP
            # key assigned to one of them. Use that CHAP key if present,
            # otherwise create a new one. Skip any VLUNs that are missing
            # CHAP credentials in metadata.
            chap_exists = False
            active_vluns = 0
            for vlun in vluns:
                if not vlun['active']:
                    continue
                active_vluns += 1
                # iSCSI connections start with 'iqn'.
                if ('remoteName' in vlun and
                        re.match('iqn.*', vlun['remoteName'])):
                    try:
                        chap_password = common.client.getVolumeMetaData(
                            vlun['volumeName'], CHAP_PASS_KEY)['value']
                        chap_exists = True
                        break
                    except hpeexceptions.HTTPNotFound:
                        LOG.debug("The VLUN %s is missing CHAP credentials "
                                  "but CHAP is enabled. Skipping.",
                                  vlun['remoteName'])
                else:
                    LOG.warning("Non-iSCSI VLUN detected.")
            if not chap_exists:
                chap_password = volume_utils.generate_password(16)
                LOG.warning("No VLUN contained CHAP credentials. "
                            "Generating new CHAP key.")
        # Add CHAP credentials to the volume metadata
        vol_name = common._get_3par_vol_name(volume['id'])
        common.client.setVolumeMetaData(
            vol_name, CHAP_USER_KEY, chap_username)
        common.client.setVolumeMetaData(
            vol_name, CHAP_PASS_KEY, chap_password)
        model_update['provider_auth'] = ('CHAP %s %s' %
                                         (chap_username, chap_password))
        return model_update
@utils.trace
def create_export(self, context, volume, connector):
    """Create the CHAP export for *volume* and return its model update.

    Logs into the backend, delegates to _do_export, and always logs out
    again, even when the export attempt raises.
    """
    backend = self._login()
    try:
        return self._do_export(backend, volume, connector)
    finally:
        self._logout(backend)
@utils.trace
def ensure_export(self, context, volume):
    """Ensure the volume still exists on the 3PAR.

    Also retrieves CHAP credentials, if present on the volume.

    Returns a model update carrying 'provider_auth' when the volume
    exists (None when no complete CHAP pair is stored); implicitly
    returns None when the volume is missing on the array.
    """
    common = self._login()
    try:
        vol_name = common._get_3par_vol_name(volume['id'])
        common.client.getVolume(vol_name)
    except hpeexceptions.HTTPNotFound:
        # NOTE(review): if _get_3par_vol_name itself raised HTTPNotFound,
        # vol_name would be unbound here -- presumably only getVolume can
        # raise; confirm.
        LOG.error("Volume %s doesn't exist on array.", vol_name)
    else:
        metadata = common.client.getAllVolumeMetaData(vol_name)

        username = None
        password = None
        model_update = {}
        model_update['provider_auth'] = None

        # Scan the volume metadata for the stored CHAP user/password pair.
        for member in metadata['members']:
            if member['key'] == CHAP_USER_KEY:
                username = member['value']
            elif member['key'] == CHAP_PASS_KEY:
                password = member['value']

        # Only report credentials when both halves are present.
        if username and password:
            model_update['provider_auth'] = ('CHAP %s %s' %
                                             (username, password))

        return model_update
    finally:
        self._logout(common)
def _get_least_used_nsp_for_host(self, common, hostname):
"""Get the least used NSP for the current host.
Steps to determine which NSP to use.
* If only one iSCSI NSP, return it
* If there is already an active vlun to this host, return its NSP
* Return NSP with fewest active vluns
"""
iscsi_nsps = self._get_iscsi_nsps(common)
# If there's only one path, use it
if len(iscsi_nsps) == 1:
return iscsi_nsps[0]
# Try to reuse an existing iscsi path to the host
vluns = common.client.getVLUNs()
for vlun in vluns['members']:
if vlun['active']:
if vlun['hostname'] == hostname:
temp_nsp = common.build_nsp(vlun['portPos'])
if temp_nsp in iscsi_nsps:
# this host already has an iscsi path, so use it
return temp_nsp
# Calculate the least used iscsi nsp
least_used_nsp = self._get_least_used_nsp(common,
vluns['members'],
self._get_iscsi_nsps(common))
return least_used_nsp
def _get_iscsi_nsps(self, common):
"""Return the list of candidate nsps."""
nsps = []
iscsi_ips = self.iscsi_ips[common._client_conf['hpe3par_api_url']]
for value in iscsi_ips.values():
nsps.append(value['nsp'])
return nsps
def _get_ip_using_nsp(self, nsp, common):
"""Return IP associated with given nsp."""
iscsi_ips = self.iscsi_ips[common._client_conf['hpe3par_api_url']]
for (key, value) in iscsi_ips.items():
if value['nsp'] == nsp:
return key
def _get_least_used_nsp(self, common, vluns, nspss):
"""Return the nsp that has the fewest active vluns."""
# return only the nsp (node:server:port)
# count the number of nsps
nsp_counts = {}
for nsp in nspss:
# initialize counts to zero
nsp_counts[nsp] = 0
current_least_used_nsp = None
for vlun in vluns:
if vlun['active']:
nsp = common.build_nsp(vlun['portPos'])
if nsp in nsp_counts:
nsp_counts[nsp] = nsp_counts[nsp] + 1
# identify key (nsp) of least used nsp
current_smallest_count = sys.maxsize
for (nsp, count) in nsp_counts.items():
if count < current_smallest_count:
current_least_used_nsp = nsp
current_smallest_count = count
return current_least_used_nsp
| 42.137972 | 79 | 0.551787 |
acf61e75a06710ac63347e3bca3775840f10761d | 121 | py | Python | aula06b.py | RodrigoMoro3736/Python | 14ae88b81c252c1235ac0f904d0f77a8db870078 | [
"MIT"
] | null | null | null | aula06b.py | RodrigoMoro3736/Python | 14ae88b81c252c1235ac0f904d0f77a8db870078 | [
"MIT"
] | null | null | null | aula06b.py | RodrigoMoro3736/Python | 14ae88b81c252c1235ac0f904d0f77a8db870078 | [
"MIT"
] | null | null | null | n = int(input('digite um numero: '))
if n == 77:
print('vc digitou 77 né...')
else:
print('vc não digitou 77...') | 24.2 | 36 | 0.570248 |
acf61f9e459cd3c78cad3d40fa9c6b4058b91fbc | 1,287 | py | Python | src/lib/bver/Versioned/Software.py | backboneHQ/bver | c3c929442fadb28a3f39d0ddec19fb2dfc7a4732 | [
"MIT"
] | 1 | 2021-09-09T01:22:37.000Z | 2021-09-09T01:22:37.000Z | src/lib/bver/Versioned/Software.py | backboneHQ/bver | c3c929442fadb28a3f39d0ddec19fb2dfc7a4732 | [
"MIT"
] | null | null | null | src/lib/bver/Versioned/Software.py | backboneHQ/bver | c3c929442fadb28a3f39d0ddec19fb2dfc7a4732 | [
"MIT"
] | 1 | 2021-09-03T18:45:15.000Z | 2021-09-03T18:45:15.000Z | from .Versioned import Versioned
from .Addon import Addon
class InvalidAddonError(Exception):
    """Raised when an addon name is not registered on a Software."""
class Software(Versioned):
    """
    Versioned subclass that can carry a set of named addons.
    """

    def __init__(self, *args, **kwargs):
        """
        Create a software object.
        """
        super(Software, self).__init__(*args, **kwargs)
        # Maps addon name -> Addon instance.
        self.__addons = {}

    def addAddon(self, addon):
        """
        Register an addon on the software, keyed by its name.
        """
        assert isinstance(addon, Addon), "Invalid addon type!"
        self.__addons[addon.name()] = addon

    def addon(self, name):
        """
        Return the addon registered under *name*.

        Raises InvalidAddonError when no such addon exists.
        """
        try:
            return self.__addons[name]
        except KeyError:
            raise InvalidAddonError('Invalid addon "{0}"'.format(name))

    def addonNames(self):
        """
        Return the registered addon names.
        """
        return self.__addons.keys()

    def bverName(self, addon=None):
        """
        Return the environment variable name of the versioned, optionally
        scoped to *addon*.
        """
        if not addon:
            return Versioned.toBverName(self.name())
        assert isinstance(addon, Addon), "Invalid addon type!"
        return Versioned.toBverName(self.name(), addon.name())
| 24.75 | 71 | 0.576535 |
acf61fafab1b9a7450939209de330ee55d9972ee | 1,469 | py | Python | hood/migrations/0001_initial.py | Kernael92/hood | f3ec48248c6626f30fa286c3c3d4a97a2cb4fb9c | [
"Unlicense"
] | null | null | null | hood/migrations/0001_initial.py | Kernael92/hood | f3ec48248c6626f30fa286c3c3d4a97a2cb4fb9c | [
"Unlicense"
] | null | null | null | hood/migrations/0001_initial.py | Kernael92/hood | f3ec48248c6626f30fa286c3c3d4a97a2cb4fb9c | [
"Unlicense"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2019-01-09 21:19
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Initial migration of the ``hood`` app: creates the Hood and Profile
    # tables.  Auto-generated by Django 1.11; do not hand-edit field lists.

    initial = True

    dependencies = [
        # Profile holds a FK to the project's (swappable) user model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Hood',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=180)),
                ('location', models.CharField(max_length=180)),
                ('occupants_count', models.IntegerField()),
            ],
        ),
        migrations.CreateModel(
            name='Profile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('profile_pi', models.ImageField(upload_to='profile_pic/')),
                ('email', models.EmailField(max_length=254)),
                ('phone_number', models.CharField(blank=True, max_length=10)),
                ('hood', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='hood.Hood')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| 36.725 | 118 | 0.608577 |
acf621b34c34c1e11afb1c5cdcad27a00690d4f6 | 2,461 | py | Python | shared/zoo/models.py | google-research/adamatch | 83d9a1c86c177fa5c2832db1d0a30168d65b2019 | [
"Apache-2.0"
] | 36 | 2021-07-20T20:20:18.000Z | 2022-03-31T03:38:08.000Z | shared/zoo/models.py | google-research/adamatch | 83d9a1c86c177fa5c2832db1d0a30168d65b2019 | [
"Apache-2.0"
] | 5 | 2021-07-21T11:59:42.000Z | 2022-03-07T06:10:08.000Z | shared/zoo/models.py | google-research/adamatch | 83d9a1c86c177fa5c2832db1d0a30168d65b2019 | [
"Apache-2.0"
] | 4 | 2021-09-06T02:48:10.000Z | 2022-03-15T06:46:48.000Z | # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
import jax.numpy as jn
import objax
from objax.typing import JaxArray
from objax.zoo.resnet_v2 import ResNet101, load_pretrained_weights_from_keras
from shared.zoo.wideresnet import WideResNet
# Architecture identifiers accepted by network() below.
ARCHS = 'wrn28-2 wrn28-4 wrn34-2 wrn40-2 resnet101 resnet101pretrain'.split()
class preprocess(objax.nn.Sequential):
    # Input adapter for Keras-pretrained ResNet weights.  The indexing below
    # fixes a rank-4 input with channels on axis 1 (NCHW); it converts the
    # channel order, rescales to pixel range, and subtracts per-channel
    # means.  Assumes inputs are roughly in [-1, 1] -- TODO confirm caller's
    # normalization.

    @staticmethod
    def _swap_channel(x):
        # Reverse the channel axis (axis 1), i.e. RGB <-> BGR.
        return x[:, ::-1, :, :]

    @staticmethod
    def _scale_back(x):
        # Map values from ~[-1, 1] back to the ~[0, 255] pixel range.
        return (x * 128) + 127.5

    def _subtract_mean(self, x):
        # Subtract per-channel means, broadcast over batch and spatial dims.
        # The constants match the classic ImageNet "caffe" BGR means.
        return x - jn.array([103.939, 116.779, 123.68])[None, :, None, None]

    def __init__(self):
        # Chain the three steps via objax.nn.Sequential.
        ops = [self._swap_channel, self._scale_back, self._subtract_mean]
        super().__init__(ops)
def resnet(cls, colors, nclass, bn=objax.nn.BatchNorm2D, **kwargs):
    """Instantiate ResNet class *cls* with only the kwargs it accepts.

    NOTE(review): the extra kwargs are filtered against ResNet101's
    signature even when ``cls`` is a different variant -- confirm intended.
    """
    return cls(colors, nclass, normalization_fn=bn, **objax.util.local_kwargs(kwargs, ResNet101))
def resnet_pretrained(cls, colors, nclass, bn=objax.nn.BatchNorm2D, **kwargs):
    """Build a Keras-pretrained ResNet preceded by the preprocess adapter.

    NOTE(review): ``colors``, ``bn`` and ``kwargs`` are accepted but never
    used in this body -- confirm whether that is intentional.
    """
    preprocess_input = preprocess()
    model = cls(include_top=False, num_classes=nclass)
    # Presumably Sequential is list-like, so ``+`` concatenates the two op
    # lists into one pipeline -- verify against objax.nn.Sequential.
    return objax.nn.Sequential(preprocess_input + model)
def network(arch: str):
    """Map an architecture identifier to a partially-applied model factory.

    Raises ValueError for identifiers outside ARCHS.
    """
    # All WideResNet variants share repeat=4 and differ only in scales/filters.
    wrn_configs = {
        'wrn28-2': (3, 32),
        'wrn28-4': (3, 64),
        'wrn34-2': (4, 32),
        'wrn40-2': (5, 32),
    }
    if arch in wrn_configs:
        scales, filters = wrn_configs[arch]
        return functools.partial(WideResNet, scales=scales, filters=filters,
                                 repeat=4)
    if arch == 'resnet101':
        return functools.partial(resnet, cls=ResNet101)
    if arch == 'resnet101pretrain':
        pretrained = functools.partial(load_pretrained_weights_from_keras,
                                       arch='ResNet101')
        return functools.partial(resnet_pretrained, cls=pretrained)
    raise ValueError('Architecture not recognized', arch)
| 37.287879 | 109 | 0.71475 |
acf622ad67468def63430cce9d9d49b3e4ac8ae7 | 1,160 | py | Python | test.py | The-SocialLion/Malaria-Cell-Images-Classification-using-CNN | 7a044353adecc7fe536956c2044b266aa3c0724a | [
"Apache-2.0"
] | null | null | null | test.py | The-SocialLion/Malaria-Cell-Images-Classification-using-CNN | 7a044353adecc7fe536956c2044b266aa3c0724a | [
"Apache-2.0"
] | null | null | null | test.py | The-SocialLion/Malaria-Cell-Images-Classification-using-CNN | 7a044353adecc7fe536956c2044b266aa3c0724a | [
"Apache-2.0"
] | null | null | null | import numpy as np
import os
from tensorflow.keras.models import load_model
from PIL import ImageOps
from tensorflow.keras.preprocessing import image# used for preproccesing
# loading or reading the saved model ,weights
model=load_model("MCI.h5")
print("loaded model from disk")
Img=64
#classification of images
def classify(img_file):
    """Classify one cell image as Parasitized or Uninfected and print it.

    Relies on the module-level ``model`` (loaded Keras model) and ``Img``
    (target side length).
    """
    test_image=image.load_img(img_file)
    # Convert to single-channel grayscale to match the model's input depth.
    test_image=ImageOps.grayscale(test_image)
    test_image = test_image.resize((Img,Img))
    # Add a batch dimension, then reshape to (batch, Img, Img, 1).
    test_image=np.expand_dims(test_image,axis=0)
    test = np.array(test_image).reshape(-1,Img,Img,1)
    result=model.predict(test)
    # Output value 0 -> Parasitized, anything else -> Uninfected.
    # NOTE(review): assumes the model emits a hard 0/1 label, not a
    # probability -- confirm against training code.
    if result[0][0]==0:
        prediction='Parasitized'
    else:
        prediction='Uninfected'
    print("The prediction is {0},for the image {1}".format(prediction,img_file))
# storing the images in this folder
import os

path = 'D:/python/dl programs/CNN/Malaria Cell images/test'
files = []
# r=root, d=directories, f=files
for r, d, f in os.walk(path):
    for file in f:
        # BUG FIX: the original test `'.jpeg' or '.jpg' or '.png' or
        # '.JPG' in file` was always truthy (the first literal short-
        # circuits the whole expression), so every file was collected.
        # Test the actual file suffix instead; endswith accepts a tuple.
        if file.endswith(('.jpeg', '.jpg', '.png', '.JPG')):
            files.append(os.path.join(r, file))

# Classify every collected image.
for f in files:
    classify(f)
    print('\n')
| 33.142857 | 81 | 0.688793 |
acf6233d88a2d28390e2c37a205e3a0285cc87db | 46 | py | Python | pdm/resolver/__init__.py | frafra/pdm | 12c5c4f91bbb7260be7d93f3e3914ba708309032 | [
"MIT"
] | 1,731 | 2020-01-21T10:17:37.000Z | 2022-03-31T22:01:27.000Z | pdm/resolver/__init__.py | frafra/pdm | 12c5c4f91bbb7260be7d93f3e3914ba708309032 | [
"MIT"
] | 562 | 2020-01-20T04:53:14.000Z | 2022-03-31T23:28:41.000Z | pdm/resolver/__init__.py | frafra/pdm | 12c5c4f91bbb7260be7d93f3e3914ba708309032 | [
"MIT"
] | 86 | 2020-02-06T11:20:15.000Z | 2022-03-30T13:28:10.000Z | from pdm.resolver.core import resolve # noqa
| 23 | 45 | 0.782609 |
acf62515d8dfbff734f705d00790a7e194d332d3 | 1,366 | py | Python | thing/tasks/__init__.py | Gillingham/evething | e00b722cf00a6a3a25e1fff3014ed3365c7ef3e4 | [
"BSD-2-Clause"
] | 33 | 2015-02-18T00:07:57.000Z | 2020-06-09T15:19:05.000Z | thing/tasks/__init__.py | Gillingham/evething | e00b722cf00a6a3a25e1fff3014ed3365c7ef3e4 | [
"BSD-2-Clause"
] | 19 | 2015-03-09T19:51:43.000Z | 2019-10-19T12:04:23.000Z | thing/tasks/__init__.py | Gillingham/evething | e00b722cf00a6a3a25e1fff3014ed3365c7ef3e4 | [
"BSD-2-Clause"
] | 20 | 2015-02-20T17:53:17.000Z | 2022-02-11T06:29:11.000Z | # flake8: noqa
# Internal tasks
from purgeapikey import purge_api_key
from tablecleaner import table_cleaner
from taskspawner import task_spawner
# APIKey tasks
from accountbalance import AccountBalance
from accountstatus import AccountStatus
from apikeyinfo import APIKeyInfo
from assetlist import AssetList
from characterinfo import CharacterInfo
from charactersheet import CharacterSheet
from contracts import Contracts
from corporationsheet import CorporationSheet
from industryjobs import IndustryJobs
from locations import Locations
from mailinglists import MailingLists
from mailbodies import MailBodies
from mailmessages import MailMessages
from marketorders import MarketOrders
# from membertracking import MemberTracking
# from shareholders import Shareholders
from skillqueue import SkillQueue
from standings import Standings
from walletjournal import WalletJournal
from wallettransactions import WalletTransactions
from planetarycolonies import PlanetaryColonies
from planetarypins import PlanetaryPins
# Global API tasks
from alliancelist import AllianceList
from conquerablestationlist import ConquerableStationList
from reftypes import RefTypes
from serverstatus import ServerStatus
# Periodic tasks
from fixnames import FixNames
from historyupdater import HistoryUpdater
from priceupdater import PriceUpdater
from fixcontracts import FixContracts
| 32.52381 | 57 | 0.882138 |
acf625239af1abff8f8e39bc796591afc18e8e4d | 537 | py | Python | appli/m_tchatbox.py | kasshyss/fourcheball | 12f0312eb32cc2fb247d8a5d8fb6b0b7ea195a32 | [
"MIT"
] | null | null | null | appli/m_tchatbox.py | kasshyss/fourcheball | 12f0312eb32cc2fb247d8a5d8fb6b0b7ea195a32 | [
"MIT"
] | null | null | null | appli/m_tchatbox.py | kasshyss/fourcheball | 12f0312eb32cc2fb247d8a5d8fb6b0b7ea195a32 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding utf-8 -*-
import aiml
import os
# return tchatbox who learned and ready to go
def init():
    """Create the AIML chatbot kernel, load its brain and return it."""
    bot = aiml.Kernel()
    bot.setBotPredicate("name", "Jean-Paul")
    #bot.learn(os.path.dirname(os.path.realpath(__file__))+os.path.sep+'std-startup.xml')
    # "load aiml b" triggers loading of the AIML rule set.
    bot.respond("load aiml b")
    return bot
# Get a question a return the Jean paul anwers
def robot_speak(bot, message):
    """Ask the chatbot for a reply; return a canned answer when it is silent."""
    answer = bot.respond(message)
    return answer if answer != '' else 'Parle a ma main'
| 24.409091 | 89 | 0.657356 |
acf626e0b9f34a3e3c5c3f87bb9bba488db9bdbc | 357 | py | Python | pytest_faker/plugin.py | pytest-dev/pytest-faker | 5d21f742980c1750749919ebe5f5ce7a6e7ed111 | [
"MIT"
] | 37 | 2015-04-19T12:00:50.000Z | 2021-11-12T11:42:30.000Z | pytest_faker/plugin.py | pytest-dev/pytest-faker | 5d21f742980c1750749919ebe5f5ce7a6e7ed111 | [
"MIT"
] | 11 | 2015-08-20T20:08:46.000Z | 2020-05-14T13:23:18.000Z | pytest_faker/plugin.py | pytest-dev/pytest-faker | 5d21f742980c1750749919ebe5f5ce7a6e7ed111 | [
"MIT"
] | 3 | 2016-09-19T05:00:14.000Z | 2020-07-09T10:23:52.000Z | """pytest-faker plugin."""
import pytest
from faker import Factory
@pytest.fixture(scope='session')
def faker_locale():
    """Faker locale.

    None by default, which means faker's default locale; override this
    fixture in a test suite to localize the generated data.
    """
    return None
@pytest.fixture(scope='session')
def faker(faker_locale):
    """Session-scoped Faker factory object built for ``faker_locale``."""
    return Factory.create(faker_locale)
| 17.85 | 55 | 0.694678 |
acf62714fada62a5c8357eb209c1393c5c4e2ec3 | 3,263 | py | Python | tutorials/03-advanced/image_captioning/sample.py | pmtempone/pytorch-tutorial | 286f7b7589e745deefbe0610c31034718e735e9c | [
"MIT"
] | null | null | null | tutorials/03-advanced/image_captioning/sample.py | pmtempone/pytorch-tutorial | 286f7b7589e745deefbe0610c31034718e735e9c | [
"MIT"
] | null | null | null | tutorials/03-advanced/image_captioning/sample.py | pmtempone/pytorch-tutorial | 286f7b7589e745deefbe0610c31034718e735e9c | [
"MIT"
] | 1 | 2020-07-30T02:21:13.000Z | 2020-07-30T02:21:13.000Z | import torch
import matplotlib.pyplot as plt
import numpy as np
import argparse
import pickle
import os
from torch.autograd import Variable
from torchvision import transforms
from build_vocab import Vocabulary
from model import EncoderCNN, DecoderRNN
from PIL import Image
def to_var(x, volatile=False):
if torch.cuda.is_available():
x = x.cuda()
return Variable(x, volatile=volatile)
def load_image(image_path, transform=None):
image = Image.open(image_path)
image = image.resize([224, 224], Image.LANCZOS)
if transform is not None:
image = transform(image).unsqueeze(0)
return image
def main(args):
# Image preprocessing
transform = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.485, 0.456, 0.406),
(0.229, 0.224, 0.225))])
# Load vocabulary wrapper
with open(args.vocab_path, 'rb') as f:
vocab = pickle.load(f)
# Build Models
encoder = EncoderCNN(args.embed_size)
encoder.eval() # evaluation mode (BN uses moving mean/variance)
decoder = DecoderRNN(args.embed_size, args.hidden_size,
len(vocab), args.num_layers)
# Load the trained model parameters
encoder.load_state_dict(torch.load(args.encoder_path))
decoder.load_state_dict(torch.load(args.decoder_path))
# Prepare Image
image = load_image(args.image, transform)
image_tensor = to_var(image, volatile=True)
# If use gpu
if torch.cuda.is_available():
encoder.cuda()
decoder.cuda()
# Generate caption from image
feature = encoder(image_tensor)
sampled_ids = decoder.sample(feature)
sampled_ids = sampled_ids.cpu().data.numpy()
# Decode word_ids to words
sampled_caption = []
for word_id in sampled_ids:
word = vocab.idx2word[word_id]
sampled_caption.append(word)
if word == '<end>':
break
sentence = ' '.join(sampled_caption)
# Print out image and generated caption.
print (sentence)
plt.imshow(np.asarray(image))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--image', type=str, required=True,
help='input image for generating caption')
parser.add_argument('--encoder_path', type=str, default='./models/encoder-5-3000.pkl',
help='path for trained encoder')
parser.add_argument('--decoder_path', type=str, default='./models/decoder-5-3000.pkl',
help='path for trained decoder')
parser.add_argument('--vocab_path', type=str, default='./data/vocab.pkl',
help='path for vocabulary wrapper')
# Model parameters (should be same as paramters in train.py)
parser.add_argument('--embed_size', type=int , default=256,
help='dimension of word embedding vectors')
parser.add_argument('--hidden_size', type=int , default=512,
help='dimension of lstm hidden states')
parser.add_argument('--num_layers', type=int , default=1 ,
help='number of layers in lstm')
args = parser.parse_args()
main(args) | 33.989583 | 90 | 0.63745 |
acf62812993f2755af02df7b9bcb6893439081ce | 1,680 | py | Python | cities_light/contrib/ajax_selects_lookups.py | affan2/django-cities-light | 5b65d47adc63e203879dc55bff5a360380adc5e4 | [
"MIT"
] | 4 | 2015-10-05T09:20:35.000Z | 2019-07-30T17:47:03.000Z | cities_light/contrib/ajax_selects_lookups.py | st4lk/django-cities-light | ad303f500f506d44d287ec3d531ff9fd8bc33e34 | [
"MIT"
] | 1 | 2020-01-25T13:02:17.000Z | 2020-01-25T13:02:17.000Z | cities_light/contrib/ajax_selects_lookups.py | st4lk/django-cities-light | ad303f500f506d44d287ec3d531ff9fd8bc33e34 | [
"MIT"
] | 2 | 2019-11-29T15:55:46.000Z | 2020-01-08T09:06:33.000Z | """
Couples cities_light and django-ajax-selects.
Register the lookups in settings.AJAX_LOOKUP_CHANNELS, add::
'cities_light_country': ('cities_light.lookups', 'CountryLookup'),
'cities_light_city': ('cities_light.lookups', 'CityLookup'),
"""
from ajax_select import LookupChannel
from django.db.models import Q
from ..models import *
class StandardLookupChannel(LookupChannel):
    """
    LookupChannel variant that renders the same HTML for the dropdown and
    for the selected "deck" area by delegating both to get_result().
    """

    def format_match(self, obj):
        """ (HTML) formatted item for displaying item in the dropdown """
        return self.get_result(obj)

    def format_item_display(self, obj):
        """
        (HTML) formatted item for displaying item in the selected deck area
        """
        return self.get_result(obj)
class CountryLookup(StandardLookupChannel):
    """
    Lookup channel for Country, hits name and name_ascii.
    """
    # Model searched by django-ajax-selects.
    model = Country

    def get_query(self, q, request):
        # Case-insensitive substring match on either the unicode or the
        # ASCII-folded name; distinct() guards against duplicate rows.
        return Country.objects.filter(
            Q(name__icontains=q) |
            Q(name_ascii__icontains=q)
        ).distinct()
class RegionLookup(StandardLookupChannel):
    """
    Lookup channel for Region, hits name and name_ascii.
    """
    # Model searched by django-ajax-selects.
    model = Region

    def get_query(self, q, request):
        # Case-insensitive substring match on either the unicode or the
        # ASCII-folded name; distinct() guards against duplicate rows.
        return Region.objects.filter(
            Q(name__icontains=q) |
            Q(name_ascii__icontains=q)
        ).distinct()
class CityLookup(StandardLookupChannel):
    """
    Lookup channel for City, hits name and search_names.
    """
    # Model searched by django-ajax-selects.
    model = City

    def get_query(self, q, request):
        # NOTE(review): only search_names is filtered here -- presumably
        # it aggregates the name variants; confirm against the City model.
        return City.objects.filter(search_names__icontains=q
            ).select_related('country').distinct()
| 24.347826 | 75 | 0.655952 |
acf628eeed226b7520252f9dbe7b3db3d291d358 | 73,392 | py | Python | kubernetes/client/apis/apps_v1beta1_api.py | amanagarwal33/python | e31693557f75950805fb4dc5af4cb7434a470e26 | [
"Apache-2.0"
] | null | null | null | kubernetes/client/apis/apps_v1beta1_api.py | amanagarwal33/python | e31693557f75950805fb4dc5af4cb7434a470e26 | [
"Apache-2.0"
] | null | null | null | kubernetes/client/apis/apps_v1beta1_api.py | amanagarwal33/python | e31693557f75950805fb4dc5af4cb7434a470e26 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.5.3
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class AppsV1beta1Api(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    # Use the explicitly supplied ApiClient when given; otherwise fall back
    # to the library-wide default client cached on the shared Configuration
    # object, creating it lazily on first use.
    config = Configuration()
    if api_client:
        self.api_client = api_client
    else:
        if not config.api_client:
            config.api_client = ApiClient()
        self.api_client = config.api_client
def create_namespaced_stateful_set(self, namespace, body, **kwargs):
    """
    create a StatefulSet

    Synchronous by default; pass a ``callback`` kwarg to make the request
    asynchronously, in which case the request thread is returned instead
    of the deserialized result.

    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param V1beta1StatefulSet body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V1beta1StatefulSet
    """
    # Both the sync and async paths make the identical underlying call
    # (the callback, if any, travels through **kwargs), so a single
    # delegation covers both.  Only the data -- not the full HTTP
    # response triple -- is requested.
    kwargs['_return_http_data_only'] = True
    return self.create_namespaced_stateful_set_with_http_info(
        namespace, body, **kwargs)
def create_namespaced_stateful_set_with_http_info(self, namespace, body, **kwargs):
    """
    create a StatefulSet
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.create_namespaced_stateful_set_with_http_info(namespace, body, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param V1beta1StatefulSet body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V1beta1StatefulSet
             If the method is called asynchronously,
             returns the request thread.
    """
    # Documented API parameters plus the internal transport knobs that
    # every generated method accepts.
    all_params = ['namespace', 'body', 'pretty']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # locals() snapshots the named arguments; the kwargs are validated
    # against all_params and folded in, rejecting unknown names early.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_namespaced_stateful_set" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'namespace' is set
    if ('namespace' not in params) or (params['namespace'] is None):
        raise ValueError("Missing the required parameter `namespace` when calling `create_namespaced_stateful_set`")
    # verify the required parameter 'body' is set
    if ('body' not in params) or (params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `create_namespaced_stateful_set`")

    collection_formats = {}

    resource_path = '/apis/apps/v1beta1/namespaces/{namespace}/statefulsets'.replace('{format}', 'json')
    path_params = {}
    if 'namespace' in params:
        path_params['namespace'] = params['namespace']

    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['*/*'])

    # Authentication setting
    auth_settings = ['BearerToken']

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='V1beta1StatefulSet',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def delete_collection_namespaced_stateful_set(self, namespace, **kwargs):
    """
    delete collection of StatefulSet

    Synchronous by default; pass a ``callback`` kwarg to make the request
    asynchronously, in which case the request thread is returned instead
    of the deserialized result.

    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
    :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
    :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history.
    :param int timeout_seconds: Timeout for the list/watch call.
    :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
    :return: UnversionedStatus
    """
    # Both the sync and async paths make the identical underlying call
    # (the callback, if any, travels through **kwargs).
    kwargs['_return_http_data_only'] = True
    return self.delete_collection_namespaced_stateful_set_with_http_info(
        namespace, **kwargs)
def delete_collection_namespaced_stateful_set_with_http_info(self, namespace, **kwargs):
    """
    delete collection of StatefulSet
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_collection_namespaced_stateful_set_with_http_info(namespace, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
    :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
    :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history.
    :param int timeout_seconds: Timeout for the list/watch call.
    :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
    :return: UnversionedStatus
             If the method is called asynchronously,
             returns the request thread.
    """
    # Documented API parameters plus the internal transport knobs that
    # every generated method accepts.
    all_params = ['namespace', 'pretty', 'field_selector', 'label_selector', 'resource_version', 'timeout_seconds', 'watch']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # locals() snapshots the named arguments; the kwargs are validated
    # against all_params and folded in, rejecting unknown names early.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_collection_namespaced_stateful_set" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'namespace' is set
    if ('namespace' not in params) or (params['namespace'] is None):
        raise ValueError("Missing the required parameter `namespace` when calling `delete_collection_namespaced_stateful_set`")

    collection_formats = {}

    resource_path = '/apis/apps/v1beta1/namespaces/{namespace}/statefulsets'.replace('{format}', 'json')
    path_params = {}
    if 'namespace' in params:
        path_params['namespace'] = params['namespace']

    # Optional filters are forwarded as camelCase query parameters.
    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']
    if 'field_selector' in params:
        query_params['fieldSelector'] = params['field_selector']
    if 'label_selector' in params:
        query_params['labelSelector'] = params['label_selector']
    if 'resource_version' in params:
        query_params['resourceVersion'] = params['resource_version']
    if 'timeout_seconds' in params:
        query_params['timeoutSeconds'] = params['timeout_seconds']
    if 'watch' in params:
        query_params['watch'] = params['watch']

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['*/*'])

    # Authentication setting
    auth_settings = ['BearerToken']

    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='UnversionedStatus',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def delete_namespaced_stateful_set(self, name, namespace, body, **kwargs):
    """
    delete a StatefulSet

    Synchronous by default; pass a `callback` function in kwargs to run
    the request asynchronously and receive the response via the callback.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str name: name of the StatefulSet (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param V1DeleteOptions body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately.
    :param bool orphan_dependents: Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list.
    :return: UnversionedStatus, or the request thread when called asynchronously.
    """
    # Request only the deserialized body (not the (data, status, headers)
    # tuple) from the lower-level helper. Both the async path (callback
    # present -> request thread) and the sync path return its result
    # unchanged, so a single delegation suffices.
    kwargs['_return_http_data_only'] = True
    return self.delete_namespaced_stateful_set_with_http_info(
        name, namespace, body, **kwargs)
def delete_namespaced_stateful_set_with_http_info(self, name, namespace, body, **kwargs):
    """
    delete a StatefulSet
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_namespaced_stateful_set_with_http_info(name, namespace, body, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str name: name of the StatefulSet (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param V1DeleteOptions body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately.
    :param bool orphan_dependents: Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list.
    :return: UnversionedStatus
             If the method is called asynchronously,
             returns the request thread.
    """
    # Whitelist of endpoint-specific kwargs plus the framework-level
    # pass-through options understood by api_client.call_api.
    all_params = ['name', 'namespace', 'body', 'pretty', 'grace_period_seconds', 'orphan_dependents']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() is captured deliberately -- it folds self, the
    # explicit arguments and 'kwargs' into one dict; renaming any local
    # defined above would change the contents of `params`.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            # The public wrapper's name is reported on purpose; callers
            # normally go through delete_namespaced_stateful_set.
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_namespaced_stateful_set" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'name' is set
    if ('name' not in params) or (params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `delete_namespaced_stateful_set`")
    # verify the required parameter 'namespace' is set
    if ('namespace' not in params) or (params['namespace'] is None):
        raise ValueError("Missing the required parameter `namespace` when calling `delete_namespaced_stateful_set`")
    # verify the required parameter 'body' is set
    if ('body' not in params) or (params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `delete_namespaced_stateful_set`")

    collection_formats = {}

    # '{format}' is a legacy swagger placeholder; the path template keys
    # ({namespace}, {name}) are substituted by api_client.call_api.
    resource_path = '/apis/apps/v1beta1/namespaces/{namespace}/statefulsets/{name}'.replace('{format}', 'json')
    path_params = {}
    if 'name' in params:
        path_params['name'] = params['name']
    if 'namespace' in params:
        path_params['namespace'] = params['namespace']

    # Map snake_case python kwargs onto the camelCase query parameters
    # expected by the Kubernetes API.
    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']
    if 'grace_period_seconds' in params:
        query_params['gracePeriodSeconds'] = params['grace_period_seconds']
    if 'orphan_dependents' in params:
        query_params['orphanDependents'] = params['orphan_dependents']

    header_params = {}

    form_params = []
    local_var_files = {}

    # The V1DeleteOptions object is sent as the request body.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['*/*'])

    # Authentication setting
    auth_settings = ['BearerToken']

    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='UnversionedStatus',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def get_api_resources(self, **kwargs):
    """
    get available resources

    Synchronous by default; supply a `callback` keyword argument to run
    the request asynchronously and have the response delivered to it.

    :param callback function: The callback function for asynchronous request. (optional)
    :return: UnversionedAPIResourceList, or the request thread when
             called asynchronously.
    """
    # Only the deserialized body is wanted from the low-level helper;
    # its return value (data or request thread) is passed straight through.
    kwargs['_return_http_data_only'] = True
    result = self.get_api_resources_with_http_info(**kwargs)
    return result
def get_api_resources_with_http_info(self, **kwargs):
    """
    get available resources
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_api_resources_with_http_info(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: UnversionedAPIResourceList
             If the method is called asynchronously,
             returns the request thread.
    """
    # This endpoint takes no endpoint-specific parameters; only the
    # framework-level pass-through options are accepted.
    all_params = []
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() deliberately folds self and 'kwargs' into one dict;
    # renaming any local defined above would change `params`.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_api_resources" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    # Discovery endpoint for the apps/v1beta1 API group.
    resource_path = '/apis/apps/v1beta1/'.replace('{format}', 'json')
    path_params = {}

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])

    # Authentication setting
    auth_settings = ['BearerToken']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='UnversionedAPIResourceList',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def list_namespaced_stateful_set(self, namespace, **kwargs):
    """
    list or watch objects of kind StatefulSet

    Synchronous by default; pass a `callback` function in kwargs for an
    asynchronous request (the request thread is returned instead).

    :param callback function: The callback function for asynchronous request. (optional)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
    :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
    :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history.
    :param int timeout_seconds: Timeout for the list/watch call.
    :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
    :return: V1beta1StatefulSetList, or the request thread when called asynchronously.
    """
    # Only the deserialized list object is needed from the helper; the
    # helper's result (data or thread) is forwarded unchanged.
    kwargs['_return_http_data_only'] = True
    response = self.list_namespaced_stateful_set_with_http_info(namespace, **kwargs)
    return response
def list_namespaced_stateful_set_with_http_info(self, namespace, **kwargs):
    """
    list or watch objects of kind StatefulSet
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.list_namespaced_stateful_set_with_http_info(namespace, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
    :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
    :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history.
    :param int timeout_seconds: Timeout for the list/watch call.
    :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
    :return: V1beta1StatefulSetList
             If the method is called asynchronously,
             returns the request thread.
    """
    # Whitelist of endpoint-specific kwargs plus framework pass-throughs.
    all_params = ['namespace', 'pretty', 'field_selector', 'label_selector', 'resource_version', 'timeout_seconds', 'watch']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() deliberately folds self, namespace and 'kwargs' into
    # one dict; renaming any local defined above would change `params`.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_namespaced_stateful_set" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'namespace' is set
    if ('namespace' not in params) or (params['namespace'] is None):
        raise ValueError("Missing the required parameter `namespace` when calling `list_namespaced_stateful_set`")

    collection_formats = {}

    resource_path = '/apis/apps/v1beta1/namespaces/{namespace}/statefulsets'.replace('{format}', 'json')
    path_params = {}
    if 'namespace' in params:
        path_params['namespace'] = params['namespace']

    # Map snake_case python kwargs onto the camelCase query parameters
    # expected by the Kubernetes API.
    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']
    if 'field_selector' in params:
        query_params['fieldSelector'] = params['field_selector']
    if 'label_selector' in params:
        query_params['labelSelector'] = params['label_selector']
    if 'resource_version' in params:
        query_params['resourceVersion'] = params['resource_version']
    if 'timeout_seconds' in params:
        query_params['timeoutSeconds'] = params['timeout_seconds']
    if 'watch' in params:
        query_params['watch'] = params['watch']

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept` -- includes the stream=watch media types used
    # when this endpoint is called with watch=True.
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['*/*'])

    # Authentication setting
    auth_settings = ['BearerToken']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='V1beta1StatefulSetList',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def list_stateful_set_for_all_namespaces(self, **kwargs):
    """
    list or watch objects of kind StatefulSet across all namespaces

    Synchronous by default; pass a `callback` function in kwargs to run
    the request asynchronously.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
    :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
    :param str pretty: If 'true', then the output is pretty printed.
    :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history.
    :param int timeout_seconds: Timeout for the list/watch call.
    :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
    :return: V1beta1StatefulSetList, or the request thread when called asynchronously.
    """
    # Request only the deserialized body; the helper already returns the
    # request thread for async calls, so its result is forwarded as-is.
    kwargs['_return_http_data_only'] = True
    return self.list_stateful_set_for_all_namespaces_with_http_info(**kwargs)
def list_stateful_set_for_all_namespaces_with_http_info(self, **kwargs):
    """
    list or watch objects of kind StatefulSet
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.list_stateful_set_for_all_namespaces_with_http_info(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
    :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
    :param str pretty: If 'true', then the output is pretty printed.
    :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history.
    :param int timeout_seconds: Timeout for the list/watch call.
    :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
    :return: V1beta1StatefulSetList
             If the method is called asynchronously,
             returns the request thread.
    """
    # Whitelist of endpoint-specific kwargs plus framework pass-throughs.
    all_params = ['field_selector', 'label_selector', 'pretty', 'resource_version', 'timeout_seconds', 'watch']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() deliberately folds self and 'kwargs' into one dict;
    # renaming any local defined above would change `params`.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_stateful_set_for_all_namespaces" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    # Cluster-scoped path: no {namespace} segment.
    resource_path = '/apis/apps/v1beta1/statefulsets'.replace('{format}', 'json')
    path_params = {}

    # Map snake_case python kwargs onto the camelCase query parameters
    # expected by the Kubernetes API.
    query_params = {}
    if 'field_selector' in params:
        query_params['fieldSelector'] = params['field_selector']
    if 'label_selector' in params:
        query_params['labelSelector'] = params['label_selector']
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']
    if 'resource_version' in params:
        query_params['resourceVersion'] = params['resource_version']
    if 'timeout_seconds' in params:
        query_params['timeoutSeconds'] = params['timeout_seconds']
    if 'watch' in params:
        query_params['watch'] = params['watch']

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept` -- includes the stream=watch media types used
    # when this endpoint is called with watch=True.
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['*/*'])

    # Authentication setting
    auth_settings = ['BearerToken']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='V1beta1StatefulSetList',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def patch_namespaced_stateful_set(self, name, namespace, body, **kwargs):
    """
    partially update the specified StatefulSet

    Synchronous by default; pass a `callback` function in kwargs to run
    the request asynchronously and receive the response via the callback.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str name: name of the StatefulSet (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param UnversionedPatch body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V1beta1StatefulSet, or the request thread when called asynchronously.
    """
    # Ask the helper for just the deserialized object; both the sync and
    # async (callback) paths return the helper's result unchanged.
    kwargs['_return_http_data_only'] = True
    return self.patch_namespaced_stateful_set_with_http_info(
        name, namespace, body, **kwargs)
def patch_namespaced_stateful_set_with_http_info(self, name, namespace, body, **kwargs):
    """
    partially update the specified StatefulSet
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.patch_namespaced_stateful_set_with_http_info(name, namespace, body, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str name: name of the StatefulSet (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param UnversionedPatch body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V1beta1StatefulSet
             If the method is called asynchronously,
             returns the request thread.
    """
    # Whitelist of endpoint-specific kwargs plus framework pass-throughs.
    all_params = ['name', 'namespace', 'body', 'pretty']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() deliberately folds self, the explicit arguments and
    # 'kwargs' into one dict; renaming any local defined above would
    # change the contents of `params`.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method patch_namespaced_stateful_set" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'name' is set
    if ('name' not in params) or (params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `patch_namespaced_stateful_set`")
    # verify the required parameter 'namespace' is set
    if ('namespace' not in params) or (params['namespace'] is None):
        raise ValueError("Missing the required parameter `namespace` when calling `patch_namespaced_stateful_set`")
    # verify the required parameter 'body' is set
    if ('body' not in params) or (params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `patch_namespaced_stateful_set`")

    collection_formats = {}

    resource_path = '/apis/apps/v1beta1/namespaces/{namespace}/statefulsets/{name}'.replace('{format}', 'json')
    path_params = {}
    if 'name' in params:
        path_params['name'] = params['name']
    if 'namespace' in params:
        path_params['namespace'] = params['namespace']

    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']

    header_params = {}

    form_params = []
    local_var_files = {}

    # The patch document is sent as the request body.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])

    # HTTP header `Content-Type` -- restricted to the patch media types
    # accepted by the Kubernetes API server.
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json'])

    # Authentication setting
    auth_settings = ['BearerToken']

    return self.api_client.call_api(resource_path, 'PATCH',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='V1beta1StatefulSet',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def patch_namespaced_stateful_set_status(self, name, namespace, body, **kwargs):
    """
    partially update status of the specified StatefulSet

    Synchronous by default; pass a `callback` function in kwargs for an
    asynchronous request (the request thread is returned instead).

    :param callback function: The callback function for asynchronous request. (optional)
    :param str name: name of the StatefulSet (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param UnversionedPatch body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V1beta1StatefulSet, or the request thread when called asynchronously.
    """
    # The helper handles both sync and async dispatch; only the
    # deserialized object (not status/headers) is requested from it.
    kwargs['_return_http_data_only'] = True
    outcome = self.patch_namespaced_stateful_set_status_with_http_info(
        name, namespace, body, **kwargs)
    return outcome
def patch_namespaced_stateful_set_status_with_http_info(self, name, namespace, body, **kwargs):
    """
    partially update status of the specified StatefulSet
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.patch_namespaced_stateful_set_status_with_http_info(name, namespace, body, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str name: name of the StatefulSet (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param UnversionedPatch body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V1beta1StatefulSet
             If the method is called asynchronously,
             returns the request thread.
    """
    # Whitelist of endpoint-specific kwargs plus framework pass-throughs.
    all_params = ['name', 'namespace', 'body', 'pretty']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() deliberately folds self, the explicit arguments and
    # 'kwargs' into one dict; renaming any local defined above would
    # change the contents of `params`.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method patch_namespaced_stateful_set_status" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'name' is set
    if ('name' not in params) or (params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `patch_namespaced_stateful_set_status`")
    # verify the required parameter 'namespace' is set
    if ('namespace' not in params) or (params['namespace'] is None):
        raise ValueError("Missing the required parameter `namespace` when calling `patch_namespaced_stateful_set_status`")
    # verify the required parameter 'body' is set
    if ('body' not in params) or (params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `patch_namespaced_stateful_set_status`")

    collection_formats = {}

    # Targets the /status subresource of the named StatefulSet.
    resource_path = '/apis/apps/v1beta1/namespaces/{namespace}/statefulsets/{name}/status'.replace('{format}', 'json')
    path_params = {}
    if 'name' in params:
        path_params['name'] = params['name']
    if 'namespace' in params:
        path_params['namespace'] = params['namespace']

    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']

    header_params = {}

    form_params = []
    local_var_files = {}

    # The patch document is sent as the request body.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])

    # HTTP header `Content-Type` -- restricted to the patch media types
    # accepted by the Kubernetes API server.
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json'])

    # Authentication setting
    auth_settings = ['BearerToken']

    return self.api_client.call_api(resource_path, 'PATCH',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='V1beta1StatefulSet',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def read_namespaced_stateful_set(self, name, namespace, **kwargs):
    """
    read the specified StatefulSet

    Synchronous by default; pass a `callback` function in kwargs to run
    the request asynchronously and receive the response via the callback.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str name: name of the StatefulSet (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'
    :param bool export: Should this value be exported. Export strips fields that a user can not specify.
    :return: V1beta1StatefulSet, or the request thread when called asynchronously.
    """
    # Only the deserialized object is wanted; the helper's return value
    # (data for sync calls, thread for async) is passed straight through.
    kwargs['_return_http_data_only'] = True
    return self.read_namespaced_stateful_set_with_http_info(
        name, namespace, **kwargs)
def read_namespaced_stateful_set_with_http_info(self, name, namespace, **kwargs):
    """
    read the specified StatefulSet
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.read_namespaced_stateful_set_with_http_info(name, namespace, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str name: name of the StatefulSet (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'
    :param bool export: Should this value be exported. Export strips fields that a user can not specify.
    :return: V1beta1StatefulSet
             If the method is called asynchronously,
             returns the request thread.
    """
    # Whitelist of endpoint-specific kwargs plus framework pass-throughs.
    all_params = ['name', 'namespace', 'pretty', 'exact', 'export']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() deliberately folds self, the explicit arguments and
    # 'kwargs' into one dict; renaming any local defined above would
    # change the contents of `params`.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method read_namespaced_stateful_set" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'name' is set
    if ('name' not in params) or (params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `read_namespaced_stateful_set`")
    # verify the required parameter 'namespace' is set
    if ('namespace' not in params) or (params['namespace'] is None):
        raise ValueError("Missing the required parameter `namespace` when calling `read_namespaced_stateful_set`")

    collection_formats = {}

    resource_path = '/apis/apps/v1beta1/namespaces/{namespace}/statefulsets/{name}'.replace('{format}', 'json')
    path_params = {}
    if 'name' in params:
        path_params['name'] = params['name']
    if 'namespace' in params:
        path_params['namespace'] = params['namespace']

    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']
    if 'exact' in params:
        query_params['exact'] = params['exact']
    if 'export' in params:
        query_params['export'] = params['export']

    header_params = {}

    form_params = []
    local_var_files = {}

    # GET request: no body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['*/*'])

    # Authentication setting
    auth_settings = ['BearerToken']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='V1beta1StatefulSet',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def read_namespaced_stateful_set_status(self, name, namespace, **kwargs):
"""
read status of the specified StatefulSet
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.read_namespaced_stateful_set_status(name, namespace, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the StatefulSet (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1StatefulSet
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.read_namespaced_stateful_set_status_with_http_info(name, namespace, **kwargs)
else:
(data) = self.read_namespaced_stateful_set_status_with_http_info(name, namespace, **kwargs)
return data
    def read_namespaced_stateful_set_status_with_http_info(self, name, namespace, **kwargs):
        """
        read status of the specified StatefulSet
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.read_namespaced_stateful_set_status_with_http_info(name, namespace, callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str name: name of the StatefulSet (required)
        :param str namespace: object name and auth scope, such as for teams and projects (required)
        :param str pretty: If 'true', then the output is pretty printed.
        :return: V1beta1StatefulSet
            If the method is called asynchronously,
            returns the request thread.
        """
        # Keyword arguments accepted by this endpoint, plus the internal
        # flags understood by ApiClient.call_api.
        all_params = ['name', 'namespace', 'pretty']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of local names (self, name, namespace, kwargs).  The
        # generated code below reads arguments generically out of this dict,
        # so the statement order here must not change.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method read_namespaced_stateful_set_status" % key
                )
            # Flatten accepted kwargs into the params dict.
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'name' is set
        if ('name' not in params) or (params['name'] is None):
            raise ValueError("Missing the required parameter `name` when calling `read_namespaced_stateful_set_status`")
        # verify the required parameter 'namespace' is set
        if ('namespace' not in params) or (params['namespace'] is None):
            raise ValueError("Missing the required parameter `namespace` when calling `read_namespaced_stateful_set_status`")
        collection_formats = {}
        # URL template for the StatefulSet /status subresource.
        resource_path = '/apis/apps/v1beta1/namespaces/{namespace}/statefulsets/{name}/status'.replace('{format}', 'json')
        path_params = {}
        if 'name' in params:
            path_params['name'] = params['name']
        if 'namespace' in params:
            path_params['namespace'] = params['namespace']
        query_params = {}
        if 'pretty' in params:
            query_params['pretty'] = params['pretty']
        header_params = {}
        form_params = []
        local_var_files = {}
        # GET carries no request body.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['*/*'])
        # Authentication setting
        auth_settings = ['BearerToken']
        # Delegate the actual HTTP GET to the shared API client.
        return self.api_client.call_api(resource_path, 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='V1beta1StatefulSet',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def replace_namespaced_stateful_set(self, name, namespace, body, **kwargs):
"""
replace the specified StatefulSet
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.replace_namespaced_stateful_set(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the StatefulSet (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1beta1StatefulSet body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1StatefulSet
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.replace_namespaced_stateful_set_with_http_info(name, namespace, body, **kwargs)
else:
(data) = self.replace_namespaced_stateful_set_with_http_info(name, namespace, body, **kwargs)
return data
    def replace_namespaced_stateful_set_with_http_info(self, name, namespace, body, **kwargs):
        """
        replace the specified StatefulSet
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.replace_namespaced_stateful_set_with_http_info(name, namespace, body, callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str name: name of the StatefulSet (required)
        :param str namespace: object name and auth scope, such as for teams and projects (required)
        :param V1beta1StatefulSet body: (required)
        :param str pretty: If 'true', then the output is pretty printed.
        :return: V1beta1StatefulSet
            If the method is called asynchronously,
            returns the request thread.
        """
        # Keyword arguments accepted by this endpoint, plus the internal
        # flags understood by ApiClient.call_api.
        all_params = ['name', 'namespace', 'body', 'pretty']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of local names (self, name, namespace, body, kwargs); the
        # generated code below reads arguments generically out of this dict,
        # so the statement order here must not change.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method replace_namespaced_stateful_set" % key
                )
            # Flatten accepted kwargs into the params dict.
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'name' is set
        if ('name' not in params) or (params['name'] is None):
            raise ValueError("Missing the required parameter `name` when calling `replace_namespaced_stateful_set`")
        # verify the required parameter 'namespace' is set
        if ('namespace' not in params) or (params['namespace'] is None):
            raise ValueError("Missing the required parameter `namespace` when calling `replace_namespaced_stateful_set`")
        # verify the required parameter 'body' is set
        if ('body' not in params) or (params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `replace_namespaced_stateful_set`")
        collection_formats = {}
        # URL template for the named StatefulSet resource.
        resource_path = '/apis/apps/v1beta1/namespaces/{namespace}/statefulsets/{name}'.replace('{format}', 'json')
        path_params = {}
        if 'name' in params:
            path_params['name'] = params['name']
        if 'namespace' in params:
            path_params['namespace'] = params['namespace']
        query_params = {}
        if 'pretty' in params:
            query_params['pretty'] = params['pretty']
        header_params = {}
        form_params = []
        local_var_files = {}
        # The replacement StatefulSet object travels in the request body.
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['*/*'])
        # Authentication setting
        auth_settings = ['BearerToken']
        # Delegate the actual HTTP PUT to the shared API client.
        return self.api_client.call_api(resource_path, 'PUT',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='V1beta1StatefulSet',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def replace_namespaced_stateful_set_status(self, name, namespace, body, **kwargs):
"""
replace status of the specified StatefulSet
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.replace_namespaced_stateful_set_status(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the StatefulSet (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1beta1StatefulSet body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1StatefulSet
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.replace_namespaced_stateful_set_status_with_http_info(name, namespace, body, **kwargs)
else:
(data) = self.replace_namespaced_stateful_set_status_with_http_info(name, namespace, body, **kwargs)
return data
    def replace_namespaced_stateful_set_status_with_http_info(self, name, namespace, body, **kwargs):
        """
        replace status of the specified StatefulSet
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.replace_namespaced_stateful_set_status_with_http_info(name, namespace, body, callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str name: name of the StatefulSet (required)
        :param str namespace: object name and auth scope, such as for teams and projects (required)
        :param V1beta1StatefulSet body: (required)
        :param str pretty: If 'true', then the output is pretty printed.
        :return: V1beta1StatefulSet
            If the method is called asynchronously,
            returns the request thread.
        """
        # Keyword arguments accepted by this endpoint, plus the internal
        # flags understood by ApiClient.call_api.
        all_params = ['name', 'namespace', 'body', 'pretty']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of local names (self, name, namespace, body, kwargs); the
        # generated code below reads arguments generically out of this dict,
        # so the statement order here must not change.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method replace_namespaced_stateful_set_status" % key
                )
            # Flatten accepted kwargs into the params dict.
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'name' is set
        if ('name' not in params) or (params['name'] is None):
            raise ValueError("Missing the required parameter `name` when calling `replace_namespaced_stateful_set_status`")
        # verify the required parameter 'namespace' is set
        if ('namespace' not in params) or (params['namespace'] is None):
            raise ValueError("Missing the required parameter `namespace` when calling `replace_namespaced_stateful_set_status`")
        # verify the required parameter 'body' is set
        if ('body' not in params) or (params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `replace_namespaced_stateful_set_status`")
        collection_formats = {}
        # URL template for the StatefulSet /status subresource.
        resource_path = '/apis/apps/v1beta1/namespaces/{namespace}/statefulsets/{name}/status'.replace('{format}', 'json')
        path_params = {}
        if 'name' in params:
            path_params['name'] = params['name']
        if 'namespace' in params:
            path_params['namespace'] = params['namespace']
        query_params = {}
        if 'pretty' in params:
            query_params['pretty'] = params['pretty']
        header_params = {}
        form_params = []
        local_var_files = {}
        # The replacement status object travels in the request body.
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['*/*'])
        # Authentication setting
        auth_settings = ['BearerToken']
        # Delegate the actual HTTP PUT to the shared API client.
        return self.api_client.call_api(resource_path, 'PUT',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='V1beta1StatefulSet',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
| 48.993324 | 346 | 0.606483 |
acf62913d3b68efc46089ea5172c3625673fe14b | 10,717 | py | Python | legal-api/src/legal_api/models/business.py | areyeslo/lear | 5940437b6c37710be0607a570a5977a0c0fed9d2 | [
"Apache-2.0"
] | null | null | null | legal-api/src/legal_api/models/business.py | areyeslo/lear | 5940437b6c37710be0607a570a5977a0c0fed9d2 | [
"Apache-2.0"
] | null | null | null | legal-api/src/legal_api/models/business.py | areyeslo/lear | 5940437b6c37710be0607a570a5977a0c0fed9d2 | [
"Apache-2.0"
] | null | null | null | # Copyright © 2019 Province of British Columbia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module holds all of the basic data about a business.
The Business class and Schema are held in this module
"""
from datetime import datetime
from enum import Enum
import datedelta
from sqlalchemy.exc import OperationalError, ResourceClosedError
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import backref
from legal_api.exceptions import BusinessException
from .db import db # noqa: I001
from .address import Address # noqa: F401 pylint: disable=unused-import; needed by the SQLAlchemy relationship
from .alias import Alias # noqa: F401 pylint: disable=unused-import; needed by the SQLAlchemy relationship
from .filing import Filing # noqa: F401 pylint: disable=unused-import; needed by the SQLAlchemy backref
from .office import Office # noqa: F401 pylint: disable=unused-import; needed by the SQLAlchemy relationship
from .party_role import PartyRole # noqa: F401 pylint: disable=unused-import; needed by the SQLAlchemy relationship
from .resolution import Resolution # noqa: F401 pylint: disable=unused-import; needed by the SQLAlchemy backref
from .share_class import ShareClass # noqa: F401 pylint: disable=unused-import
from .user import User # noqa: F401 pylint: disable=unused-import; needed by the SQLAlchemy backref
class Business(db.Model):  # pylint: disable=too-many-instance-attributes
    """This class manages all of the base data about a business.
    A business is base form of any entity that can interact directly
    with people and other businesses.
    Businesses can be sole-proprietors, corporations, societies, etc.
    """
    class LegalTypes(Enum):
        """Render an Enum of the Business Legal Types."""
        COOP = 'CP'    # cooperative
        COMP = 'BC'    # BC company
        BCOMP = 'BEN'  # benefit company
    # NOTE(review): presumably enables history/versioning of this model
    # (e.g. SQLAlchemy-Continuum); confirm against the project's db setup.
    __versioned__ = {}
    __tablename__ = 'businesses'
    id = db.Column(db.Integer, primary_key=True)
    last_modified = db.Column('last_modified', db.DateTime(timezone=True), default=datetime.utcnow)
    # Bookkeeping for syncing with an external (remote) ledger.
    last_ledger_id = db.Column('last_ledger_id', db.Integer)
    last_remote_ledger_id = db.Column('last_remote_ledger_id', db.Integer, default=0)
    last_ledger_timestamp = db.Column('last_ledger_timestamp', db.DateTime(timezone=True), default=datetime.utcnow)
    # Annual report (AR) / annual general meeting (AGM) tracking.
    last_ar_date = db.Column('last_ar_date', db.DateTime(timezone=True))
    last_agm_date = db.Column('last_agm_date', db.DateTime(timezone=True))
    legal_name = db.Column('legal_name', db.String(1000), index=True)
    legal_type = db.Column('legal_type', db.String(10))
    founding_date = db.Column('founding_date', db.DateTime(timezone=True), default=datetime.utcnow)
    # A non-null dissolution_date marks the business as no longer active.
    dissolution_date = db.Column('dissolution_date', db.DateTime(timezone=True), default=None)
    # Registrar-assigned identifier; guarded by the `identifier` hybrid
    # property below, which validates the format before assignment.
    _identifier = db.Column('identifier', db.String(10), index=True)
    tax_id = db.Column('tax_id', db.String(15), index=True)
    fiscal_year_end_date = db.Column('fiscal_year_end_date', db.DateTime(timezone=True), default=datetime.utcnow)
    restriction_ind = db.Column('restriction_ind', db.Boolean, unique=False, default=False)
    last_ar_year = db.Column('last_ar_year', db.Integer)
    submitter_userid = db.Column('submitter_userid', db.Integer, db.ForeignKey('users.id'))
    submitter = db.relationship('User', backref=backref('submitter', uselist=False), foreign_keys=[submitter_userid])
    # relationships
    filings = db.relationship('Filing', lazy='dynamic')
    offices = db.relationship('Office', lazy='dynamic', cascade='all, delete, delete-orphan')
    party_roles = db.relationship('PartyRole', lazy='dynamic')
    share_classes = db.relationship('ShareClass', lazy='dynamic', cascade='all, delete, delete-orphan')
    aliases = db.relationship('Alias', lazy='dynamic')
    resolutions = db.relationship('Resolution', lazy='dynamic')
    @hybrid_property
    def identifier(self):
        """Return the unique business identifier."""
        return self._identifier
    @identifier.setter
    def identifier(self, value: str):
        """Set the business identifier.

        :raises BusinessException: (406) if the value fails
            Business.validate_identifier.
        """
        if Business.validate_identifier(value):
            self._identifier = value
        else:
            raise BusinessException('invalid-identifier-format', 406)
    @property
    def next_anniversary(self):
        """Retrieve the next anniversary date for which an AR filing is due."""
        # Measured from the last AR filing if one exists, otherwise from
        # the founding date; datedelta gives a calendar-aware one-year step.
        last_anniversary = self.founding_date
        if self.last_ar_date:
            last_anniversary = self.last_ar_date
        return last_anniversary + datedelta.datedelta(years=1)
    @property
    def mailing_address(self):
        """Return the mailing address.

        If the business has a registered office, its mailing addresses take
        precedence; otherwise fall back to addresses linked directly to the
        business row.  Returns an un-executed query in both cases.
        """
        registered_office = db.session.query(Office).filter(Office.business_id == self.id).\
            filter(Office.office_type == 'registeredOffice').one_or_none()
        if registered_office:
            return registered_office.addresses.filter(Address.address_type == 'mailing')
        return db.session.query(Address).filter(Address.business_id == self.id). \
            filter(Address.address_type == Address.MAILING)
    @property
    def delivery_address(self):
        """Return the delivery address.

        Same precedence as `mailing_address`: registered-office addresses
        first, then addresses linked directly to the business row.
        """
        registered_office = db.session.query(Office).filter(Office.business_id == self.id).\
            filter(Office.office_type == 'registeredOffice').one_or_none()
        if registered_office:
            return registered_office.addresses.filter(Address.address_type == 'delivery')
        return db.session.query(Address).filter(Address.business_id == self.id).\
            filter(Address.address_type == Address.DELIVERY)
    def save(self):
        """Render a Business to the local cache."""
        db.session.add(self)
        db.session.commit()
    def delete(self):
        """Businesses cannot be deleted.
        TODO: Hook SQLAlchemy to block deletes
        """
        # Only persists the row when a dissolution date is set; never
        # removes the record.
        if self.dissolution_date:
            self.save()
        return self
    def json(self):
        """Return the Business as a json object.
        None fields are not included.
        """
        d = {
            'foundingDate': self.founding_date.isoformat(),
            'identifier': self.identifier,
            'lastModified': self.last_modified.isoformat(),
            'lastAnnualReport': datetime.date(self.last_ar_date).isoformat() if self.last_ar_date else '',
            'nextAnnualReport': self.next_anniversary.isoformat(),
            'lastAnnualGeneralMeetingDate': datetime.date(self.last_agm_date).isoformat() if self.last_agm_date else '',
            'lastLedgerTimestamp': self.last_ledger_timestamp.isoformat(),
            'legalName': self.legal_name,
            'legalType': self.legal_type,
            'hasRestrictions': self.restriction_ind
        }
        # if self.last_remote_ledger_timestamp:
        #     # this is not a typo, we want the external facing view object ledger timestamp to be the remote one
        #     d['last_ledger_timestamp'] = self.last_remote_ledger_timestamp.isoformat()
        # else:
        #     d['last_ledger_timestamp'] = None
        # Optional fields: only emitted when set.
        if self.dissolution_date:
            d['dissolutionDate'] = datetime.date(self.dissolution_date).isoformat()
        if self.fiscal_year_end_date:
            d['fiscalYearEndDate'] = datetime.date(self.fiscal_year_end_date).isoformat()
        if self.tax_id:
            d['taxId'] = self.tax_id
        return d
    @classmethod
    def find_by_legal_name(cls, legal_name: str = None):
        """Given a legal_name, this will return an Active Business."""
        business = None
        if legal_name:
            try:
                # Active == no dissolution date recorded.
                business = cls.query.filter_by(legal_name=legal_name).\
                    filter_by(dissolution_date=None).one_or_none()
            except (OperationalError, ResourceClosedError):
                # TODO: This usually means a misconfigured database.
                # This is not a business error if the cache is unavailable.
                return None
        return business
    @classmethod
    def find_by_identifier(cls, identifier: str = None):
        """Return a Business by the id assigned by the Registrar."""
        business = None
        if identifier:
            business = cls.query.filter_by(identifier=identifier).one_or_none()
        return business
    @classmethod
    def find_by_internal_id(cls, internal_id: int = None):
        """Return a Business by the internal id."""
        business = None
        if internal_id:
            business = cls.query.filter_by(id=internal_id).one_or_none()
        return business
    @classmethod
    def get_all_by_no_tax_id(cls):
        """Return all businesses with no tax_id.

        COOP businesses are excluded because they are not expected to
        carry a tax id.
        """
        no_tax_id_types = Business.LegalTypes.COOP.value
        tax_id_types = [x.value for x in Business.LegalTypes]
        tax_id_types.remove(no_tax_id_types)
        businesses = cls.query.filter(Business.legal_type.in_(tax_id_types)).filter_by(tax_id=None).all()
        return businesses
    @classmethod
    def get_filing_by_id(cls, business_identifier: int, filing_id: str):
        """Return the filings for a specific business and filing_id."""
        filing = db.session.query(Business, Filing). \
            filter(Business.id == Filing.business_id). \
            filter(Business.identifier == business_identifier). \
            filter(Filing.id == filing_id). \
            one_or_none()
        # The join yields (Business, Filing); callers only want the Filing.
        return None if not filing else filing[1]
    @staticmethod
    def validate_identifier(identifier: str) -> bool:
        """Validate the identifier meets the Registry naming standards.
        All legal entities with BC Reg are PREFIX + 7 digits
        CP = BC COOPS prefix;
        XCP = Expro COOP prefix
        Examples:
            ie: CP1234567 or XCP1234567
        """
        # Name Request identifiers bypass the corp-number format check.
        if identifier[:2] == 'NR':
            return True
        if len(identifier) < 9:
            return False
        try:
            # The last seven characters must be a non-zero number.
            d = int(identifier[-7:])
            if d == 0:
                return False
        except ValueError:
            return False
        # TODO This is not correct for entity types that are not Coops
        if identifier[:-7] not in ('CP', 'XCP', 'BC'):
            return False
        return True
| 42.192913 | 120 | 0.675376 |
acf62a297b43ad133ff98ad0fea14abb1fa90f08 | 784 | py | Python | ncl/simplecondition.py | MichaelBittencourt/NCL-Generator-API | 8eecf2ea4948354fae9f64b68da2f24ad0663d60 | [
"MIT"
] | 1 | 2020-06-26T09:59:27.000Z | 2020-06-26T09:59:27.000Z | ncl/simplecondition.py | MichaelBittencourt/NCL-Generator-API | 8eecf2ea4948354fae9f64b68da2f24ad0663d60 | [
"MIT"
] | null | null | null | ncl/simplecondition.py | MichaelBittencourt/NCL-Generator-API | 8eecf2ea4948354fae9f64b68da2f24ad0663d60 | [
"MIT"
] | 1 | 2020-01-07T23:16:11.000Z | 2020-01-07T23:16:11.000Z | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2019 Michael Bittencourt <mchl.bittencourt@gmail.com>
#
# Distributed under terms of the MIT license.
"""
"""
from ncl.condition import Condition
from ncl.simple import Simple
class SimpleCondition(Simple, Condition):
    """An NCL <simpleCondition> element: a Simple role that triggers a link."""

    def __init__(self, role, delay=None, eventType=None, key=None, transition=None, min=None, max=None, qualifier=None):
        super().__init__(role, eventType, delay, min, max, qualifier)
        self._setTagName("simpleCondition")
        self._appendAttributes(["key", "transition"])
        # Only emit the optional attributes that were actually supplied.
        for attribute, value in (("key", key), ("transition", transition)):
            if value is not None:
                self.set(attribute, value)
| 28 | 120 | 0.667092 |
acf62a52ba09377d4ff8b3e79e5a9ed11b8ab719 | 720 | py | Python | services/core-api/app/api/now_applications/models/unit_type.py | parc-jason/mds | 8f181a429442208a061ed72065b71e6c2bd0f76f | [
"Apache-2.0"
] | null | null | null | services/core-api/app/api/now_applications/models/unit_type.py | parc-jason/mds | 8f181a429442208a061ed72065b71e6c2bd0f76f | [
"Apache-2.0"
] | null | null | null | services/core-api/app/api/now_applications/models/unit_type.py | parc-jason/mds | 8f181a429442208a061ed72065b71e6c2bd0f76f | [
"Apache-2.0"
] | null | null | null | import uuid, datetime
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import validates
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.schema import FetchedValue
from app.extensions import db
from app.api.utils.models_mixins import AuditMixin, Base
class UnitType(AuditMixin, Base):
    """Lookup model for unit types, backed by the `unit_type` table.

    Rows carry a code plus long/short descriptions and an active flag;
    audit columns come from AuditMixin.
    """
    __tablename__ = 'unit_type'
    # Primary key: the unit type code string.
    unit_type_code = db.Column(db.String, primary_key=True)
    description = db.Column(db.String, nullable=False)
    short_description = db.Column(db.String, nullable=False)
    # Soft-delete flag; default comes from the database (FetchedValue).
    active_ind = db.Column(db.Boolean, nullable=False, server_default=FetchedValue())
    @classmethod
    def get_active(cls):
        """Return all unit types whose active flag is set."""
        return cls.query.filter_by(active_ind=True).all()
acf62b2ed0139c0b3c90780755fce3c1296d7ada | 832 | py | Python | sdks/python/test/test_Branch.py | Brantone/appcenter-sdks | eeb063ecf79908b6e341fb00196d2cd9dc8f3262 | [
"MIT"
] | null | null | null | sdks/python/test/test_Branch.py | Brantone/appcenter-sdks | eeb063ecf79908b6e341fb00196d2cd9dc8f3262 | [
"MIT"
] | 6 | 2019-10-23T06:38:53.000Z | 2022-01-22T07:57:58.000Z | sdks/python/test/test_Branch.py | Brantone/appcenter-sdks | eeb063ecf79908b6e341fb00196d2cd9dc8f3262 | [
"MIT"
] | 2 | 2019-10-23T06:31:05.000Z | 2021-08-21T17:32:47.000Z | # coding: utf-8
"""
App Center Client
Microsoft Visual Studio App Center API # noqa: E501
OpenAPI spec version: preview
Contact: benedetto.abbenanti@gmail.com
Project Repository: https://github.com/b3nab/appcenter-sdks
"""
from __future__ import absolute_import
import unittest
import appcenter_sdk
from Branch.clsBranch import Branch # noqa: E501
from appcenter_sdk.rest import ApiException
class TestBranch(unittest.TestCase):
    """Branch unit test stubs"""

    def setUp(self):
        """No fixtures are required for these stubs."""

    def tearDown(self):
        """Nothing to clean up."""

    def testBranch(self):
        """Test Branch"""
        # FIXME: construct object with mandatory attributes with example values
        # model = appcenter_sdk.models.clsBranch.Branch()  # noqa: E501


if __name__ == '__main__':
    unittest.main()
| 20.8 | 79 | 0.6875 |
acf62b2ef7fc69950091125932c001351541a1e8 | 922 | py | Python | IA/Python/8/8.1/7.py | worthl3ss/random-small | ffb60781f57eb865acbd81aaa07056046bad32fe | [
"MIT"
] | 1 | 2022-02-23T12:47:00.000Z | 2022-02-23T12:47:00.000Z | IA/Python/8/8.1/7.py | worthl3ss/random-small | ffb60781f57eb865acbd81aaa07056046bad32fe | [
"MIT"
] | null | null | null | IA/Python/8/8.1/7.py | worthl3ss/random-small | ffb60781f57eb865acbd81aaa07056046bad32fe | [
"MIT"
] | null | null | null | class Player:
MAX_LEVEL = 40
def __init__(self, x, y):
self.name = x
self.health = y
self.level = 1
def __str__(self):
return "<{}:{}, {} HP>".format(self.name, self.level, self.health)
def __repr__(self):
return "<{}:{}, {} HP>".format(self.name, self.level, self.health)
def find_strongest(l):
strongest = l[0]
for p in l:
if p.level>strongest.level:
strongest=p
return p
def compare(p1, p2):
if p1.level > p2.level:
return 1
if p1.level < p2.level:
return -1
return 0
def take_hit(self, damage):
self.health-=damage
def heal(self, amount):
self.health+=amount
def level_up(self):
self.level+=1
self.health = 100
def describe(self):
print("<{}:{}, {} HP>".format(self.name, self.level, self.health))
| 28.8125 | 74 | 0.52603 |
acf62bf0058bc4eeae41aef905052f2d2a88013a | 3,073 | py | Python | code/arrowhead/consumer.py | kottz/D7039E | d86848a037a07e97122c92e3c80c980c58c41d52 | [
"MIT"
] | null | null | null | code/arrowhead/consumer.py | kottz/D7039E | d86848a037a07e97122c92e3c80c980c58c41d52 | [
"MIT"
] | 72 | 2020-09-15T13:32:02.000Z | 2021-01-01T08:06:16.000Z | code/arrowhead/consumer.py | kottz/D7039E | d86848a037a07e97122c92e3c80c980c58c41d52 | [
"MIT"
] | 1 | 2020-11-16T16:06:15.000Z | 2020-11-16T16:06:15.000Z | import json
from arrowhead_core_systems import Arrowhead_system
from requests_pkcs12 import get, post
import time
# TODO add code from core_systems.py to take care of all arrowhead logic.
with open("config_template.json") as json_file:
config = json.load(json_file)[0]
consumer_json = config["consumer_json"]
consumer_name = config["consumer_json"]["systemName"]
provider_json = config["provider_json"]
provider_name = config["provider_json"]["systemName"]
pick_up_service_json = config["pick_up_service_json"]
pick_up_service_definition = config["pick_up_service_json"]["serviceDefinition"]
place_service_json = config["place_service_json"]
place_service_definition = config["place_service_json"]["serviceDefinition"]
direction_service_json = config["direction_service_json"]
direction_service_definition = config["direction_service_json"]["serviceDefinition"]
test_consumer = Arrowhead_system(
"/home/albin/Documents/core-java-spring/certificates/testcloud2/sysop.p12", "123456")
test_consumer.register_system(consumer_json)
test_consumer.register_service(pick_up_service_json)
test_consumer.add_intracloud_authorization(pick_up_service_definition)
test_consumer.create_orchestration_store_entry(pick_up_service_definition)
test_consumer.register_service(place_service_json)
test_consumer.add_intracloud_authorization(place_service_definition)
test_consumer.create_orchestration_store_entry(place_service_definition)
test_consumer.register_service(direction_service_json)
test_consumer.add_intracloud_authorization(direction_service_definition)
test_consumer.create_orchestration_store_entry(direction_service_definition)
test_consumer.start_orchestration_based_on_id(
consumer_name, provider_name).json()
provider_ip = test_consumer.start_orchestration_based_on_id(
consumer_name, provider_name).json()["response"][0]["provider"]["address"]
proivder_port = test_consumer.start_orchestration_based_on_id(
consumer_name, provider_name).json()["response"][0]["provider"]["port"]
service_uri_pick_up = test_consumer.start_orchestration_based_on_id(
consumer_name, provider_name).json()["response"][0]["serviceUri"]
service_uri_place = test_consumer.start_orchestration_based_on_id(
consumer_name, provider_name).json()["response"][1]["serviceUri"]
service_uri_direction = test_consumer.start_orchestration_based_on_id(
consumer_name, provider_name).json()["response"][2]["serviceUri"]
url = 'http://' + provider_ip + ":" + str(proivder_port)
# TODO flask api
direction_json = {
'direction': "left"
}
# print(post(url + service_uri_pick_up,
# verify=False, json=json
# ).json())
print(post(url + service_uri_direction,
verify=False, json=direction_json
).json())
while(True):
direction_keyboard_input = input("arrowhead input requested. What to do? ")
direction_json = {
'direction': direction_keyboard_input
}
print(post(url + service_uri_direction,
verify=False, json=direction_json
).json())
| 40.973333 | 89 | 0.785552 |
acf62ec9b10ebac4597f56259145437c9425b95e | 2,491 | py | Python | dev/patch_doctest.py | odidev/fastnumbers | 1d27d2fe21d981028a4dfa576b0ba93074bb369d | [
"MIT"
] | 84 | 2015-03-21T19:10:17.000Z | 2022-02-21T04:14:54.000Z | dev/patch_doctest.py | odidev/fastnumbers | 1d27d2fe21d981028a4dfa576b0ba93074bb369d | [
"MIT"
] | 49 | 2015-05-21T16:47:13.000Z | 2022-03-15T05:23:40.000Z | dev/patch_doctest.py | odidev/fastnumbers | 1d27d2fe21d981028a4dfa576b0ba93074bb369d | [
"MIT"
] | 13 | 2016-08-27T02:45:20.000Z | 2022-02-27T14:32:19.000Z | """
Copies doctest.py from the stdlib to the current directory,
and modifies it so that
a) It will load "*.so" files as modules just like a "*.py" file
b) It recognizes functions defined in "*.so" files
c) Remove the configuration extension from the "*.so" files (on Python3)
With these enhancements, doctest can be run on a C python extension module.
"""
from __future__ import print_function

import doctest
import inspect
import sys

# Locate doctest.py inside the running interpreter's standard library.
dt_location = inspect.getsourcefile(doctest)

# Read the module's source into memory so we can patch it textually.
with open(dt_location) as fl:
    doctest_str = fl.read()

# Let's add the glob module.
# Also define a function to detect if we could import this module name.
doctest_str = doctest_str.replace(
    "import __future__",
    "import __future__\nimport glob\n"
    "def can_import_module(name):\n"
    "    try:\n"
    "        __import__(name)\n"
    "    except ImportError:\n"
    "        return False\n"
    "    else:\n"
    "        return True\n",
)

# Add a search for the .so extension when inspecting the input files
# so that extension modules will be loaded properly.
doctest_str = doctest_str.replace(
    'if filename.endswith(".py"):',
    'if filename.endswith((".py", ".so", ".pyd")) or can_import_module(filename):',
)

# inspect.isfunction does not work for functions written in C,
# so we have to replace that with an inspect.isbuiltin check when
# looking for functions with docstrings.
doctest_str = doctest_str.replace(
    "if ((inspect.isfunction(val) or inspect.isclass(val)) and",
    "if ((inspect.isfunction(val) or inspect.isbuiltin(val) "
    "or inspect.isclass(val)) and",
)

# Replace the configuration extension with nothing on Python 3.
# FIX: the original tested `sys.version[0] == "3"`; sys.version is a
# human-readable string and indexing it is fragile -- use the structured
# sys.version_info tuple instead, as recommended by the sys module docs.
if sys.version_info[0] == 3:
    doctest_str = doctest_str.replace(
        "m = __import__(filename[:-3])",
        'm = __import__(filename[:-3] if filename.endswith(".py") '
        'else filename.replace(get_config_var("EXT_SUFFIX"), ""))',
    )
    # We need to import the get_config_var variable.
    doctest_str = doctest_str.replace(
        "def _test():", "from sysconfig import get_config_var\ndef _test():"
    )
else:
    doctest_str = doctest_str.replace(
        "m = __import__(filename[:-3])",
        "m = __import__(filename[:-3] "
        'if filename.endswith((".py", ".so", ".pyd")) else filename)',
    )

# Open up the new output file and write the modified input to it.
with open("doctest.py", "w") as fl:
    print(doctest_str, file=fl, end="")
acf62ee3fefab4057d04bc8fe969df03b13221f2 | 3,870 | py | Python | tests/components/wallbox/test_config_flow.py | yeahme49/core | e75e40f26e84e7ce04993afef7046cec4f40a11a | [
"Apache-2.0"
] | 5 | 2017-01-26T16:33:09.000Z | 2018-07-20T13:50:47.000Z | tests/components/wallbox/test_config_flow.py | yeahme49/core | e75e40f26e84e7ce04993afef7046cec4f40a11a | [
"Apache-2.0"
] | 66 | 2020-08-05T07:21:39.000Z | 2022-03-31T06:02:16.000Z | tests/components/wallbox/test_config_flow.py | frankhildebrandt/home-assistant | fd45a07677495d2c92467011e93b3e0492185ed2 | [
"Apache-2.0"
] | 2 | 2021-03-02T10:36:33.000Z | 2021-04-10T07:57:03.000Z | """Test the Wallbox config flow."""
from http import HTTPStatus
import json
import requests_mock
from homeassistant import config_entries, data_entry_flow
from homeassistant.components.wallbox import config_flow
from homeassistant.components.wallbox.const import DOMAIN
from homeassistant.core import HomeAssistant
# Canned charger-status payload mirroring the Wallbox API response shape.
# NOTE(review): appears unused within the visible module -- confirm against
# the full test suite before removing.
test_response = json.loads(
    '{"charging_power": 0,"max_available_power": 25,"charging_speed": 0,"added_range": 372,"added_energy": 44.697}'
)
async def test_show_set_form(hass: HomeAssistant) -> None:
    """Starting the user step with no input must serve the setup form."""
    setup_flow = config_flow.ConfigFlow()
    setup_flow.hass = hass
    form_result = await setup_flow.async_step_user(user_input=None)
    assert form_result["step_id"] == "user"
    assert form_result["type"] == data_entry_flow.RESULT_TYPE_FORM
async def test_form_cannot_authenticate(hass):
    """Test we handle an authentication failure from the Wallbox API."""
    # Begin the config flow at the user step so a flow_id exists to configure.
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    with requests_mock.Mocker() as mock_request:
        # The token endpoint rejects the credentials with 403 FORBIDDEN,
        # simulating bad username/password.
        mock_request.get(
            "https://api.wall-box.com/auth/token/user",
            text='{"jwt":"fakekeyhere","user_id":12345,"ttl":145656758,"error":false,"status":200}',
            status_code=HTTPStatus.FORBIDDEN,
        )
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {
                "station": "12345",
                "username": "test-username",
                "password": "test-password",
            },
        )
    # The flow should re-show the form and flag the credentials as invalid.
    assert result2["type"] == "form"
    assert result2["errors"] == {"base": "invalid_auth"}
async def test_form_cannot_connect(hass):
    """Test we handle cannot connect error."""
    # Begin the config flow at the user step so a flow_id exists to configure.
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    with requests_mock.Mocker() as mock_request:
        # Authentication succeeds ...
        mock_request.get(
            "https://api.wall-box.com/auth/token/user",
            text='{"jwt":"fakekeyhere","user_id":12345,"ttl":145656758,"error":false,"status":200}',
            status_code=HTTPStatus.OK,
        )
        # ... but the charger-status endpoint returns 404, simulating an
        # unreachable/unknown station.
        mock_request.get(
            "https://api.wall-box.com/chargers/status/12345",
            text='{"Temperature": 100, "Location": "Toronto", "Datetime": "2020-07-23", "Units": "Celsius"}',
            status_code=HTTPStatus.NOT_FOUND,
        )
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {
                "station": "12345",
                "username": "test-username",
                "password": "test-password",
            },
        )
    # The flow should re-show the form with a connection error.
    assert result2["type"] == "form"
    assert result2["errors"] == {"base": "cannot_connect"}
async def test_form_validate_input(hass):
    """Test a fully successful flow creating a config entry."""
    # Begin the config flow at the user step so a flow_id exists to configure.
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    with requests_mock.Mocker() as mock_request:
        # Both the token and charger-status endpoints succeed.
        mock_request.get(
            "https://api.wall-box.com/auth/token/user",
            text='{"jwt":"fakekeyhere","user_id":12345,"ttl":145656758,"error":false,"status":200}',
            status_code=HTTPStatus.OK,
        )
        mock_request.get(
            "https://api.wall-box.com/chargers/status/12345",
            text='{"Temperature": 100, "Location": "Toronto", "Datetime": "2020-07-23", "Units": "Celsius"}',
            status_code=HTTPStatus.OK,
        )
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {
                "station": "12345",
                "username": "test-username",
                "password": "test-password",
            },
        )
    # A config entry is created with the entered station id preserved.
    assert result2["title"] == "Wallbox Portal"
    assert result2["data"]["station"] == "12345"
| 35.181818 | 115 | 0.611886 |
acf62ee6602771749289115c805012176640625d | 1,633 | py | Python | SprityBird/spritybird/python3.5/lib/python3.5/site-packages/healpy/cookbook.py | MobileAnalytics/iPython-Framework | da0e598308c067cd5c5290a6364b3ffaf2d2418f | [
"MIT"
] | 4 | 2018-07-04T17:20:12.000Z | 2019-07-14T18:07:25.000Z | SprityBird/spritybird/python3.5/lib/python3.5/site-packages/healpy/cookbook.py | MobileAnalytics/iPython-Framework | da0e598308c067cd5c5290a6364b3ffaf2d2418f | [
"MIT"
] | null | null | null | SprityBird/spritybird/python3.5/lib/python3.5/site-packages/healpy/cookbook.py | MobileAnalytics/iPython-Framework | da0e598308c067cd5c5290a6364b3ffaf2d2418f | [
"MIT"
] | 1 | 2018-09-03T03:02:06.000Z | 2018-09-03T03:02:06.000Z | """Various generic useful functions
"""
def is_seq(o):
    """Return True when *o* behaves like a sized sequence.

    Parameters
    ----------
    o : any object
        The object to check.

    Returns
    -------
    bool
        True if *o* exposes ``__len__``, False otherwise.
    """
    return hasattr(o, "__len__")


def is_seq_of_seq(o):
    """Return True when *o* is a sequence whose elements are all sequences.

    The sizes of the inner sequences are not checked.

    Parameters
    ----------
    o : any object
        The object to check.

    Returns
    -------
    bool
        True if *o* is a sequence of sequences, False otherwise.
    """
    return is_seq(o) and all(is_seq(item) for item in o)


def is_like2d(o):
    """Return True when *o* is conformable to a 2d array.

    Parameters
    ----------
    o : any object
        The object to check.

    Returns
    -------
    bool
        True if *o* is a sequence of equally sized sequences,
        False otherwise.
    """
    if not is_seq_of_seq(o):
        return False
    # All rows must share a single length (an empty outer sequence passes).
    row_lengths = {len(row) for row in o}
    return len(row_lengths) <= 1


def len_array_or_arrays(o):
    """Return the length of a single array or list of arrays.

    Parameters
    ----------
    o : either array or sequence of arrays

    Returns
    -------
    length : int
        ``len(o[0])`` for a sequence of sequences, ``len(o)`` otherwise.
    """
    return len(o[0]) if is_seq_of_seq(o) else len(o)
| 19.914634 | 74 | 0.541335 |
acf62f5b6df0f471fdfe9e43cece118949729f35 | 2,235 | py | Python | {{cookiecutter.dir_name}}/app/forms.py | Raniita/cookiecutter-flask-yatf | 29f76bfb0dfa5ef9a5cb4f3236c665cf511b8a8b | [
"RSA-MD"
] | 3 | 2021-07-14T11:09:38.000Z | 2021-07-22T22:36:45.000Z | {{cookiecutter.dir_name}}/app/forms.py | Raniita/yes-another-flask-template | 29f76bfb0dfa5ef9a5cb4f3236c665cf511b8a8b | [
"RSA-MD"
] | null | null | null | {{cookiecutter.dir_name}}/app/forms.py | Raniita/yes-another-flask-template | 29f76bfb0dfa5ef9a5cb4f3236c665cf511b8a8b | [
"RSA-MD"
] | null | null | null | from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, SubmitField
from wtforms.validators import DataRequired, Email, EqualTo, Length, Optional
class SignupForm(FlaskForm):
    """User sign-up form: name, email, confirmed password, optional website."""
    # Display name of the new user; mandatory.
    name = StringField(
        'Name',
        validators=[DataRequired()]
    )
    # Email address; must be at least 6 characters and a valid address.
    email = StringField(
        'Email',
        validators=[
            Length(min=6),
            Email(message='Enter a valid email.'),
            DataRequired()
        ]
    )
    # Password; minimum length of 6 characters enforced.
    password = PasswordField(
        'Password',
        validators=[
            DataRequired(),
            Length(min=6, message='Select a stronger password.')
        ]
    )
    # Second entry of the password; must match the `password` field above.
    confirm = PasswordField(
        'Confirm your Password',
        validators=[
            DataRequired(),
            EqualTo('password', message='Password must match.')
        ]
    )
    # Optional website value (free-form text; not validated as a URL).
    website = StringField(
        'Website',
        validators=[Optional()]
    )
    submit = SubmitField('Register')
class LoginForm(FlaskForm):
    """User log-in form: email and password."""
    # Email used as the login identifier; must be a valid address.
    email = StringField(
        'Email',
        validators=[
            DataRequired(),
            Email(message='Enter a valid email.')
        ]
    )
    # Account password; only presence is checked here.
    password = PasswordField(
        'Password',
        validators=[DataRequired()]
    )
    submit = SubmitField('Log In')
class ChangePasswordForm(FlaskForm):
    """Password-change form: current password and new password, each entered
    twice for confirmation."""
    # Current password; minimum length of 6 characters enforced.
    password = PasswordField(
        'Password',
        validators=[
            DataRequired(),
            Length(min=6, message='Select a stronger password.')
        ]
    )
    # Re-entry of the current password; must match `password`.
    confirm = PasswordField(
        'Confirm your Password',
        validators=[
            DataRequired(),
            EqualTo('password', message='Password must match.')
        ]
    )
    # The new password to set; minimum length of 6 characters enforced.
    new_password = PasswordField(
        'New Password',
        validators=[
            DataRequired(),
            Length(min=6, message='Select a stronger password.')
        ]
    )
    # Re-entry of the new password; must match `new_password`.
    confirm_new = PasswordField(
        'Confirm your Password',
        validators=[
            DataRequired(),
            EqualTo('new_password', message='Password must match.')
        ]
    )
    submit = SubmitField('Change actual user password')
| 22.35 | 77 | 0.55123 |
acf6301f0c71ff30eb4b1c3959d2733fddc4506b | 5,331 | py | Python | djangosaml2/conf.py | Quarticai/djangosaml2 | f4f5977d40aef13462e607379207f26c86572178 | [
"Apache-2.0"
] | null | null | null | djangosaml2/conf.py | Quarticai/djangosaml2 | f4f5977d40aef13462e607379207f26c86572178 | [
"Apache-2.0"
] | 25 | 2020-09-16T07:18:09.000Z | 2021-11-03T05:35:25.000Z | djangosaml2/conf.py | Quarticai/djangosaml2 | f4f5977d40aef13462e607379207f26c86572178 | [
"Apache-2.0"
] | null | null | null | # Copyright (C) 2010-2012 Yaco Sistemas (http://www.yaco.es)
# Copyright (C) 2009 Lorenzo Gil Sanchez <lorenzo.gil.sanchez@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import os
from importlib import import_module
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
import saml2
from .utils import get_custom_setting
from deming.models import SamlConfig
from deming.constants_utils import ATTRIBUTES_MAP_FILENAME
def get_config_loader(path, request=None):
    """Import and return the callable SAML config loader at dotted ``path``.

    Args:
        path: Dotted path to a callable, e.g.
            ``"djangosaml2.conf.config_settings_loader"``.
        request: Unused here; accepted for interface compatibility.

    Returns:
        The callable named by ``path``.

    Raises:
        ImproperlyConfigured: If the module cannot be imported, the attribute
            is missing, or the attribute is not callable.
    """
    # FIX: the original used path.rfind('.') and sliced with i == -1 for a
    # dotless path, silently importing `path[:-1]`.  rpartition() yields an
    # empty module name instead, which import_module rejects with ValueError.
    module, _, attr = path.rpartition('.')
    try:
        mod = import_module(module)
    except ImportError as e:
        raise ImproperlyConfigured(
            'Error importing SAML config loader %s: "%s"' % (path, e)) from e
    except ValueError as e:
        raise ImproperlyConfigured(
            'Error importing SAML config loader. Is SAML_CONFIG_LOADER '
            'a correctly string with a callable path?'
        ) from e
    try:
        config_loader = getattr(mod, attr)
    except AttributeError as e:
        raise ImproperlyConfigured(
            'Module "%s" does not define a "%s" config loader' %
            (module, attr)) from e

    # callable() is the idiomatic form of the old hasattr(x, '__call__') test.
    if not callable(config_loader):
        raise ImproperlyConfigured(
            "SAML config loader must be a callable object.")

    return config_loader
def config_map(data):
    """Build the pysaml2 SP configuration dict from a database record.

    Args:
        data: A ``SamlConfig`` model instance whose file fields and URIs are
            mapped onto the pysaml2 configuration keys.

    Returns:
        A dict suitable for ``saml2.config.SPConfig.load()``.
    """
    HOME_DIR = os.environ.get('HOME', '')
    # All uploaded files (metadata, certs, attribute maps) live under
    # MEDIA_ROOT relative to the home directory.
    BASE_DIR = os.path.join(HOME_DIR, settings.MEDIA_ROOT)
    # Directory part of the stored attribute-map file path.
    attribute_dir_path = data.attributes_dir.name.split(ATTRIBUTES_MAP_FILENAME)[0]
    return {
        # full path to the xmlsec1 binary programm
        'xmlsec_binary': '/usr/bin/xmlsec1',
        # your entity id, usually your subdomain plus the url to the metadata view
        'entityid': data.sp_entity_id,
        # directory with attribute mapping
        "attribute_map_dir": os.path.join(BASE_DIR, attribute_dir_path),
        # this block states what services we provide
        'service': {
            # we are just a lonely SP
            'sp': {
                'allow_unsolicited': True,
                # 'name': 'Federated Django sample SP',
                # Fall back to an unspecified NameID format when the stored
                # value is not a known saml2.saml constant name.
                'name_id_format': getattr(saml2.saml, data.name_id_format,
                                          saml2.saml.NAMEID_FORMAT_UNSPECIFIED),
                # For Okta add signed logout requets. Enable this:
                # "logout_requests_signed": True,
                'endpoints': {
                    # url and binding to the assertion consumer service view
                    # do not change the binding or service name
                    'assertion_consumer_service': [
                        (data.acs_uri,
                         saml2.BINDING_HTTP_POST),
                    ],
                    # url and binding to the single logout service view
                    # do not change the binding or service name
                    'single_logout_service': [
                        (data.single_logout_service_uri_redirect,
                         saml2.BINDING_HTTP_REDIRECT),
                        (data.single_logout_service_uri_post,
                         saml2.BINDING_HTTP_POST),
                    ],
                },
            },
        },
        # where the remote metadata is stored, local, remote or mdq server.
        # One metadatastore or many ...
        'metadata': {
            'local': [os.path.join(BASE_DIR, data.metadata_file.name)],
        },
        'debug': 1,
        # Signing
        'key_file': os.path.join(BASE_DIR, data.sp_key_file.name),  # private part
        'cert_file': os.path.join(BASE_DIR, data.sp_certificate_file.name),  # public part
        # Encryption (reuses the signing key pair)
        'encryption_keypairs': [{
            'key_file': os.path.join(BASE_DIR, data.sp_key_file.name),  # private part
            'cert_file': os.path.join(BASE_DIR, data.sp_certificate_file.name),  # public part
        }]
    }
def config_settings_loader(request=None):
    """Build and return the pysaml2 SPConfig (the default config loader).

    The configuration is assembled from the first ``SamlConfig`` database
    record rather than from ``settings.SAML_CONFIG``.
    """
    sp_config = saml2.config.SPConfig()
    sp_config.load(config_map(SamlConfig.objects.first()))
    return sp_config
def get_config(config_loader_path=None, request=None):
    """Resolve the configured loader (explicit path or setting) and run it."""
    loader_path = config_loader_path or get_custom_setting(
        'SAML_CONFIG_LOADER', 'djangosaml2.conf.config_settings_loader')
    return get_config_loader(loader_path)(request)
| 36.02027 | 94 | 0.633465 |
acf63188630d9969eff41ff958febfbc24a8ea49 | 3,929 | py | Python | sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/operations/_operations.py | iscai-msft/azure-sdk-for-python | 83715b95c41e519d5be7f1180195e2fba136fc0f | [
"MIT"
] | 8 | 2021-01-13T23:44:08.000Z | 2021-03-17T10:13:36.000Z | sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/operations/_operations.py | iscai-msft/azure-sdk-for-python | 83715b95c41e519d5be7f1180195e2fba136fc0f | [
"MIT"
] | 226 | 2019-07-24T07:57:21.000Z | 2019-10-15T01:07:24.000Z | sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/operations/_operations.py | iscai-msft/azure-sdk-for-python | 83715b95c41e519d5be7f1180195e2fba136fc0f | [
"MIT"
] | 2 | 2020-05-21T22:51:22.000Z | 2020-05-26T20:53:01.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from .. import models
# NOTE: AutoRest-generated client code -- manual edits will be lost when the
# client is regenerated.
class Operations(object):
    """Operations operations.

    You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute.

    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    :ivar api_version: Specifies the API version to use for this request. Constant value: "2016-11-01".
    """

    models = models

    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        # Pinned service API version sent as a query parameter on each call.
        self.api_version = "2016-11-01"
        self.config = config

    def list(
            self, custom_headers=None, raw=False, **operation_config):
        """Returns the list of operations supported by the import/export resource
        provider.

        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: An iterator like instance of Operation
        :rtype:
         ~azure.mgmt.storageimportexport.models.OperationPaged[~azure.mgmt.storageimportexport.models.Operation]
        :raises:
         :class:`ErrorResponseException<azure.mgmt.storageimportexport.models.ErrorResponseException>`
        """
        def prepare_request(next_link=None):
            # The first page uses the templated URL; subsequent pages follow
            # the service-provided next_link verbatim (no extra parameters).
            if not next_link:
                # Construct URL
                url = self.list.metadata['url']
                # Construct parameters
                query_parameters = {}
                query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
            else:
                url = next_link
                query_parameters = {}
            # Construct headers
            header_parameters = {}
            header_parameters['Accept'] = 'application/json'
            if self.config.generate_client_request_id:
                header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
            if custom_headers:
                header_parameters.update(custom_headers)
            if self.config.accept_language is not None:
                header_parameters['Accept-Language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
            # Construct and send request
            request = self._client.get(url, query_parameters, header_parameters)
            return request

        def internal_paging(next_link=None):
            # Fetch one page; anything other than HTTP 200 is surfaced as an
            # ErrorResponseException.
            request = prepare_request(next_link)
            response = self._client.send(request, stream=False, **operation_config)
            if response.status_code not in [200]:
                raise models.ErrorResponseException(self._deserialize, response)
            return response

        # Deserialize response into a lazily-paged iterator of Operation.
        header_dict = None
        if raw:
            header_dict = {}
        deserialized = models.OperationPaged(internal_paging, self._deserialize.dependencies, header_dict)

        return deserialized
    list.metadata = {'url': '/providers/Microsoft.ImportExport/operations'}
| 38.519608 | 144 | 0.638076 |
acf631fc8f90f16b50e63210f2869e0ff47cb4c3 | 936 | py | Python | 3.3/get-pip.py | bloomonkey/get-pip | 57b648ef9b98f7324b8cdc61d68966cad0a2473f | [
"MIT"
] | 1 | 2021-05-04T10:53:13.000Z | 2021-05-04T10:53:13.000Z | 3.3/get-pip.py | bloomonkey/get-pip | 57b648ef9b98f7324b8cdc61d68966cad0a2473f | [
"MIT"
] | null | null | null | 3.3/get-pip.py | bloomonkey/get-pip | 57b648ef9b98f7324b8cdc61d68966cad0a2473f | [
"MIT"
] | null | null | null | #!/usr/bin/env python
from __future__ import print_function

import sys
# NOTE(review): textwrap is imported but never used below -- confirm before
# removing.
import textwrap

# Tombstone notice shown to users fetching this script from the retired URL.
message = """
Hi there!
The URL you are using to fetch this script has changed, and this one will no
longer work. Please use get-pip.py from the following URL instead:
https://bootstrap.pypa.io/pip/3.3/get-pip.py
Sorry if this change causes any inconvenience for you!
We don't have a good mechanism to make more gradual changes here, and this
renaming is a part of an effort to make it easier to us to update these
scripts, when there's a pip release. It's also essential for improving how we
handle the `get-pip.py` scripts, when pip drops support for a Python minor
version.
There are no more renames/URL changes planned, and we don't expect that a need
would arise to do this again in the near future.
Thanks for understanding!
- Pradyun, on behalf of the volunteers who maintain pip.
"""

# Emit to stderr and exit non-zero so automated callers notice the failure.
print(message, file=sys.stderr)
sys.exit(1)
| 29.25 | 78 | 0.766026 |
acf632731e3c942c63a577348ab66a40ca4f7661 | 1,358 | py | Python | networking_odl/db/migration/alembic_migrations/versions/pike/expand/0472f56ff2fb_add_journal_dependencies_table.py | gokarslan/networking-odl2 | 6a6967832b2c02dfcff6a9f0ab6e36472b849ce8 | [
"Apache-2.0"
] | null | null | null | networking_odl/db/migration/alembic_migrations/versions/pike/expand/0472f56ff2fb_add_journal_dependencies_table.py | gokarslan/networking-odl2 | 6a6967832b2c02dfcff6a9f0ab6e36472b849ce8 | [
"Apache-2.0"
] | null | null | null | networking_odl/db/migration/alembic_migrations/versions/pike/expand/0472f56ff2fb_add_journal_dependencies_table.py | gokarslan/networking-odl2 | 6a6967832b2c02dfcff6a9f0ab6e36472b849ce8 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Red Hat Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Add journal dependencies table
Revision ID: 0472f56ff2fb
Revises: 43af357fd638
Create Date: 2017-04-02 11:02:01.622548
"""
# revision identifiers, used by Alembic.
revision = '0472f56ff2fb'        # this migration's id
down_revision = '43af357fd638'   # the migration this one applies on top of
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the ``opendaylight_journal_deps`` association table.

    Each row pairs two ``opendaylightjournal`` entries (``depends_on``,
    ``dependent``); the composite primary key prevents duplicate pairs and
    ``ON DELETE CASCADE`` removes rows when either journal entry is deleted.
    """
    op.create_table(
        'opendaylight_journal_deps',
        sa.Column('depends_on', sa.BigInteger(),
                  sa.ForeignKey('opendaylightjournal.seqnum',
                                ondelete='CASCADE'),
                  primary_key=True),
        sa.Column('dependent', sa.BigInteger(),
                  sa.ForeignKey('opendaylightjournal.seqnum',
                                ondelete='CASCADE'),
                  primary_key=True))
acf632888fa3465f8cdc8a7f55e1539b924a8043 | 304 | py | Python | novo/config/docs.py | erpnextdeveloper/novo | 9d41d67f49f246b4347e8de67c36844a70fc9895 | [
"MIT"
] | null | null | null | novo/config/docs.py | erpnextdeveloper/novo | 9d41d67f49f246b4347e8de67c36844a70fc9895 | [
"MIT"
] | null | null | null | novo/config/docs.py | erpnextdeveloper/novo | 9d41d67f49f246b4347e8de67c36844a70fc9895 | [
"MIT"
] | null | null | null | """
Configuration for docs
"""
# source_link = "https://github.com/[org_name]/novo"
# docs_base_url = "https://[org_name].github.io/novo"
# headline = "App that does everything"
# sub_heading = "Yes, you got that right the first time, everything"
def get_context(context):
    """Docs-build hook: injects template variables into the docs context.

    Sets the HTML brand string displayed by the generated documentation.
    """
    context.brand_html = "Novo"
| 25.333333 | 68 | 0.713816 |
acf6338bf32e251413a89f8dcf56233abcc216cd | 5,748 | py | Python | remoteappmanager/docker/container.py | fossabot/simphony-remote | 2f3c2d0b7e3b5b3b6a747c929ba6224f622bab33 | [
"BSD-3-Clause"
] | null | null | null | remoteappmanager/docker/container.py | fossabot/simphony-remote | 2f3c2d0b7e3b5b3b6a747c929ba6224f622bab33 | [
"BSD-3-Clause"
] | null | null | null | remoteappmanager/docker/container.py | fossabot/simphony-remote | 2f3c2d0b7e3b5b3b6a747c929ba6224f622bab33 | [
"BSD-3-Clause"
] | 1 | 2021-08-12T09:08:36.000Z | 2021-08-12T09:08:36.000Z | from remoteappmanager.docker.docker_labels import SIMPHONY_NS_RUNINFO
from traitlets import Unicode, HasTraits, Int, validate, TraitError
class Container(HasTraits):
    """Class representing a container.

    Note that its existence just describes a container.
    It does not imply that the associated container is still
    running, registered, or anything"""

    #: The docker id of the container
    docker_id = Unicode()

    #: The practical name of the container
    name = Unicode()

    #: The image name
    image_name = Unicode()

    #: And the image docker id
    image_id = Unicode()

    #: Mapping identifier
    mapping_id = Unicode()

    #: The ip address...
    ip = Unicode()

    #: ...and port where the container service will be listening
    port = Int(80)

    #: The id that will go in the URL of the container.
    #: This is a de-facto replacement for the container docker id. The reason
    #: why we don't use that instead is because the container id is difficult
    #: to obtain reliably from inside the container, and because we want more
    #: flexibility in the form of the user-exposed id.
    #: Important: must be globally unique, not just per-user unique.
    url_id = Unicode()

    #: The user currently running the container
    user = Unicode()

    #: The url path of the container as it is exported to the user.
    #: e.g. "/home/test/containers/12345"
    #: Must not have an end slash.
    urlpath = Unicode()

    #: The docker realm under which the container is running.
    realm = Unicode()

    @validate("urlpath")
    def _urlpath_validate(self, proposal):
        # Reject trailing slashes so URL concatenation stays predictable.
        if proposal['value'].endswith('/'):
            raise TraitError("urlpath cannot end with a /")
        return proposal['value']

    @property
    def host_url(self):
        """Returns the docker host where this server can be reached,
        in url form."""
        return "http://{ip}:{port}".format(
            ip=self.ip,
            port=self.port,
        )

    def __repr__(self):
        # Render every trait as name=value for easier debugging.
        return (
            '<Container(' +
            ", ".join(
                "{}={}".format(name, getattr(self, name))
                for name in self.trait_names()
            ) +
            ")>")

    @classmethod
    def from_docker_dict(cls, docker_dict):
        """Returns a Container object with the info given by a
        docker Client.

        Parameters
        ----------
        docker_dict : dict
            One item from the result of docker.Client.containers

        Returns
        -------
        container : remoteappmanager.docker.container.Container

        Examples
        --------
        >>> # containers is a list of dict
        >>> containers = docker.Client().containers()
        >>> Container.from_docker_dict(containers[0])
        """
        # inspect_container() output carries a top-level "Config" key;
        # client.containers() output does not. The two formats are parsed
        # by separate branches below.
        is_inspect_container_output = ("Config" in docker_dict)
        kwargs = dict(
            docker_id=docker_dict.get('Id') or '',
            ip=cls.ip.default_value,
            port=cls.port.default_value,
        )
        if is_inspect_container_output:
            # It's a client.inspect_container() output
            network_settings = docker_dict.get("NetworkSettings") or {}
            ports = network_settings.get("Ports") or {}
            # unfortunately, in the case of a running container, we don't have
            # a single list. Instead, we have a dict where the keys are
            # the "port identifier" (e.g. 8888/tcp) and the value is a list
            # of dictionaries.
            # We assume that we only have one, as above
            if len(ports) > 1:
                raise ValueError("Container Ports had more than one element.")
            if len(ports):
                port_values = list(ports.values())[0]
                if len(port_values) > 1:
                    raise ValueError("Container Ports values had "
                                     "more than one element.")
                if len(port_values):
                    kwargs["ip"] = port_values[0].get("HostIp") or kwargs["ip"]
                    kwargs["port"] = int(port_values[0].get("HostPort") or
                                         kwargs["port"])
            config = docker_dict.get("Config", {})
            # NOTE(review): unlike the containers() branch below, this does
            # not fall back to {} -- a missing "Labels" key would leave
            # `labels` as None and make the .get calls at the bottom raise.
            # Confirm inspect_container output always includes Labels.
            labels = config.get("Labels")
            kwargs["image_name"] = config.get("Image")
            kwargs["image_id"] = docker_dict["Image"]
            kwargs["name"] = docker_dict["Name"]
        else:
            # It's a client.containers() output, so we have different rules.
            ports = docker_dict.get('Ports') or []
            if len(ports) > 1:
                raise ValueError("Container Ports had more than one element.")
            if len(ports):
                kwargs["ip"] = ports[0].get('IP') or kwargs["ip"]
                kwargs["port"] = int(ports[0].get('PublicPort') or
                                     kwargs["port"])
            labels = docker_dict.get("Labels") or {}
            kwargs["image_name"] = docker_dict.get('Image') or ''
            kwargs["image_id"] = docker_dict.get("ImageID") or ''
            names = docker_dict.get("Names") or ('', )
            kwargs["name"] = names[0]
        # Run-info labels are common to both formats; missing labels become
        # empty strings to satisfy the Unicode traits.
        kwargs["mapping_id"] = labels.get(SIMPHONY_NS_RUNINFO.mapping_id) or ""
        kwargs["url_id"] = labels.get(SIMPHONY_NS_RUNINFO.url_id) or ""
        kwargs["user"] = labels.get(SIMPHONY_NS_RUNINFO.user) or ""
        kwargs["urlpath"] = labels.get(SIMPHONY_NS_RUNINFO.urlpath) or ""
        kwargs["realm"] = labels.get(SIMPHONY_NS_RUNINFO.realm) or ""
        try:
            return cls(**kwargs)
        except TraitError as e:
            # Re-raise as ValueError so callers deal with one exception type.
            raise ValueError(
                "Data does not satisfy trait constraints. "
                "{}.".format(e))
| 34.836364 | 79 | 0.569415 |
acf6340fa9efd1bdb4c1031cd271fff37a5bc39a | 8,761 | py | Python | travis/upload_build_results_to_gcs.py | Dazbeni/grr | 5b49a83eba2f84e346a2b50d154264c190a24f08 | [
"Apache-2.0"
] | 1 | 2020-06-25T14:25:51.000Z | 2020-06-25T14:25:51.000Z | travis/upload_build_results_to_gcs.py | Dazbeni/grr | 5b49a83eba2f84e346a2b50d154264c190a24f08 | [
"Apache-2.0"
] | 3 | 2021-05-11T20:18:38.000Z | 2022-03-02T09:33:56.000Z | travis/upload_build_results_to_gcs.py | Dazbeni/grr | 5b49a83eba2f84e346a2b50d154264c190a24f08 | [
"Apache-2.0"
] | 1 | 2020-06-25T14:25:54.000Z | 2020-06-25T14:25:54.000Z | #!/usr/bin/env python
# python3
"""Tool used by Travis to upload build artifacts to the Google Cloud.
This tool also triggers Appveyor builds for certain Travis jobs after
results are uploaded.
"""
import datetime
import os
import shutil
import subprocess
import tempfile
from absl import app
from absl import flags
from absl import logging
import requests
from google.cloud import storage
flags.DEFINE_string("encrypted_service_key", "",
                    "Path to Travis's GCP service account key.")

flags.DEFINE_string("build_results_dir", "",
                    "Path to the local directory containing build results.")

# Environment variables.
# The *_ENCRYPTION_*_VAR entries hold the *name* of another environment
# variable (double indirection) that contains the actual secret value.
_TRAVIS_COMMIT = "TRAVIS_COMMIT"
_TRAVIS_BRANCH = "TRAVIS_BRANCH"
_TRAVIS_JOB_NUMBER = "TRAVIS_JOB_NUMBER"
_SERVICE_FILE_ENCRYPTION_KEY_VAR = "SERVICE_FILE_ENCRYPTION_KEY_VAR"
_SERVICE_FILE_ENCRYPTION_IV_VAR = "SERVICE_FILE_ENCRYPTION_IV_VAR"
_GCS_BUCKET = "GCS_BUCKET"
_GCS_TAG = "GCS_TAG"
_APPVEYOR_ACCOUNT_NAME = "APPVEYOR_ACCOUNT_NAME"
_APPVEYOR_TOKEN = "APPVEYOR_TOKEN"
_APPVEYOR_WINDOWS_TEMPLATES_SLUG = "APPVEYOR_WINDOWS_TEMPLATES_SLUG"
_APPVEYOR_E2E_TESTS_SLUG = "APPVEYOR_E2E_TESTS_SLUG"
_APPVEYOR_DOCKER_BUILD_SLUG = "APPVEYOR_DOCKER_BUILD_SLUG"

# Other constants.
_DECRYPTED_SERVICE_FILE_NAME = "travis_uploader_service_account.json"
# strftime pattern used when naming GCS result directories.
_GCS_BUCKET_TIME_FORMAT = "%Y-%m-%dT%H:%MUTC"
_UBUNTU_64BIT_TAG = "ubuntu_64bit"
_SERVER_DEB_TAG = "server_deb"
_APPVEYOR_API_URL = "https://ci.appveyor.com/api/builds"
# Placeholder substituted for secret values in redacted error messages.
_REDACTED_SECRET_PLACEHOLDER = "**REDACTED SECRET**"
_LATEST_SERVER_DEB_GCS_DIR = "_latest_server_deb"
class DecryptionError(Exception):
  """Raised when a problem occurs when trying to decrypt the GCP key."""


class AppveyorError(Exception):
  """Raised when a problem occurs when trying to communicate with Appveyor."""


class GCSUploadError(Exception):
  """Generic exception raised when an error occurs during upload of results."""
def _GetRedactedExceptionMessage(exception: Exception) -> str:
  """Returns the exception's message with known secret values blanked out.

  The Appveyor API token and the (indirectly named) service-file encryption
  key and IV are read from the environment, and every occurrence of their
  values in the message is replaced with a placeholder.
  """
  # Note the double indirection: the *_VAR environment variables hold the
  # name of the environment variable that carries the actual secret.
  secret_values = (
      os.environ[_APPVEYOR_TOKEN],
      os.environ[os.environ[_SERVICE_FILE_ENCRYPTION_KEY_VAR]],
      os.environ[os.environ[_SERVICE_FILE_ENCRYPTION_IV_VAR]],
  )
  message = str(exception)
  for secret in secret_values:
    message = message.replace(secret, _REDACTED_SECRET_PLACEHOLDER)
  return message
def _GetGCSBuildResultsDir() -> str:
  """Returns the GCS blob prefix for build results.

  The prefix combines the commit's timestamp (queried from git), the commit
  hash, the Travis job number and the job's GCS tag.
  """
  raw_timestamp = subprocess.check_output(
      ["git", "show", "-s", "--format=%ct", os.environ[_TRAVIS_COMMIT]])
  try:
    commit_time = int(raw_timestamp.decode("utf-8").strip())
  except ValueError:
    raise ValueError(
        "Received invalid response from git: {}.".format(raw_timestamp))
  formatted_time = datetime.datetime.utcfromtimestamp(commit_time).strftime(
      _GCS_BUCKET_TIME_FORMAT)
  return "{}_{}/travis_job_{}_{}".format(
      formatted_time,
      os.environ[_TRAVIS_COMMIT],
      os.environ[_TRAVIS_JOB_NUMBER],
      os.environ[_GCS_TAG])
def _DecryptGCPServiceFileTo(service_file_path: str):
  """Decrypts Travis's GCP service account key to the given location.

  More information about decrypting files on Travis can be found in
  https://docs.travis-ci.com/user/encrypting-files/

  Args:
    service_file_path: Full path of the decrypted JSON file to generate.

  Raises:
    DecryptionError: If decryption fails.
  """
  # The *names* of the env variables holding the key/iv are themselves stored
  # in env variables; resolve that first level of indirection up front.
  encryption_key_var = os.environ[_SERVICE_FILE_ENCRYPTION_KEY_VAR]
  encryption_iv_var = os.environ[_SERVICE_FILE_ENCRYPTION_IV_VAR]
  try:
    # pyformat: disable
    subprocess.check_call([
        "openssl", "aes-256-cbc",
        "-K", os.environ[encryption_key_var],
        "-iv", os.environ[encryption_iv_var],
        "-in", flags.FLAGS.encrypted_service_key,
        "-out", service_file_path,
        "-d",
    ])
    # pyformat: enable
  except Exception as e:  # Redact before surfacing anything secret-bearing.
    raise DecryptionError(
        "{} encountered when trying to decrypt the GCP service key: {}".format(
            e.__class__.__name__, _GetRedactedExceptionMessage(e)))
def _UploadBuildResults(gcs_bucket: storage.Bucket, gcs_build_results_dir: str):
  """Uploads all build results to Google Cloud Storage."""
  logging.info("Will upload build results to gs://%s/%s.",
               os.environ[_GCS_BUCKET], gcs_build_results_dir)
  results_dir = flags.FLAGS.build_results_dir
  for entry in os.listdir(results_dir):
    entry_path = os.path.join(results_dir, entry)
    # Only regular files are uploaded; sub-directories are ignored.
    if not os.path.isfile(entry_path):
      logging.info("Skipping %s as it's not a file.", entry_path)
      continue
    logging.info("Uploading: %s", entry_path)
    destination_blob = gcs_bucket.blob(
        "{}/{}".format(gcs_build_results_dir, entry))
    destination_blob.upload_from_filename(entry_path)
  logging.info("GCS upload done.")
def _TriggerAppveyorBuild(project_slug_var_name: str):
  """Sends a POST request to trigger an Appveyor build.

  Args:
    project_slug_var_name: The name of an environment variable containing an
      identifier for the Appveyor job to trigger.

  Raises:
    AppveyorError: If the trigger attempt is not successful.
  """
  request_payload = {
      "accountName": os.environ[_APPVEYOR_ACCOUNT_NAME],
      "projectSlug": os.environ[project_slug_var_name],
      "branch": os.environ[_TRAVIS_BRANCH],
      "commitId": os.environ[_TRAVIS_COMMIT],
  }
  logging.info("Will trigger Appveyor build with params: %s", request_payload)
  auth_headers = {
      "Authorization": "Bearer {}".format(os.environ[_APPVEYOR_TOKEN]),
  }
  try:
    response = requests.post(
        _APPVEYOR_API_URL, json=request_payload, headers=auth_headers)
  except Exception as e:  # Redact secrets before re-raising.
    raise AppveyorError("{} encountered on POST request: {}".format(
        e.__class__.__name__, _GetRedactedExceptionMessage(e)))
  if not response.ok:
    raise AppveyorError(
        "Failed to trigger Appveyor build; got response {}.".format(
            response.status_code))
def _UpdateLatestServerDebDirectory(gcs_bucket: storage.Bucket,
                                    gcs_build_results_dir: str):
  """Updates the '_latest_server_deb' GCS directory with the latest results."""
  logging.info("Updating latest server deb directory.")
  stale_blobs = list(
      gcs_bucket.list_blobs(prefix=_LATEST_SERVER_DEB_GCS_DIR))
  fresh_blobs = list(gcs_bucket.list_blobs(prefix=gcs_build_results_dir))
  # Refuse to wipe the 'latest' directory if there is nothing to replace
  # it with.
  if not fresh_blobs:
    raise GCSUploadError(
        "Failed to find build results for the server-deb Travis job.")
  for blob in stale_blobs:
    logging.info("Deleting previous blob: %s", blob)
    blob.delete()
  for blob in fresh_blobs:
    result_filename = blob.name.split("/")[-1]
    latest_path = "{}/{}".format(_LATEST_SERVER_DEB_GCS_DIR, result_filename)
    logging.info("Copying blob %s (%s) -> %s", blob, gcs_bucket, latest_path)
    gcs_bucket.copy_blob(blob, gcs_bucket, new_name=latest_path)
def main(argv):
  """Decrypts GCP credentials, uploads build results, fans out CI builds."""
  del argv  # Unused.
  if not flags.FLAGS.encrypted_service_key:
    raise ValueError("--encrypted_service_key must be provided.")
  if not flags.FLAGS.build_results_dir:
    raise ValueError("--build_results_dir must be provided.")
  temp_dir = tempfile.mkdtemp()
  service_file_path = os.path.join(temp_dir, _DECRYPTED_SERVICE_FILE_NAME)
  try:
    _DecryptGCPServiceFileTo(service_file_path)
    gcs_client = storage.Client.from_service_account_json(service_file_path)
    gcs_bucket = gcs_client.get_bucket(os.environ[_GCS_BUCKET])
    gcs_build_results_dir = _GetGCSBuildResultsDir()
    _UploadBuildResults(gcs_bucket, gcs_build_results_dir)
  finally:
    # Always remove the decrypted service key, even if the upload failed.
    shutil.rmtree(temp_dir)
  # NOTE(review): nesting reconstructed (source dump lost indentation) -- these
  # statements are assumed to follow the try/finally, so they only run after a
  # successful upload; confirm against the upstream file.
  if os.environ[_GCS_TAG] == _UBUNTU_64BIT_TAG:
    _TriggerAppveyorBuild(_APPVEYOR_WINDOWS_TEMPLATES_SLUG)
  elif os.environ[_GCS_TAG] == _SERVER_DEB_TAG:
    _UpdateLatestServerDebDirectory(gcs_bucket, gcs_build_results_dir)
    _TriggerAppveyorBuild(_APPVEYOR_E2E_TESTS_SLUG)
    _TriggerAppveyorBuild(_APPVEYOR_DOCKER_BUILD_SLUG)
if __name__ == "__main__":
  # Script entry point: app.run handles flag parsing before invoking main.
  app.run(main)
| 37.762931 | 80 | 0.733478 |
acf634b0ea08815141a478646a04015e5d9aed58 | 891 | py | Python | Chapter 04/dags/myEmail_DAG.py | bpbpublications/Practical-Full-stack-Machine-Learning | 84d0a0bb734c27d014a0373482ed8344a69148c5 | [
"MIT"
] | null | null | null | Chapter 04/dags/myEmail_DAG.py | bpbpublications/Practical-Full-stack-Machine-Learning | 84d0a0bb734c27d014a0373482ed8344a69148c5 | [
"MIT"
] | null | null | null | Chapter 04/dags/myEmail_DAG.py | bpbpublications/Practical-Full-stack-Machine-Learning | 84d0a0bb734c27d014a0373482ed8344a69148c5 | [
"MIT"
] | null | null | null | from airflow import DAG
from airflow.operators.email_operator import EmailOperator
from airflow.operators.python_operator import PythonOperator
from datetime import datetime, timedelta
# Arguments applied to every task created inside the DAG below.
default_args = {
    "owner": "airflow",
    "start_date": datetime(2019, 11, 1)
}
def _error_function():
raise Exception('Alert!Alert!')
with DAG(dag_id="myEmail_dag", default_args=default_args,
schedule_interval=None) as dag:
send_completion_mail = EmailOperator(
task_id="send_completion_mail",
to='receivers@mail.com',
subject='Task Completion',
html_content='<p> Your Job was completed <p>')
Dummy_task = PythonOperator(
task_id='Dummy_task',
python_callable=_error_function,
email_on_failure=True,
email='receivers@mail.com')
Dummy_task >> send_completion_mail | 31.821429 | 62 | 0.675645 |
acf63583330c0df8ffeedeb50fed3edcebba7a67 | 3,476 | py | Python | pycqed/instrument_drivers/pq_parameters.py | nuttamas/PycQED_py3 | 1ee35c7428d36ed42ba4afb5d4bda98140b2283e | [
"MIT"
] | 60 | 2016-08-03T10:00:18.000Z | 2021-11-10T11:46:16.000Z | pycqed/instrument_drivers/pq_parameters.py | nuttamas/PycQED_py3 | 1ee35c7428d36ed42ba4afb5d4bda98140b2283e | [
"MIT"
] | 512 | 2016-08-03T17:10:02.000Z | 2022-03-31T14:03:43.000Z | pycqed/instrument_drivers/pq_parameters.py | nuttamas/PycQED_py3 | 1ee35c7428d36ed42ba4afb5d4bda98140b2283e | [
"MIT"
] | 34 | 2016-10-19T12:00:52.000Z | 2022-03-19T04:43:26.000Z | from qcodes.instrument.parameter import ManualParameter
from qcodes.utils.validators import Validator, Strings
import numpy as np
class NP_NANs(Validator):
    """Validator that accepts NaN values only."""

    is_numeric = True

    def __init__(self):
        # NOTE(review): relies on numpy being bound as `np` at module level.
        self._valid_values = [np.nan]

    def __repr__(self):
        return "<nan>"

    def validate(self, value, context=""):
        # Reject anything that is not NaN.  Values np.isnan cannot evaluate
        # (e.g. strings raising TypeError, or arrays with ambiguous truth
        # value) are rejected with the same error message.
        try:
            rejected = not np.isnan(value)
        except Exception:
            rejected = True
        if rejected:
            raise ValueError("{} is not nan; {}".format(repr(value), context))
class InstrumentParameter(ManualParameter):
    """
    A ManualParameter whose value is the *name* of another instrument, which
    can be resolved to an instrument instance via get_instr().

    Args:
        name (string): the name of the instrument that one wants to add.
        instrument (Optional[Instrument]): the "parent" instrument this
            parameter is attached to, if any.
        initial_value (Optional[string]): starting value, the
            only invalid value allowed, and None is only allowed as an initial
            value, it cannot be set later
        **kwargs: Passed to Parameter parent class
    """

    def get_instr(self):
        """
        Returns the instance of the instrument with the name equal to the
        value of this parameter.
        """
        instrument_name = self.get()
        # note that _instrument refers to the instrument this parameter belongs
        # to, while the instrument_name is the instrument that is the value
        # of this parameter.
        return self._instrument.find_instrument(instrument_name)

    def set_validator(self, vals):
        """
        Set a validator `vals` for this parameter.

        Args:
            vals (Validator): validator to set; None installs a Strings()
                validator as the default.

        Raises:
            TypeError: if vals is neither None nor a Validator instance.
        """
        if vals is None:
            self.vals = Strings()
        elif isinstance(vals, Validator):
            self.vals = vals
        else:
            raise TypeError("vals must be a Validator")
class ConfigParameter(ManualParameter):
    # TODO: move this to qcodes as a pull request
    """
    Define one parameter that reflects a manual configuration setting.

    Setting a new (different) value marks the owning instrument as having a
    changed configuration via its `_config_changed` attribute.

    Args:
        name (string): the local name of this parameter
        instrument (Optional[Instrument]): the instrument this applies to,
            if any.
        initial_value (Optional[string]): starting value, the
            only invalid value allowed, and None is only allowed as an initial
            value, it cannot be set later
        **kwargs: Passed to Parameter parent class
    """

    def __init__(self, name, instrument=None, initial_value=None, **kwargs):
        super().__init__(name=name, **kwargs)
        self._instrument = instrument
        # if the instrument does not have _config_changed attribute creates it
        if not hasattr(self._instrument, "_config_changed"):
            self._instrument._config_changed = True
        self._meta_attrs.extend(["instrument", "initial_value"])
        if initial_value is not None:
            # Validate eagerly so a bad initial value fails at construction.
            self.validate(initial_value)
            self.cache.set(initial_value)

    def set_raw(self, value):
        """
        Validate and save `value`.

        If the value differs from the latest cached value, the instrument's
        `_config_changed` flag is set to True.

        Args:
            value (any): value to validate and save
        """
        self.validate(value)
        if value != self.get_latest():
            self._instrument._config_changed = True
        self.cache.set(value)

    def get_raw(self):
        """ Return latest value"""
        return self.get_latest()
| 31.315315 | 82 | 0.624281 |
acf6359197a33d1ff7aeacd001c2c068c4246c0d | 2,996 | py | Python | edi/util/check.py | GaloisInc/adapt | 2ccff778d3e77505899266572f8f7caacb5b630f | [
"BSD-3-Clause"
] | 2 | 2020-04-09T13:04:25.000Z | 2021-09-24T14:17:26.000Z | edi/util/check.py | GaloisInc/adapt | 2ccff778d3e77505899266572f8f7caacb5b630f | [
"BSD-3-Clause"
] | null | null | null | edi/util/check.py | GaloisInc/adapt | 2ccff778d3e77505899266572f8f7caacb5b630f | [
"BSD-3-Clause"
] | 3 | 2019-09-20T20:49:54.000Z | 2021-09-02T17:33:47.000Z | import argparse
import csv
import numpy
from . import groundtruth
class Scores:
    """(uuid, score) pairs parsed from a CSV reader with a header row."""

    def __init__(self, reader):
        # The first row is a header; drop its leading uuid column label.
        header_row = next(reader)
        self.header = header_row[1:]
        self.data = []
        for row in reader:
            self.data.append((row[0], float(row[1])))

    def sort(self, reverse=True):
        """Order entries by score, highest first by default."""
        self.data = sorted(self.data,
                           key=lambda entry: entry[1],
                           reverse=reverse)
def rankScores(scores, gt):
    """Return (uuid, score, rank) triples for scored items present in gt.

    Ranks are 1-based positions in scores.data; items absent from the
    ground truth are skipped but still consume a rank.
    """
    matched = []
    for position, (uuid, score) in enumerate(scores.data, start=1):
        if uuid in gt.data:
            matched.append((uuid, score, position))
    return matched
# Calculate discounted cumulative gain of a list of ranks
def discounted_cumulative_gain(ranks):
    """DCG: each hit at rank r contributes 1/log2(r + 1)."""
    return sum((1.0 / numpy.log2(r + 1) for r in ranks), 0.0)
# Calculate max possible DCG and ratio
def normalized_discounted_cumulative_gain(ranks, num_gt):
    """NDCG: observed DCG divided by the best achievable DCG for num_gt hits."""
    dcg = sum((1.0 / numpy.log2(r + 1) for r in ranks), 0.0)
    # Ideal DCG: all num_gt relevant items placed at ranks 1..num_gt.
    ideal_dcg = sum((1.0 / numpy.log2(i + 1) for i in range(1, num_gt + 1)), 0.0)
    return dcg / ideal_dcg
# Calculate area under ROC curve
def area_under_curve(ranks, num_gt, num_trans):
    """Approximate AUC: average fraction of hits ranked above each cutoff."""
    if num_trans == 0:
        return 0.0
    increment = 1.0 / num_gt
    total = 0.0
    # For every cutoff, count ground-truth hits strictly above it.
    for cutoff in range(num_trans):
        for r in ranks:
            if r < cutoff:
                total += increment
    return total / num_trans
def getScores(inputfile, reverse=True):
    """Load a CSV of (uuid, score) rows and return them sorted.

    Descending by score by default (reverse=True).
    """
    with open(inputfile) as handle:
        parsed = Scores(csv.reader(handle))
        parsed.sort(reverse=reverse)
        return parsed
def main(inputfile, outfile, ground_truth, gtType, reference=None, reverse=True, debug=True):
    """Score a ranking file against ground truth and write uuid,score,rank rows.

    Also prints a one-line CSV summary (input, #transactions, ground-truth
    file/type/size, AUC, NDCG) to stdout.
    """
    def dprint(s):
        # Verbose progress output, only when debug=True.
        if debug:
            print(s)
    scores = getScores(inputfile, reverse)
    dprint('Read scores file: %s' % inputfile)
    num_trans = len(scores.data)
    # A reference score file, when given, defines the transaction count
    # instead of the input file itself.
    if reference != None:
        with open(reference) as reffile:
            rscores = Scores(csv.reader(reffile))
            rscores.sort(reverse=reverse)
            num_trans = len(rscores.data)
    dprint('Number of transactions: %d' % num_trans)
    with open(ground_truth) as gtfile:
        gt = groundtruth.GroundTruth(csv.reader(gtfile), gtType)
        dprint('Read ground truth file: %s' % ground_truth)
        num_gt = len(gt.data)
        dprint('Number of %s elements: %d' % (gtType, num_gt))
    # NOTE(review): nesting reconstructed (source dump lost indentation) --
    # the metric computations are assumed to sit inside the output-file
    # context so the per-uuid rank rows can be written at the end.
    with open(outfile, 'w') as outfile:
        outfile.write("uuid,score,rank\n")
        uuidScoreRanks = rankScores(scores, gt)
        ranks = [rank for (uuid, score, rank) in uuidScoreRanks]
        ndcg = normalized_discounted_cumulative_gain(ranks, num_gt)
        dprint('NDCG: %f' % ndcg)
        auc = area_under_curve(ranks, num_gt, num_trans)
        dprint('AUC: %f' % auc)
        print('%s,%d,%s,%s,%d,%f,%f' % (inputfile, num_trans, ground_truth, gtType, num_gt, auc, ndcg))
        for (uuid, score, rank) in uuidScoreRanks:
            outfile.write("%s,%f,%d\n" % (uuid, score, rank))
acf635c7b46b6621ba269a14fbfd0b8395c2b17d | 1,337 | py | Python | energyAndMatter.py | superJen99/propnets | 8064d160712409d3ba6cb48ad857c7756155f2c2 | [
"Apache-2.0"
] | null | null | null | energyAndMatter.py | superJen99/propnets | 8064d160712409d3ba6cb48ad857c7756155f2c2 | [
"Apache-2.0"
] | null | null | null | energyAndMatter.py | superJen99/propnets | 8064d160712409d3ba6cb48ad857c7756155f2c2 | [
"Apache-2.0"
] | null | null | null | class gravity: """all the PV, UV, SV, ..., etc stuff"""
predictivePower = [] """props which are listed or in lists of lists, etc almost hiearchical structure"""
def computePredictivePower(sys=[mammal],net=[amygdala], subnet=[area], prop=[cn52],properties=false):
"""sys will have its own means for traversing, net will too, i.e. methods for finding predictive power for each so e.g. the generalised design of a region area of function of a neural network can be designed"""
pass
class vision:
vibrationFrequency: nn
def vibrations:
pass
def frequency:
"""should be a universal method for propHandles"""
def pitch:
"""pitches in the frequency of vision"""
def tone:
"""ditto universal"""
class audio:
"""ditto"""
class text:
def frequency:
pass
"""def other physics traits of text"""
class universeImmediate:
timeDelay = 0;
def vibration:
"""universes vibration"""
pass
def timeDelay():
"""sets time delay array"""
pass
class varAutoEncoder():
"""latent memory PV for this priority!!!"""
"""ID_SV""" """access to current ID of SV or shard of ID i.e. SV"""
"""patient UV"""
"""instructions from SV"""
| 24.309091 | 219 | 0.593867 |
acf6372e8512e41e2da59d59b388110721683e60 | 37,884 | py | Python | shutit_patterns/shutitfile.py | Kafkamorph/shutit | e281be48d6f102518506ad07754116452781a369 | [
"MIT"
] | null | null | null | shutit_patterns/shutitfile.py | Kafkamorph/shutit | e281be48d6f102518506ad07754116452781a369 | [
"MIT"
] | null | null | null | shutit_patterns/shutitfile.py | Kafkamorph/shutit | e281be48d6f102518506ad07754116452781a369 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# The MIT License (MIT)
#
# Copyright (C) 2014 OpenBet Limited
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# ITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from six import iteritems
import json
import logging
import os
import random
import re
import shutil
import sys
try:
from urllib.request import urlopen
from urllib.parse import urlparse
except ImportError:
from urllib2 import urlopen
from urlparse import urlparse
import shutit_global
import shutit_util
import shutit_skeleton
from shutit_module import ShutItFailException
from shutit_module import ShutItModule
#_default_repo_name = shutit_util._default_repo_name
# Fallback repository/module name used when none is supplied.
_default_repo_name = 'mymodule'
def setup_shutitfile_pattern(skel_path,
                             skel_delivery,
                             skel_domain,
                             skel_module_name,
                             skel_shutitfiles,
                             skel_domain_hash,
                             skel_depends):
	"""Write a runnable skeleton (run.sh + two example ShutItFiles) into skel_path.

	Note: skel_shutitfiles, skel_domain_hash and skel_depends are accepted for
	signature compatibility but are not used by this function.
	"""
	shutit = shutit_global.shutit
	# Driver script: re-generates the skeleton from the ShutItFiles and runs it.
	# NOTE(review): PATTERN is populated from skel_delivery, mirroring DELIVERY
	# -- preserved as-is from the original template.
	runsh_filename = skel_path + '/run.sh'
	runsh_file = open(runsh_filename,'w+')
	runsh_file.write('''#!/bin/bash
set -e
MODULE_NAME="''' + skel_module_name + '''"
DIR="/tmp/shutit_built/''' + skel_path + '''"
BUILT_DIR="${DIR}/built"
DOMAIN="''' + skel_domain + '''"
DELIVERY="''' + skel_delivery + '''"
PATTERN="''' + skel_delivery + '''"
rm -rf $DIR
shutit skeleton \
--shutitfile ShutItFile1 ShutItFile2 \
--name ${DIR} \
--domain ${DOMAIN} \
--delivery ${DELIVERY} \
--pattern ${PATTERN}
if [[ ${DELIVERY} == 'bash' ]]
then
cd $DIR && ./run.sh "$@"
elif [[ ${DELIVERY} == 'docker' ]]
then
cd $DIR/bin && ./build.sh "$@"
fi''')
	runsh_file.close()
	# Make the driver executable.
	os.chmod(runsh_filename,0o755)
	# User message
	shutit.log('''# Run:

cd ''' + skel_path + ''' && ./run.sh

# to run.
# Or
# cd ''' + skel_path + ''' && ./run.sh -c
# to run while choosing modules to build. ''',transient=True)
	# First example ShutItFile: a minimal docker-delivery module.
	shutitfile1_filename = skel_path + '/ShutItFile1'
	shutitfile1_file = open(shutitfile1_filename,'w+')
	shutitfile1_file.write('''# See [here](https://github.com/ianmiell/shutitfile/blob/master/CheatSheet.md) for
# a cheat sheet.
# Simple script to get going with:
#DESCRIPTION A minimal todo app
#MAINTAINER you@example.com
#FROM alpine
#DELIVERY docker

#INSTALL nodejs
#INSTALL git
#RUN npm install todo
#COMMIT shutitfile:part_one
#PUSH shutitfile:part_one''')
	shutitfile1_file.close()
	# Second example ShutItFile: excluded from builds by default.
	shutitfile2_filename = skel_path + '/ShutItFile2'
	shutitfile2_file = open(shutitfile2_filename,'w+')
	shutitfile2_file.write('''#DEFAULT_INCLUDE false
#DELIVERY docker
#INSTALL vim
#INSTALL tcpdump
#COMMIT shutitfile:part_two''')
	shutitfile2_file.close()
shutitfile2_file.close()
# Parses the shutitfile (passed in as a string)
# and info to extract, and returns a list with the information in a more canonical form, still ordered.
def parse_shutitfile(contents):
ret = []
full_line = ''
for line in contents.split('\n'):
line = line.strip()
# Handle continuations
if len(line) > 0:
if line[-1] == '\\':
full_line += line[0:-1]
continue
else:
full_line += line
if re.match("^IF_NOT+[\s]+([A-Z_]+)[\s]+(.*)$", full_line):
m = re.match("^IF_NOT+[\s]+([A-Z_]+)[\s]+(.*)$", full_line)
ret.append(['IF_NOT',m.group(1),m.group(2)])
elif re.match("^STORE_RUN+[\s]+([a-zA-Z0-9_]+)[\s]+(.*)$", full_line):
m = re.match("^STORE_RUN+[\s]+([a-zA-Z0-9_]+)[\s]+(.*)$", full_line)
ret.append(['STORE_RUN',m.group(1),m.group(2)])
elif re.match("^ELIF_NOT[\s]+([A-Z_]+)[\s]+(.*)$", full_line):
m = re.match("^ELIF_NOT[\s]+([A-Z_]+)[\s]+(.*)$", full_line)
ret.append(['ELIF_NOT',m.group(1),m.group(2)])
elif re.match("^IF[\s]+([A-Z_]+)[\s]+(.*)$", full_line):
m = re.match("^IF[\s]+([A-Z_]+)[\s]+(.*)$", full_line)
ret.append(['IF',m.group(1),m.group(2)])
elif re.match("^ELIF[\s]+([A-Z_]+)[\s]+(.*)$", full_line):
m = re.match("^ELIF[\s]+([A-Z_]+)[\s]+(.*)$", full_line)
ret.append(['ELIF',m.group(1),m.group(2)])
elif re.match("^ELSE$", full_line):
ret.append(['ELSE'])
elif re.match("^ENDIF$", full_line):
ret.append(['ENDIF'])
elif re.match("^([A-Za-z_]+)[\s]*(.*)$", full_line):
m = re.match("^[\s]*([A-Za-z_]+)[\s]*(.*)$", full_line)
ret.append([m.group(1), m.group(2)])
elif re.match("^#(.*)$", full_line):
# Comments should be added with 'COMMENT a comment'
pass
else:
full_line_split = ''.join((full_line[:10000].split()))
full_line_strings = re.findall("[^\x00-\x1F\x7F-\xFF]", full_line_split)
print('FAILED TO PARSE: ' + full_line_strings[:30] + '[...]')
return [], False
full_line = ''
return ret, True
def parse_shutitfile_args(args_str):
	"""Parse shutitfile args (eg in the line 'RUN some args', the passed-in args_str would be 'some args').

	If the string is bounded by square brackets, then it's treated in the form: ['arg1','arg2'], and the returned list looks the same.
	If the string is composed entirely of name-value pairs (eg RUN a=b c=d) then it's returned as a dict (eg {'a':'b','c':'d'}).
	If what's passed-in is of the form: "COMMAND ['a=b','c=d']" then a dict is also returned.
	Also eg: ["asd and space=value","asd 2=asdgasdg"]
	"""
	ret = []
	if args_str == '':
		return ret
	if args_str[0] == '[' and args_str[-1] == ']':
		# SECURITY FIX: the original used eval() here, which executes
		# arbitrary expressions from the shutitfile. literal_eval accepts
		# the same list-literal syntax without code execution.
		import ast
		ret = ast.literal_eval(args_str)
		assert isinstance(ret, list)
	else:
		ret = args_str.split()
	# If all the items have a '=' in them, return a dict of name-value pairs.
	nv_pairs = all(item.find('=') >= 0 for item in ret)
	if nv_pairs:
		d = {}
		for item in ret:
			item_nv = item.split('=')
			# NOTE(review): values containing '=' lose everything after the
			# second '=' -- preserved from the original behaviour.
			d.update({item_nv[0]: item_nv[1]})
		ret = d
	return ret
# Takes a shutitfile filename and returns a string that represents that Dockerfile as a ShutIt module
def shutitfile_to_shutit_module(skel_shutitfile,
                                skel_path,
                                skel_domain,
                                skel_module_name,
                                skel_domain_hash,
                                skel_delivery,
                                skel_depends,
                                order,
                                total,
                                skel_module_modifier):
	"""Convert a Dockerfile/ShutItFile into the source sections of a ShutIt module.

	Returns:
		Tuple (sections, module_id, module_name, default_include, ok). On a
		parse failure every field is empty and ok is False.
	"""
	shutit = shutit_global.shutit
	if not os.path.exists(skel_shutitfile):
		# Not a local file: only accept it if it looks like a URL.
		if urlparse(skel_shutitfile)[0] == '':
			shutit.fail('Dockerfile/ShutItFile "' + skel_shutitfile + '" must exist')
		shutitfile_contents = urlopen(skel_shutitfile).read()
		shutitfile_dirname = None
	else:
		shutitfile_contents = open(skel_shutitfile).read()
		shutitfile_dirname = os.path.dirname(skel_shutitfile)
		if shutitfile_dirname == '':
			shutitfile_dirname = './'
		# NOTE(review): nesting below reconstructed (source dump lost
		# indentation) -- confirm against the upstream file.
		if os.path.exists(shutitfile_dirname):
			if os.path.exists(skel_path + '/context'):
				shutil.rmtree(skel_path + '/context')
				shutil.copytree(shutitfile_dirname, skel_path + '/context')
			else:
				# Copy any other files that do not already exist on the target
				os.system('cp -r -n ' + shutitfile_dirname + '/* ' + skel_path)
			# Change to this context
			os.chdir(shutitfile_dirname)
	# Process the shutitfile
	shutitfile_representation, ok = shutit_skeleton.process_shutitfile(shutitfile_contents)
	if not ok:
		# BUG FIX: this failure path used to return six values while the
		# success path returns five; callers unpacking five values would
		# crash with a ValueError on failure.
		return '', '', '', '', False
	# Check the shutitfile representation
	check_shutitfile_representation(shutitfile_representation, skel_delivery)
	# Get the shutit module as a string
	sections, module_id, module_name, depends, default_include = generate_shutit_module_sections(shutitfile_representation, skel_domain, skel_module_name, skel_module_modifier, skel_shutitfile, skel_depends, order, total)
	if module_id == skel_module_name:
		module_id = skel_domain + """.""" + skel_module_name + skel_module_modifier
	# Final section: the generated module() factory. Indentation inside the
	# template was reconstructed (tabs), as the dump flattened it.
	final_section = """
def module():
	return """ + skel_module_name + skel_module_modifier + """(
		'""" + module_id + """', """ + skel_domain_hash + str(order * 0.0001) + str(random.randint(1,999)) + """,
		description='""" + shutitfile_representation['shutitfile']['description'] + """',
		delivery_methods=[('""" + skel_delivery + """')],
		maintainer='""" + shutitfile_representation['shutitfile']['maintainer'] + """',
		depends=[""" + depends + """]
	)
"""
	sections.update({'final_section':final_section})
	# Return program to main shutit_dir
	if shutitfile_dirname:
		os.chdir(sys.path[0])
	return sections, module_id, skel_module_name, default_include, ok
def check_shutitfile_representation(shutitfile_representation, skel_delivery):
	"""Validate delivery directives in a parsed shutitfile; fail on conflicts.

	Exactly one delivery method may be in play; non-docker deliveries must
	not use docker-only directives.
	"""
	# delivery directives
	# Only allow one type of delivery
	shutit = shutit_global.shutit
	shutitfile_delivery = set()
	# If we've been given a delivery method, add that.
	if skel_delivery:
		shutitfile_delivery.add(skel_delivery)
	for item in shutitfile_representation['shutitfile']['delivery']:
		shutitfile_delivery.add(item[1])
	if len(shutitfile_delivery) > 1:
		shutit.fail('Conflicting delivery methods in ShutItFile')
	elif len(shutitfile_delivery) == 1:
		skel_delivery = shutitfile_delivery.pop()
	else:
		# Default skel_delivery to bash if none seen
		skel_delivery = 'bash'
	if skel_delivery not in shutit_util.allowed_delivery_methods:
		shutit.fail('Disallowed delivery method in ShutItFile: ' + skel_delivery)
	if skel_delivery != 'docker':
		# FROM, ONBUILD, VOLUME, EXPOSE, ENTRYPOINT, CMD, COMMIT, PUSH are verboten
		failed = False
		if shutitfile_representation['shutitfile']['cmd'] != '' or shutitfile_representation['shutitfile']['volume'] != [] or shutitfile_representation['shutitfile']['onbuild'] != [] or shutitfile_representation['shutitfile']['expose'] != [] or shutitfile_representation['shutitfile']['entrypoint'] != []:
			failed = True
		for item in shutitfile_representation['shutitfile']['script']:
			if item[0] in ('PUSH','COMMIT'):
				failed = True
				break
		if failed:
			shutit.fail('One of FROM, ONBUILD, VOLUME, EXPOSE, ENTRYPOINT or CMD, COMMIT, PUSH used in ShutItFile not using the Docker delivery method.')
def generate_shutit_module_sections(shutitfile_representation, skel_domain, skel_module_name, skel_module_modifier, skel_shutitfile, skel_depends, order, total):
	"""Render the textual sections of a generated ShutIt module.

	Walks the parsed shutitfile once per target section (build, test,
	isinstalled, start, stop), delegating each script line to
	handle_shutitfile_script_line, and balances any dangling pushd/login
	state at the end of each section.

	Returns:
		Tuple (sections dict, module_id, module_name, depends string,
		default_include) -- module_id and module_name are the same value.
	"""
	shutit = shutit_global.shutit
	sections = {}
	# Header: class preamble of the generated module source.
	sections.update({'header_section':'\n# Created from shutitfile: ' + skel_shutitfile + '\n# Maintainer: ' + shutitfile_representation['shutitfile']['maintainer'] + '\nfrom shutit_module import ShutItModule\n\nclass ' + skel_module_name + skel_module_modifier + '(ShutItModule):\n\n\tdef is_installed(self, shutit):\n\t\treturn False'})
	# config section - this must be done first, as it collates the config
	# items that can be referenced later
	config_section = '''
# CONFIGURATION
# shutit.get_config(module_id,option,default=None,boolean=False)
# - Get configuration value, boolean indicates whether the item is
# a boolean type, eg get the config with:
# shutit.get_config(self.module_id, 'myconfig', default='a value')
# and reference in your code with:
# shutit.cfg[self.module_id]['myconfig']'''
	if shutitfile_representation['shutitfile']['module_id']:
		module_id = shutitfile_representation['shutitfile']['module_id']
	else:
		# If the total number of modules is more than 1, then we want to number these modules.
		if total > 1:
			module_id = '%s.%s.%s_%s' % (skel_domain, skel_module_name, skel_module_name, str(order))
		else:
			module_id = '%s.%s.%s' % (skel_domain, skel_module_name, skel_module_name)
	build = ''
	for item in shutitfile_representation['shutitfile']['config']:
		build += handle_shutitfile_config_line(item)
	if build:
		config_section += '\n\t\t' + build
	sections.update({'config_section':config_section})
	# build
	build = ''
	numpushes = 0
	numlogins = 0
	ifdepth = 0
	wgetgot = False
	current_note = ''
	# section is the section of the shutitfile we're in. Default is 'build', but there are also a few others.
	section = 'build'
	build_section = ''
	for item in shutitfile_representation['shutitfile']['script']:
		section = shutitfile_get_section(item[0], section)
		if section == 'build':
			ret = handle_shutitfile_script_line(item, numpushes, wgetgot, numlogins, ifdepth, current_note)
			build += ret[0]
			numpushes = ret[1]
			wgetgot = ret[2]
			numlogins = ret[3]
			ifdepth = ret[4]
			current_note = ret[5]
	build_section += shutit_util._build_section + build
	# Close any pushd/login left open by the script lines.
	while numpushes > 0:
		build_section += '''\n\t\tshutit.send('popd')'''
		numpushes -= 1
	while numlogins > 0:
		build_section += '''\n\t\tshutit.logout()'''
		numlogins -= 1
	if ifdepth != 0:
		shutit.fail('Unbalanced IFs in ' + section + ' section')
	sections.update({'build_section':build_section})
	# finalize section: ONBUILD lines become sends in the finalize method.
	finalize = ''
	for line in shutitfile_representation['shutitfile']['onbuild']:
		finalize += '\n\n\t\tshutit.send(\'' + line + ')\''
	sections.update({'finalize_section':finalize})
	# test section
	build = ''
	test_section = ''
	numpushes = 0
	numlogins = 0
	ifdepth = 0
	current_note = ''
	for item in shutitfile_representation['shutitfile']['script']:
		section = shutitfile_get_section(item[0], section)
		if section == 'test':
			ret = handle_shutitfile_script_line(item, numpushes, wgetgot, numlogins, ifdepth, current_note)
			build += ret[0]
			numpushes = ret[1]
			wgetgot = ret[2]
			numlogins = ret[3]
			ifdepth = ret[4]
			current_note = ret[5]
	if build:
		test_section += '\n\t\t' + build
	while numpushes > 0:
		test_section += """\n\t\tshutit.send('popd')"""
		numpushes -= 1
	while numlogins > 0:
		test_section += '''\n\t\tshutit.logout()'''
		numlogins -= 1
	# NOTE(review): unlike every other section, the test section performs no
	# unbalanced-IF (ifdepth) check here -- possibly an upstream omission.
	sections.update({'test_section':test_section})
	# isinstalled section
	build = ''
	isinstalled_section = ''
	numpushes = 0
	numlogins = 0
	ifdepth = 0
	current_note = ''
	for item in shutitfile_representation['shutitfile']['script']:
		section = shutitfile_get_section(item[0], section)
		if section == 'isinstalled':
			ret = handle_shutitfile_script_line(item, numpushes, wgetgot, numlogins, ifdepth, current_note)
			build += ret[0]
			numpushes = ret[1]
			wgetgot = ret[2]
			numlogins = ret[3]
			ifdepth = ret[4]
			current_note = ret[5]
	if build:
		isinstalled_section += '\n\t\t' + build
	while numpushes > 0:
		isinstalled_section += """\n\t\tshutit.send('popd')"""
		numpushes -= 1
	while numlogins > 0:
		isinstalled_section += '''\n\t\tshutit.logout()'''
		numlogins -= 1
	if ifdepth != 0:
		shutit.fail('Unbalanced IFs in ' + section + ' section')
	sections.update({'isinstalled_section':isinstalled_section})
	# start section
	build = ''
	start_section = ''
	numpushes = 0
	numlogins = 0
	ifdepth = 0
	current_note = ''
	for item in shutitfile_representation['shutitfile']['script']:
		section = shutitfile_get_section(item[0], section)
		if section == 'start':
			ret = handle_shutitfile_script_line(item, numpushes, wgetgot, numlogins, ifdepth, current_note)
			build += ret[0]
			numpushes = ret[1]
			wgetgot = ret[2]
			numlogins = ret[3]
			ifdepth = ret[4]
			current_note = ret[5]
	if build:
		start_section += '\n\t\t' + build
	while numpushes > 0:
		start_section += """\n\t\tshutit.send('popd')"""
		numpushes -= 1
	while numlogins > 0:
		start_section += '''\n\t\tshutit.logout()'''
		numlogins -= 1
	if ifdepth != 0:
		shutit.fail('Unbalanced IFs in ' + section + ' section')
	sections.update({'start_section':start_section})
	# stop section
	build = ''
	stop_section = ''
	numpushes = 0
	numlogins = 0
	ifdepth = 0
	current_note = ''
	for item in shutitfile_representation['shutitfile']['script']:
		section = shutitfile_get_section(item[0], section)
		if section == 'stop':
			ret = handle_shutitfile_script_line(item, numpushes, wgetgot, numlogins, ifdepth, current_note)
			build += ret[0]
			numpushes = ret[1]
			wgetgot = ret[2]
			numlogins = ret[3]
			ifdepth = ret[4]
			current_note = ret[5]
	if build:
		stop_section += '\n\t\t' + build
	while numpushes > 0:
		stop_section += """\n\t\tshutit.send('popd')"""
		numpushes -= 1
	while numlogins > 0:
		stop_section += '''\n\t\tshutit.logout()'''
		numlogins -= 1
	if ifdepth != 0:
		shutit.fail('Unbalanced IFs in ' + section + ' section')
	sections.update({'stop_section':stop_section})
	# dependencies section: skel_depends always comes first.
	shutitfile_depends = []
	for item in shutitfile_representation['shutitfile']['depends']:
		shutitfile_depends.append(item[1])
	if len(shutitfile_depends):
		depends = "'" + skel_depends + "','" + "','".join(shutitfile_depends) + "'"
	else:
		depends = "'" + skel_depends + "'"
	# DEFAULT_INCLUDE ('true'/'false') maps to yes/no in the generated module.
	if shutitfile_representation['shutitfile']['default_include'] == 'false':
		default_include = 'no'
	elif shutitfile_representation['shutitfile']['default_include'] == 'true':
		default_include = 'yes'
	else:
		shutit.fail('Unrecognised DEFAULT_INCLUDE - must be true/false: ' + shutitfile_representation['shutitfile']['default_include'])
	if shutitfile_representation['shutitfile']['module_id']:
		module_id = shutitfile_representation['shutitfile']['module_id']
	else:
		module_id = skel_module_name
	return sections, module_id, module_id, depends, default_include
def handle_shutitfile_config_line(line):
	"""Translate a CONFIG/CONFIG_SECRET shutitfile line into generated
	shutit.get_config source, indented for the generated module body."""
	command = line[0].upper()
	rendered = ''
	indent = 2 * '\t'
	assert command in ('CONFIG','CONFIG_SECRET'), '%r is not a handled config command' % command
	if command in ('CONFIG','CONFIG_SECRET'):
		args = parse_shutitfile_args(line[1])
		assert type(args) in (dict,list)
		# CONFIG_SECRET marks the value as sensitive in the generated call.
		if command == 'CONFIG':
			secret_str = 'False'
		else:
			secret_str = 'True'
		if type(args) == list:
			assert len(args) in (1,2), ''
			cfg_name = args[0]
			if len(args) == 1:
				rendered += '\n' + indent + "shutit.get_config(self.module_id,'" + cfg_name + "',secret=" + secret_str + ")"
			elif len(args) == 2:
				cfg_default = args[1]
				rendered += '\n' + indent + "shutit.get_config(self.module_id,'" + cfg_name + "',default='" + cfg_default + "',secret=" + secret_str + ")"
	return rendered
def handle_shutitfile_script_line(line, numpushes, wgetgot, numlogins, ifdepth, current_note):
	"""Translate one parsed shutitfile script line into python source for the
	generated shutit module.

	line         -- parsed line: [COMMAND, args...]
	numpushes    -- running count of pushd's (WORKDIR) still needing popd's
	wgetgot      -- whether wget has already been installed (for ADD <url>)
	numlogins    -- running count of logins still needing logouts
	ifdepth      -- current IF nesting depth (controls generated indentation)
	current_note -- accumulated NOTE/COMMENT text, consumed by the next command
	Returns (build, numpushes, wgetgot, numlogins, ifdepth, current_note).
	"""
	shutitfile_command = line[0].upper()
	shutit = shutit_global.shutit
	build = ''
	# Generated code sits inside a method body (2 tabs), plus one per open IF.
	numtabs = 2 + ifdepth
	# (bugfix: duplicate 'UNTIL' entry removed from this tuple)
	assert shutitfile_command in ('RUN','SEND','SEND_EXPECT','SEND_EXPECT_MULTI','EXPECT_REACT','SEND_EXPECT_REACT','SEND_UNTIL','UNTIL','ASSERT_OUTPUT_SEND','ASSERT_OUTPUT','PAUSE_POINT','EXPECT','EXPECT_MULTI','LOGIN','USER','LOGOUT','GET_AND_SEND_PASSWORD','LOGIN_WITH_PASSWORD','USER_WITH_PASSWORD','WORKDIR','COPY','ADD','ENV','INSTALL','REMOVE','COMMENT','NOTE','IF','ELSE','ELIF','IF_NOT','ELIF_NOT','ENDIF','RUN_SCRIPT','SCRIPT_BEGIN','START_BEGIN','START_END','STOP_BEGIN','STOP_END','TEST_BEGIN','TEST_END','BUILD_BEGIN','BUILD_END','ISINSTALLED_BEGIN','ISINSTALLED_END','COMMIT','PUSH','REPLACE_LINE','LOG','QUIT','STORE_RUN'), '%r is not a handled script command' % shutitfile_command
	if shutitfile_command in ('RUN','SEND'):
		shutitfile_args = parse_shutitfile_args(line[1])
		assert type(shutitfile_args) == list
		cmd = scan_text(' '.join(shutitfile_args).replace("'", "\\'"))
		build += """\n""" + numtabs*'\t' + """shutit.send('''""" + cmd + """''',note='''""" + current_note + """''')"""
		current_note = ''
	elif shutitfile_command == 'SEND_EXPECT':
		# Emits an unterminated shutit.send(...; the matching EXPECT closes it.
		shutitfile_args = parse_shutitfile_args(line[1])
		assert type(shutitfile_args) == list
		cmd = scan_text(' '.join(shutitfile_args).replace("'", "\\'"))
		build += """\n""" + numtabs*'\t' + """shutit.send('''""" + cmd + """''',note='''""" + current_note + """''',expect="""
		current_note = ''
	elif shutitfile_command == 'EXPECT':
		shutitfile_args = parse_shutitfile_args(line[1])
		assert type(shutitfile_args) == list
		cmd = scan_text(' '.join(shutitfile_args).replace("'", "\\'"))
		build += """'''""" + cmd + """''')"""
	elif shutitfile_command == 'SEND_EXPECT_MULTI':
		shutitfile_args = parse_shutitfile_args(line[1])
		assert type(shutitfile_args) == list
		cmd = scan_text(' '.join(shutitfile_args).replace("'", "\\'"))
		build += """\n""" + numtabs*'\t' + """shutit.multisend('''""" + cmd + """''',"""
		current_note = ''
	elif shutitfile_command == 'EXPECT_MULTI':
		shutitfile_args = parse_shutitfile_args(line[1])
		assert type(shutitfile_args) == dict
		multi_dict = str(shutitfile_args)
		build += multi_dict + """,note='''""" + current_note + """''')"""
	elif shutitfile_command == 'SEND_EXPECT_REACT':
		shutitfile_args = parse_shutitfile_args(line[1])
		assert type(shutitfile_args) == list
		cmd = scan_text(' '.join(shutitfile_args).replace("'", "\\'"))
		build += """\n""" + numtabs*'\t' + """shutit.send('''""" + cmd + """''',note='''""" + current_note + """''',follow_on_commands="""
		current_note = ''
	elif shutitfile_command == 'EXPECT_REACT':
		shutitfile_args = parse_shutitfile_args(line[1])
		assert type(shutitfile_args) == dict
		multi_dict = str(shutitfile_args)
		# We don't check exit here, as reactions will often have failing commands.
		build += multi_dict + ",check_exit=False)"
	elif shutitfile_command == 'SEND_UNTIL':
		shutitfile_args = parse_shutitfile_args(line[1])
		assert type(shutitfile_args) == list
		cmd = scan_text(' '.join(shutitfile_args).replace("'", "\\'"))
		build += """\n""" + numtabs*'\t' + """shutit.send_until('''""" + cmd + """''',"""
		current_note = ''
	elif shutitfile_command == 'UNTIL':
		shutitfile_args = parse_shutitfile_args(line[1])
		assert type(shutitfile_args) == list
		cmd = scan_text(' '.join(shutitfile_args).replace("'", "\\'"))
		build += """'''""" + cmd + """''',note='''""" + current_note + """''')"""
	elif shutitfile_command == 'PAUSE_POINT':
		shutitfile_args = parse_shutitfile_args(line[1])
		assert type(shutitfile_args) == list
		msg = scan_text(' '.join(shutitfile_args).replace("'", "\\'"))
		build += """\n""" + numtabs*'\t' + """shutit.pause_point('''""" + msg + """''')"""
	elif shutitfile_command == 'QUIT':
		shutitfile_args = parse_shutitfile_args(line[1])
		assert type(shutitfile_args) == list
		cmd = scan_text(' '.join(shutitfile_args).replace("'", "\\'"))
		build += """\n""" + numtabs*'\t' + """shutit.fail('''""" + cmd + """''')"""
	elif shutitfile_command == 'LOGOUT':
		build += """\n""" + numtabs*'\t' + """shutit.logout(note='''""" + current_note + """''')"""
		current_note = ''
		numlogins -= 1
	elif shutitfile_command == 'ASSERT_OUTPUT_SEND':
		# Emits the send and an unterminated re.match; ASSERT_OUTPUT closes it.
		shutitfile_args = parse_shutitfile_args(line[1])
		assert type(shutitfile_args) == list
		cmd = scan_text(' '.join(shutitfile_args).replace("'", "\\'"))
		build += """\n""" + numtabs*'\t' + """_cmd = '''""" + cmd + """'''\n\t\t_output = shutit.send_and_get_output('''""" + cmd + """''',note='''""" + current_note + """''')\n\t\timport re\n\t\tif not re.match('''"""
		current_note = ''
	elif shutitfile_command == 'ASSERT_OUTPUT':
		shutitfile_args = parse_shutitfile_args(line[1])
		assert type(shutitfile_args) == list
		expected_output = scan_text(' '.join(shutitfile_args).replace("'", "\\'"))
		build += expected_output + """''', _output):\n""" + numtabs*'\t' + """\tshutit.pause_point('''Output of: ''' + _cmd + ''' was: ''' + _output + ''' It should be: """ + expected_output + """''')"""
	elif shutitfile_command == 'LOGIN':
		shutitfile_args = parse_shutitfile_args(line[1])
		assert type(shutitfile_args) == list
		cmd = scan_text(' '.join(shutitfile_args).replace("'", "\\'"))
		build += """\n""" + numtabs*'\t' + """shutit.login(command='""" + cmd + """',note='''""" + current_note + """''')"""
		current_note = ''
		numlogins += 1
	elif shutitfile_command == 'USER':
		shutitfile_args = parse_shutitfile_args(line[1])
		assert type(shutitfile_args) == list
		cmd = scan_text(' '.join(shutitfile_args).replace("'", "\\'"))
		build += """\n""" + numtabs*'\t' + """shutit.login(user='""" + cmd + """',note='''""" + current_note + """''')"""
		current_note = ''
		numlogins += 1
	elif shutitfile_command == 'GET_AND_SEND_PASSWORD':
		shutitfile_args = parse_shutitfile_args(line[1])
		assert type(shutitfile_args) == list
		msg = scan_text(' '.join(shutitfile_args)) + '\n'
		build += """\n""" + numtabs*'\t' + """_password = shutit.get_input('''""" + msg + """''',ispass=True)"""
		build += """\n""" + numtabs*'\t' + """shutit.send(_password,echo=False,check_exit=False,note='''""" + current_note + """''')"""
		current_note = ''
	elif shutitfile_command == 'LOGIN_WITH_PASSWORD':
		shutitfile_args = parse_shutitfile_args(line[1])
		assert type(shutitfile_args) == list
		cmd = scan_text(' '.join(shutitfile_args).replace("'", "\\'"))
		msg = scan_text(line[2]) + '\n'
		build += """\n""" + numtabs*'\t' + """_password = shutit.get_input('''""" + msg + """''',ispass=True)"""
		build += """\n""" + numtabs*'\t' + """shutit.login(command='""" + cmd + """', password=_password,note='''""" + current_note + """''')"""
		current_note = ''
	elif shutitfile_command == 'USER_WITH_PASSWORD':
		shutitfile_args = parse_shutitfile_args(line[1])
		assert type(shutitfile_args) == list
		cmd = scan_text(' '.join(shutitfile_args).replace("'", "\\'"))
		msg = scan_text(line[2]) + '\n'
		build += """\n""" + numtabs*'\t' + """_password = shutit.get_input('''""" + msg + """''',ispass=True)"""
		build += """\n""" + numtabs*'\t' + """shutit.login(user='""" + cmd + """', password=_password,note='''""" + current_note + """''')"""
		current_note = ''
	elif shutitfile_command == 'WORKDIR':
		shutitfile_args = parse_shutitfile_args(line[1])
		assert type(shutitfile_args) == list
		cmd = scan_text(' '.join(shutitfile_args).replace("'", "\\'"))
		build += """\n""" + numtabs*'\t' + """shutit.send('''pushd """ + cmd + """''',echo=False,note='''""" + current_note + """''')"""
		current_note = ''
		numpushes += 1
	elif shutitfile_command == 'COPY' or shutitfile_command == 'ADD':
		shutitfile_args = parse_shutitfile_args(line[1])
		assert type(shutitfile_args) == list
		# The <src> path must be inside the context of the build; you cannot COPY ../something /something, because the first step of a docker build is to send the context directory (and subdirectories) to the docker daemon.
		# (bugfix: was shutitfile_args[0][0:1] == '..' -- a one-character
		# slice can never equal '..', so the guard never fired)
		if shutitfile_args[0][0:2] == '..' or shutitfile_args[0][0] == '/' or shutitfile_args[0][0] == '~':
			shutit.fail('Invalid line: ' + str(shutitfile_args) + ' file must be in local subdirectory')
		if shutitfile_args[1][-1] == '/':
			# Dir we're COPYing or ADDing to
			destdir = scan_text(shutitfile_args[1])
			# File/dir we're COPYing or ADDing from
			fromfile = scan_text(shutitfile_args[0])
			# Final file/dir
			outfile = destdir + fromfile
			if os.path.isfile(fromfile):
				outfiledir = os.path.dirname(fromfile)
				build += """\n""" + numtabs*'\t' + """shutit.send('''mkdir -p """ + destdir + """/""" + outfiledir + """''',note='''""" + current_note + """''')"""
				current_note = ''
			elif os.path.isdir(fromfile):
				build += """\n""" + numtabs*'\t' + """shutit.send('''mkdir -p """ + destdir + fromfile + """''',note='''""" + current_note + """''')"""
				current_note = ''
		else:
			outfile = shutitfile_args[1]
		# If this is something we have to wget:
		if shutitfile_command == 'ADD' and urlparse(shutitfile_args[0])[0] != '':
			if not wgetgot:
				build += """\n""" + numtabs*'\t' + """shutit.install('wget')"""
				wgetgot = True
			if shutitfile_args[1][-1] == '/':
				destdir = scan_text(destdir[0:-1])
				outpath = scan_text(urlparse(shutitfile_args[0])[2])
				outpathdir = os.path.dirname(outpath)
				build += """\n""" + numtabs*'\t' + """shutit.send('''mkdir -p """ + destdir + outpathdir + """''')"""
				build += """\n""" + numtabs*'\t' + """shutit.send('''wget -O """ + destdir + outpath + ' ' + shutitfile_args[0] + """''',note='''""" + current_note + """''')"""
				current_note = ''
			else:
				outpath = scan_text(shutitfile_args[1])
				destdir = scan_text(os.path.dirname(shutitfile_args[1]))
				build += """\n""" + numtabs*'\t' + """shutit.send('''mkdir -p """ + destdir + """''')"""
				build += """\n""" + numtabs*'\t' + """shutit.send('''wget -O """ + outpath + ' ' + shutitfile_args[0] + """''',note='''""" + current_note + """''')"""
				current_note = ''
		else:
			# From the local filesystem on construction:
			localfile = scan_text(shutitfile_args[0])
			# Local file location on build:
			buildstagefile = scan_text(shutitfile_args[0])
			#if localfile[-4:] == '.tar':
			#	build += """\n\t\tshutit.send_file('""" + outfile + '/' + localfile + """')"""
			#elif localfile[-4:] == '.bz2':
			#elif localfile[-3:] == '.gz':
			#elif localfile[-3:] == '.xz':
			if os.path.isdir(localfile):
				build += """\n""" + numtabs*"""\t""" + """shutit.send_host_dir('''""" + outfile + """''', '''""" + buildstagefile + """''',note='''""" + current_note + """''')"""
				current_note = ''
			else:
				build += """\n""" + numtabs*"""\t""" + """shutit.send_host_file('''""" + outfile + """''', '''""" + buildstagefile + """''',note='''""" + current_note + """''')"""
				current_note = ''
	elif shutitfile_command == 'ENV':
		shutitfile_args = parse_shutitfile_args(line[1])
		assert type(shutitfile_args) == dict
		for k,v in iteritems(shutitfile_args):
			k = scan_text(k)
			v = scan_text(v)
			build += """\n""" + numtabs*"""\t""" + """shutit.send('''export """ + k + '=' + v + """''',note='''""" + current_note + """''')"""
			current_note = ''
	elif shutitfile_command == 'INSTALL':
		shutitfile_args = parse_shutitfile_args(line[1])
		assert type(shutitfile_args) == list
		build += """\n""" + numtabs*"""\t""" + """shutit.install('''""" + scan_text(' '.join(shutitfile_args)) + """''',note='''""" + current_note + """''')"""
		current_note = ''
	elif shutitfile_command == 'REMOVE':
		shutitfile_args = parse_shutitfile_args(line[1])
		assert type(shutitfile_args) == list
		build += """\n""" + numtabs*'\t' + """shutit.remove('''""" + scan_text(' '.join(shutitfile_args)) + """''',note='''""" + current_note + """''')"""
		current_note = ''
	elif shutitfile_command in ('COMMENT','NOTE'):
		shutitfile_args = parse_shutitfile_args(line[1])
		assert type(shutitfile_args) == list
		# COMMENT line should come before the next appropriate line where it can be used, where it is 'consumed' in a note.
		build += """\n""" + numtabs*"""\t""" + """# """ + scan_text(' '.join(shutitfile_args))
		current_note += scan_text(' '.join(shutitfile_args))
	elif shutitfile_command in ('IF','IF_NOT'):
		subcommand = scan_text(line[1])
		subcommand_args = scan_text(' '.join(line[2:]))
		if subcommand == 'FILE_EXISTS':
			statement = """shutit.file_exists('''""" + subcommand_args + """''',directory=None,note='''""" + current_note + """''')"""
			current_note = ''
		elif subcommand == 'INSTALL_TYPE':
			statement = """shutit.get_current_shutit_pexpect_session_environment(note='''""" + current_note + """''').install_type == '''""" + subcommand_args + """'''"""
			current_note = ''
		elif subcommand == 'RUN':
			statement = """shutit.send_and_return_status('''""" + subcommand_args + """''',check_exit=False,note='''""" + current_note + """''')"""
			current_note = ''
		else:
			shutit.fail('subcommand: ' + subcommand + ' not handled')
		if shutitfile_command == 'IF':
			build += """\n""" + numtabs*"""\t""" + """if """ + statement + """:"""
		elif shutitfile_command == 'IF_NOT':
			build += """\n""" + numtabs*"""\t""" + """if not """ + statement + """:"""
		ifdepth += 1
	elif shutitfile_command == 'ELSE':
		# (redundant inner 'if shutitfile_command == ELSE' check removed)
		build += """\n""" + (numtabs-1)*"""\t""" + """else:"""
	elif shutitfile_command in ('ELIF','ELIF_NOT'):
		subcommand = scan_text(line[1])
		subcommand_args = scan_text(' '.join(line[2:]))
		if subcommand == 'FILE_EXISTS':
			statement = """shutit.file_exists('''""" + subcommand_args + """''',directory=None,note='''""" + current_note + """''')"""
			current_note = ''
		elif subcommand == 'INSTALL_TYPE':
			statement = """shutit.get_current_shutit_pexpect_session_environment(note='''""" + current_note + """''').install_type == '""" + subcommand_args + """'"""
			current_note = ''
		elif subcommand == 'RUN':
			statement = """shutit.send_and_return_status('''""" + subcommand_args + """''',check_exit=False,note='''""" + current_note + """''')"""
			current_note = ''
		else:
			shutit.fail('subcommand: ' + subcommand + ' not handled')
		if shutitfile_command == 'ELIF':
			build += """\n""" + (numtabs-1)*'\t' + '''elif ''' + statement + ''':'''
		elif shutitfile_command == 'ELIF_NOT':
			build += """\n""" + (numtabs-1)*"""\t""" + """elif not """ + statement + """:"""
	elif shutitfile_command == 'ENDIF':
		ifdepth -= 1
	elif shutitfile_command == 'RUN_SCRIPT':
		shutitfile_args = line[1]
		assert type(shutitfile_args) == str
		script = scan_text(shutitfile_args)
		build += """\n""" + numtabs*"""\t""" + """shutit.run_script('''""" + script + """''',note='''""" + current_note + """''')"""
		current_note = ''
	elif shutitfile_command == 'COMMIT':
		global _default_repo_name
		shutitfile_args = parse_shutitfile_args(line[1])
		assert type(shutitfile_args) == list
		assert len(shutitfile_args) == 1
		repo_name = scan_text(shutitfile_args[0])
		if repo_name == _default_repo_name:
			shutit.log('The docker container will be committed with the default repo_name: ' + _default_repo_name + '.\nYou can change this by adding this to the ~/.shutit/config file:\n\n[repository]\nname:yourname\n\nand re-running.',level=logging.WARNING)
		if len(shutitfile_args) == 1:
			build += """\n""" + numtabs*"""\t""" + """shutit.do_repository_work('''""" + repo_name + """''',force=None,tag=True,note='''""" + current_note + """''')"""
			current_note = ''
	elif shutitfile_command == 'PUSH':
		shutitfile_args = parse_shutitfile_args(line[1])
		assert type(shutitfile_args) == list
		assert len(shutitfile_args) == 1
		assert shutit.repository['user'] != '', 'If you want to push, set the [repository] settings (user,password,email) in your ~/.shutit/config file.'
		repo_name = scan_text(shutitfile_args[0])
		build += """\n""" + numtabs*"""\t""" + """shutit.push_repository('''""" + repo_name + """''',note='''""" + current_note + """''')"""
		current_note = ''
	elif shutitfile_command == 'REPLACE_LINE':
		shutitfile_args = parse_shutitfile_args(line[1])
		assert type(shutitfile_args) == dict
		# TODO: assert existence of these
		# (renamed from 'line' to avoid shadowing the function parameter)
		replacement_line = scan_text(shutitfile_args['line'])
		filename = scan_text(shutitfile_args['filename'])
		pattern = scan_text(shutitfile_args['pattern'])
		build += """\n""" + numtabs*'\t' + """shutit.replace_text('''""" + replacement_line + """''','''""" + filename + """''',pattern='''""" + pattern + """''',note='''""" + current_note + """''')"""
		current_note = ''
	elif shutitfile_command == 'LOG':
		shutitfile_args = scan_text(line[1])
		assert type(shutitfile_args) == str and shutitfile_args in ('DEBUG','WARNING','CRITICAL','INFO','ERROR')
		build += """\n""" + numtabs*'\t' + """import logging"""
		build += """\n""" + numtabs*'\t' + """logging.getLogger().setLevel(logging.""" + shutitfile_args + """)"""
	elif shutitfile_command == 'STORE_RUN':
		config_item = scan_text(line[1])
		command = scan_text(' '.join(line[2:]))
		build += """\n""" + numtabs*'\t' + """shutit.cfg[self.module_id]['""" + config_item + """'] = shutit.send_and_get_output('''""" + command + """''',note='''""" + current_note + """''')"""
		current_note = ''
	# See shutitfile_get_section
	elif shutitfile_command in ('SCRIPT_BEGIN','START_BEGIN','START_END','STOP_BEGIN','STOP_END','TEST_BEGIN','TEST_END','BUILD_BEGIN','BUILD_END','ISINSTALLED_BEGIN','ISINSTALLED_END'):
		# No action to perform on these lines, but they are legal.
		pass
	else:
		shutit.fail('shutitfile_command: ' + shutitfile_command + ' not handled')
	return build, numpushes, wgetgot, numlogins, ifdepth, current_note
def scan_text(text):
	"""Expand every ``{{ shutit.THING }}`` placeholder in *text* into the
	python expression that reads ``shutit.cfg[self.module_id]["THING"]``,
	spliced into the surrounding triple-quoted string literal.
	"""
	placeholder = re.compile("(.*){{ shutit.(.*) }}(.*)$")
	while True:
		hit = placeholder.match(text)
		if hit is None:
			return text
		prefix = hit.group(1)
		cfg_name = hit.group(2)
		suffix = hit.group(3)
		text = prefix + "''' + shutit.cfg[self.module_id][\"" + cfg_name + "\"] + '''" + suffix
# Get the section of the shutitfile we are in.
def shutitfile_get_section(shutitfile_command, current):
	"""Return the shutitfile section the given command moves us into.

	*_BEGIN commands enter the section named by their prefix (lowercased),
	*_END commands fall back to 'build', anything else keeps *current*.
	"""
	boundary = re.match(r'^(.*)_(BEGIN|END)$', shutitfile_command)
	if boundary is None:
		return current
	if boundary.group(2) == 'BEGIN':
		return boundary.group(1).lower()
	return 'build'
| 43.745958 | 701 | 0.638528 |
acf63864164e855f5bbc089ab18ff12af66033f1 | 99 | py | Python | run.py | whoismaruf/flask2do | d5f23cecee7f0a5d28f15defadea4bda1493bf36 | [
"Unlicense",
"MIT"
] | 1 | 2021-07-02T18:22:39.000Z | 2021-07-02T18:22:39.000Z | run.py | whoismaruf/flask2do | d5f23cecee7f0a5d28f15defadea4bda1493bf36 | [
"Unlicense",
"MIT"
] | 1 | 2020-10-08T06:16:22.000Z | 2020-10-09T14:09:22.000Z | run.py | whoismaruf/flask2do | d5f23cecee7f0a5d28f15defadea4bda1493bf36 | [
"Unlicense",
"MIT"
] | null | null | null | # Only for local development server
from root import app
if __name__ == "__main__":
app.run() | 16.5 | 35 | 0.707071 |
acf639f29df07305b8727ac8d6cf96c71fcd75ac | 11,477 | py | Python | openstackclient/identity/v3/project.py | redhat-openstack/python-openstackclient | 7dc2e1dc08b0692a3accb343c62451fb3d83f4cd | [
"Apache-2.0"
] | null | null | null | openstackclient/identity/v3/project.py | redhat-openstack/python-openstackclient | 7dc2e1dc08b0692a3accb343c62451fb3d83f4cd | [
"Apache-2.0"
] | null | null | null | openstackclient/identity/v3/project.py | redhat-openstack/python-openstackclient | 7dc2e1dc08b0692a3accb343c62451fb3d83f4cd | [
"Apache-2.0"
] | null | null | null | # Copyright 2012-2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Project action implementations"""
import six
from keystoneauth1 import exceptions as ks_exc
from openstackclient.common import command
from openstackclient.common import parseractions
from openstackclient.common import utils
from openstackclient.i18n import _
from openstackclient.identity import common
class CreateProject(command.ShowOne):
    """Create new project"""
    def get_parser(self, prog_name):
        """Build the argument parser for ``openstack project create``."""
        parser = super(CreateProject, self).get_parser(prog_name)
        parser.add_argument(
            'name',
            metavar='<project-name>',
            help=_('New project name'),
        )
        parser.add_argument(
            '--domain',
            metavar='<domain>',
            help=_('Domain owning the project (name or ID)'),
        )
        parser.add_argument(
            '--parent',
            metavar='<project>',
            help=_('Parent of the project (name or ID)'),
        )
        parser.add_argument(
            '--description',
            metavar='<description>',
            help=_('Project description'),
        )
        # --enable and --disable are mutually exclusive; default is enabled.
        enable_group = parser.add_mutually_exclusive_group()
        enable_group.add_argument(
            '--enable',
            action='store_true',
            help=_('Enable project'),
        )
        enable_group.add_argument(
            '--disable',
            action='store_true',
            help=_('Disable project'),
        )
        parser.add_argument(
            '--property',
            metavar='<key=value>',
            action=parseractions.KeyValueAction,
            help=_('Add a property to <name> '
                   '(repeat option to set multiple properties)'),
        )
        parser.add_argument(
            '--or-show',
            action='store_true',
            help=_('Return existing project'),
        )
        return parser
    def take_action(self, parsed_args):
        """Create the project; with --or-show, return an existing one instead
        of failing on a name conflict."""
        identity_client = self.app.client_manager.identity
        # Resolve domain and parent references to IDs before the create call.
        domain = None
        if parsed_args.domain:
            domain = common.find_domain(identity_client,
                                        parsed_args.domain).id
        parent = None
        if parsed_args.parent:
            parent = utils.find_resource(
                identity_client.projects,
                parsed_args.parent,
            ).id
        enabled = True
        if parsed_args.disable:
            enabled = False
        kwargs = {}
        if parsed_args.property:
            # Copy so we don't mutate the parsed namespace's dict.
            kwargs = parsed_args.property.copy()
        try:
            project = identity_client.projects.create(
                name=parsed_args.name,
                domain=domain,
                parent=parent,
                description=parsed_args.description,
                enabled=enabled,
                **kwargs
            )
        except ks_exc.Conflict as e:
            # Name already taken in this domain: either return the existing
            # project (--or-show) or re-raise the conflict.
            if parsed_args.or_show:
                project = utils.find_resource(identity_client.projects,
                                              parsed_args.name,
                                              domain_id=domain)
                self.log.info(_('Returning existing project %s'), project.name)
            else:
                raise e
        # 'links' is API metadata, not useful in CLI output.
        project._info.pop('links')
        return zip(*sorted(six.iteritems(project._info)))
class DeleteProject(command.Command):
    """Delete project(s)"""
    def get_parser(self, prog_name):
        """Build the argument parser for ``openstack project delete``."""
        parser = super(DeleteProject, self).get_parser(prog_name)
        parser.add_argument(
            'projects',
            metavar='<project>',
            nargs="+",
            help=_('Project(s) to delete (name or ID)'),
        )
        parser.add_argument(
            '--domain',
            metavar='<domain>',
            help=_('Domain owning <project> (name or ID)'),
        )
        return parser
    def take_action(self, parsed_args):
        """Delete each named project, scoping the lookup to --domain if given."""
        identity_client = self.app.client_manager.identity
        domain = None
        if parsed_args.domain:
            domain = common.find_domain(identity_client, parsed_args.domain)
        for project in parsed_args.projects:
            # Resolve name/ID to a project object before deleting by ID.
            if domain is not None:
                project_obj = utils.find_resource(identity_client.projects,
                                                  project,
                                                  domain_id=domain.id)
            else:
                project_obj = utils.find_resource(identity_client.projects,
                                                  project)
            identity_client.projects.delete(project_obj.id)
class ListProject(command.Lister):
    """List projects"""
    def get_parser(self, prog_name):
        """Build the argument parser for ``openstack project list``."""
        parser = super(ListProject, self).get_parser(prog_name)
        parser.add_argument(
            '--domain',
            metavar='<domain>',
            help=_('Filter projects by <domain> (name or ID)'),
        )
        parser.add_argument(
            '--user',
            metavar='<user>',
            help=_('Filter projects by <user> (name or ID)'),
        )
        parser.add_argument(
            '--long',
            action='store_true',
            default=False,
            help=_('List additional fields in output'),
        )
        return parser
    def take_action(self, parsed_args):
        """List projects, optionally filtered by domain and/or user."""
        identity_client = self.app.client_manager.identity
        # --long adds domain/description/enabled columns.
        if parsed_args.long:
            columns = ('ID', 'Name', 'Domain ID', 'Description', 'Enabled')
        else:
            columns = ('ID', 'Name')
        kwargs = {}
        domain_id = None
        if parsed_args.domain:
            domain_id = common.find_domain(identity_client,
                                           parsed_args.domain).id
            kwargs['domain'] = domain_id
        if parsed_args.user:
            # When a domain filter is present, scope the user lookup to it.
            if parsed_args.domain:
                user_id = utils.find_resource(identity_client.users,
                                              parsed_args.user,
                                              domain_id=domain_id).id
            else:
                user_id = utils.find_resource(identity_client.users,
                                              parsed_args.user).id
            kwargs['user'] = user_id
        data = identity_client.projects.list(**kwargs)
        return (columns,
                (utils.get_item_properties(
                    s, columns,
                    formatters={},
                ) for s in data))
class SetProject(command.Command):
    """Set project properties"""
    def get_parser(self, prog_name):
        """Build the argument parser for ``openstack project set``."""
        parser = super(SetProject, self).get_parser(prog_name)
        parser.add_argument(
            'project',
            metavar='<project>',
            help=_('Project to modify (name or ID)'),
        )
        parser.add_argument(
            '--name',
            metavar='<name>',
            help=_('Set project name'),
        )
        parser.add_argument(
            '--domain',
            metavar='<domain>',
            help=_('Domain owning <project> (name or ID)'),
        )
        parser.add_argument(
            '--description',
            metavar='<description>',
            help=_('Set project description'),
        )
        enable_group = parser.add_mutually_exclusive_group()
        enable_group.add_argument(
            '--enable',
            action='store_true',
            help=_('Enable project'),
        )
        enable_group.add_argument(
            '--disable',
            action='store_true',
            help=_('Disable project'),
        )
        parser.add_argument(
            '--property',
            metavar='<key=value>',
            action=parseractions.KeyValueAction,
            help=_('Set a property on <project> '
                   '(repeat option to set multiple properties)'),
        )
        return parser
    def take_action(self, parsed_args):
        """Apply the requested attribute changes to the project."""
        identity_client = self.app.client_manager.identity
        # Nothing to change: bail out without touching the API.
        # NOTE(review): --domain alone passes this guard and triggers an
        # update() with empty kwargs (a no-op API call) -- verify intended.
        if (not parsed_args.name
                and not parsed_args.domain
                and not parsed_args.description
                and not parsed_args.enable
                and not parsed_args.property
                and not parsed_args.disable):
            return
        project = common.find_project(identity_client,
                                      parsed_args.project,
                                      parsed_args.domain)
        # Build the update payload from only the options that were supplied.
        kwargs = {}
        if parsed_args.name:
            kwargs['name'] = parsed_args.name
        if parsed_args.description:
            kwargs['description'] = parsed_args.description
        if parsed_args.enable:
            kwargs['enabled'] = True
        if parsed_args.disable:
            kwargs['enabled'] = False
        if parsed_args.property:
            kwargs.update(parsed_args.property)
        identity_client.projects.update(project.id, **kwargs)
class ShowProject(command.ShowOne):
    """Display project details"""
    def get_parser(self, prog_name):
        """Build the argument parser for ``openstack project show``."""
        parser = super(ShowProject, self).get_parser(prog_name)
        parser.add_argument(
            'project',
            metavar='<project>',
            help=_('Project to display (name or ID)'),
        )
        parser.add_argument(
            '--domain',
            metavar='<domain>',
            help=_('Domain owning <project> (name or ID)'),
        )
        parser.add_argument(
            '--parents',
            action='store_true',
            default=False,
            help=_('Show the project\'s parents as a list'),
        )
        parser.add_argument(
            '--children',
            action='store_true',
            default=False,
            help=_('Show project\'s subtree (children) as a list'),
        )
        return parser
    def take_action(self, parsed_args):
        """Look up the project (optionally with parents/subtree) and format it."""
        identity_client = self.app.client_manager.identity
        if parsed_args.domain:
            domain = common.find_domain(identity_client, parsed_args.domain)
            project = utils.find_resource(
                identity_client.projects,
                parsed_args.project,
                domain_id=domain.id,
                parents_as_list=parsed_args.parents,
                subtree_as_list=parsed_args.children)
        else:
            project = utils.find_resource(
                identity_client.projects,
                parsed_args.project,
                parents_as_list=parsed_args.parents,
                subtree_as_list=parsed_args.children)
        # Flatten the nested parents/subtree structures into lists of IDs
        # for readable CLI output.
        if project._info.get('parents'):
            project._info['parents'] = [str(p['project']['id'])
                                        for p in project._info['parents']]
        if project._info.get('subtree'):
            project._info['subtree'] = [str(p['project']['id'])
                                        for p in project._info['subtree']]
        # 'links' is API metadata, not useful in CLI output.
        project._info.pop('links')
        return zip(*sorted(six.iteritems(project._info)))
| 33.363372 | 79 | 0.538294 |
acf63a903f760d5fb1d26d509b4d4a526d77b090 | 1,597 | py | Python | main.py | Omerdan03/dog_scraper | 85c22ec7b378bb2736fd7c8ef0314616c957dbc1 | [
"MIT"
] | null | null | null | main.py | Omerdan03/dog_scraper | 85c22ec7b378bb2736fd7c8ef0314616c957dbc1 | [
"MIT"
] | null | null | null | main.py | Omerdan03/dog_scraper | 85c22ec7b378bb2736fd7c8ef0314616c957dbc1 | [
"MIT"
] | null | null | null | import argparse
import threading
from selenium.webdriver.chrome.options import Options
from tqdm import tqdm
from scraper import const, Scraper
def str2bool(str):
    """argparse ``type=`` helper: pass real bools through, map common yes/no
    spellings to bool, and reject anything else with ArgumentTypeError.

    (The parameter is named ``str`` for backward compatibility; it shadows
    the builtin inside this function.)
    """
    if isinstance(str, bool):
        return str
    normalized = str.lower()
    if normalized in ('yes', 'true', 't', 'y', '1'):
        return True
    if normalized in ('no', 'false', 'f', 'n', '0'):
        return False
    raise argparse.ArgumentTypeError('Boolean value expected.')
def main():
    """Parse CLI options and launch one scraper thread per chip number."""
    parser = argparse.ArgumentParser(
        prog='dog_scraper',
        formatter_class=argparse.RawTextHelpFormatter,
        description="Runs the scrapper")
    parser.add_argument('-debug', metavar='debug_mode', default=False, type=str2bool,
                        help='option for running with or without debugging.')
    # (fixed copy-pasted metavar/help text from the -debug option)
    parser.add_argument('-thredlimit', '-limit', metavar='thread_limit', default=-1, type=int,
                        help='maximum number of concurrent scraper threads (-1 for unlimited).')
    args = parser.parse_args()
    chrome_options = Options()
    if args.debug:
        chrome_options.add_argument("--headless")
    if args.thredlimit != -1:
        thread_limiter = threading.BoundedSemaphore(args.thredlimit)
    else:
        # Bugfix: thread_limiter was left undefined on this (default) path,
        # so Scraper(...) below raised NameError. None means "no limit" --
        # assumes Scraper accepts None here; TODO confirm against Scraper.
        thread_limiter = None
    scraper_options = {'chrome_options': chrome_options,
                       'new_file': True}
    scraper = Scraper(thread_limiter, const.OUTPUT_FILE, scraper_options)
    start = 900032001799568
    for chip_num in tqdm(range(start, start + 1)):
        tread = threading.Thread(target=scraper.run, args=[chip_num])
        tread.start()
if __name__ == '__main__':
main()
| 29.574074 | 92 | 0.644333 |
acf63b4ab57efe9d8d33044ffcf551aa862764b6 | 2,088 | py | Python | telemetry/telemetry/internal/util/binary_manager_unittest.py | ravitejavalluri/catapult | 246a39a82c2213d913a96fff020a263838dc76e6 | [
"BSD-3-Clause"
] | 4 | 2017-06-04T05:37:39.000Z | 2021-06-26T05:30:15.000Z | telemetry/telemetry/internal/util/binary_manager_unittest.py | ravitejavalluri/catapult | 246a39a82c2213d913a96fff020a263838dc76e6 | [
"BSD-3-Clause"
] | 9 | 2017-09-10T19:49:04.000Z | 2018-04-14T04:39:55.000Z | telemetry/telemetry/internal/util/binary_manager_unittest.py | ravitejavalluri/catapult | 246a39a82c2213d913a96fff020a263838dc76e6 | [
"BSD-3-Clause"
] | 2 | 2017-09-10T20:30:38.000Z | 2017-09-12T19:50:03.000Z | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from telemetry.core import exceptions
from telemetry.internal.util import binary_manager
import mock
class BinaryManagerTest(unittest.TestCase):
  def setUp(self):
    # Preserve the real initialized dependency manager so tearDown can
    # restore it; each test then starts from an uninitialized state.
    self.actual_binary_manager = binary_manager._binary_manager
    binary_manager._binary_manager = None
  def tearDown(self):
    # Restore the real manager saved in setUp.
    binary_manager._binary_manager = self.actual_binary_manager
  def testReinitialization(self):
    # A second InitDependencyManager call must raise.
    binary_manager.InitDependencyManager(None)
    self.assertRaises(exceptions.InitializationError,
                      binary_manager.InitDependencyManager, None)
  @mock.patch('py_utils.binary_manager.BinaryManager')
  def testFetchPathInitialized(self, binary_manager_mock):
    # NOTE(review): 'assert_call_args' is not a real mock assertion method,
    # so this check is a silent no-op (or an AttributeError on newer mock
    # versions). 'assert_has_calls' was likely intended -- verify before
    # relying on this test's call-shape checking.
    expected = [mock.call.binary_manager.BinaryManager(
        ['base_config_object']),
                mock.call.binary_manager.BinaryManager().FetchPath(
                    'dep', 'plat_arch')]
    binary_manager.InitDependencyManager(None)
    binary_manager.FetchPath('dep', 'plat', 'arch')
    binary_manager_mock.assert_call_args(expected)
  def testFetchPathUninitialized(self):
    self.assertRaises(exceptions.InitializationError,
                      binary_manager.FetchPath, 'dep', 'plat', 'arch')
  @mock.patch('py_utils.binary_manager.BinaryManager')
  def testLocalPathInitialized(self, binary_manager_mock):
    # NOTE(review): same misspelled-mock-assertion issue as above.
    expected = [mock.call.binary_manager.BinaryManager(
        ['base_config_object']),
                mock.call.binary_manager.BinaryManager().LocalPath(
                    'dep', 'plat_arch')]
    binary_manager.InitDependencyManager(None)
    binary_manager.LocalPath('dep', 'plat', 'arch')
    binary_manager_mock.assert_call_args(expected)
  def testLocalPathUninitialized(self):
    self.assertRaises(exceptions.InitializationError,
                      binary_manager.LocalPath, 'dep', 'plat', 'arch')
| 39.396226 | 72 | 0.724617 |
acf63b4d1ac0a813d1d00465069b980ca5b21766 | 634 | py | Python | 2015/day8.py | tobeannouncd/AdventOfCode | b4fe6e9b10a0dc191429a90ab351806df03bca10 | [
"MIT"
] | null | null | null | 2015/day8.py | tobeannouncd/AdventOfCode | b4fe6e9b10a0dc191429a90ab351806df03bca10 | [
"MIT"
] | null | null | null | 2015/day8.py | tobeannouncd/AdventOfCode | b4fe6e9b10a0dc191429a90ab351806df03bca10 | [
"MIT"
] | null | null | null | import re
import advent
HEX_PAT = re.compile(r'\\x[a-f0-9]{2}')
def parse_string(s: str) -> str:
    r"""Decode an AoC day-8 string literal: strip the surrounding quotes and
    resolve \\, \" and \xHH escapes in one left-to-right pass.

    Bugfix: the previous implementation applied global str.replace calls and
    then a hex-escape regex, which mis-decoded inputs such as "\\x41" (an
    escaped backslash followed by the literal characters x41) into "A".
    """
    body = s[1:-1]
    chars = []
    i = 0
    while i < len(body):
        ch = body[i]
        if ch == '\\' and i + 1 < len(body):
            nxt = body[i + 1]
            if nxt in ('\\', '"'):
                chars.append(nxt)
                i += 2
                continue
            if nxt == 'x':
                chars.append(chr(int(body[i + 2:i + 4], 16)))
                i += 4
                continue
        # Ordinary character (or stray backslash): keep as-is.
        chars.append(ch)
        i += 1
    return ''.join(chars)
def encode_string(s: str) -> str:
    """Re-encode *s* as a quoted code literal, escaping backslashes first
    and then double quotes (order matters)."""
    escaped = s.replace('\\', '\\\\')
    escaped = escaped.replace('"', '\\"')
    return '"' + escaped + '"'
def main():
    """Fetch the puzzle input and print both day-8 answers."""
    lines = advent.get_input(2015, 8).split('\n')
    # Part 1: total characters of code minus characters of decoded data.
    decoded_overhead = sum(len(line) - len(parse_string(line)) for line in lines)
    # Part 2: total characters after re-encoding minus characters of code.
    encoded_overhead = sum(len(encode_string(line)) - len(line) for line in lines)
    print('Part 1:', decoded_overhead)
    print('Part 2:', encoded_overhead)
# Entry point: run the solver only when executed as a script, not on import.
if __name__ == '__main__':
    main()
| 22.642857 | 77 | 0.550473 |
acf63c872e6f8b2aade1951f5d8944fa38dac0a5 | 858 | py | Python | constants.py | pockeleewout/CapsNet | 530142c12b3682fe36d173fa85e6650d650e3989 | [
"MIT"
] | null | null | null | constants.py | pockeleewout/CapsNet | 530142c12b3682fe36d173fa85e6650d650e3989 | [
"MIT"
] | null | null | null | constants.py | pockeleewout/CapsNet | 530142c12b3682fe36d173fa85e6650d650e3989 | [
"MIT"
] | null | null | null | import os
# Directory to save trained model checkpoints
SAVE_DIR = "saved_models"
# Directory to save generated plots
PLOT_DIR = "plots"
# Directory to save training logs
LOG_DIR = "logs"
# Directory to save run options/configuration
OPTIONS_DIR = "options"
# Directory to save reconstructed images
IMAGES_SAVE_DIR = "reconstructions"
# Directory holding the smallNORB dataset
SMALL_NORB_PATH = os.path.join("datasets", "smallNORB")
# Default values for command arguments
DEFAULT_LEARNING_RATE = 0.001
DEFAULT_ANNEAL_TEMPERATURE = 8 # anneal temperature for alpha (per original note -- confirm usage)
DEFAULT_ALPHA = 0.0005 # Scaling factor for reconstruction loss
DEFAULT_DATASET = "small_norb" # one of: 'mnist', 'small_norb'
DEFAULT_DECODER = "FC" # one of: 'FC' (fully connected) or 'Conv'
DEFAULT_BATCH_SIZE = 128
DEFAULT_EPOCHS = 300 # number of training epochs
DEFAULT_USE_GPU = True
DEFAULT_ROUTING_ITERATIONS = 3
DEFAULT_VALIDATION_SIZE = 1000
# Random seed for validation split
VALIDATION_SEED = 889256487
| 28.6 | 64 | 0.778555 |
acf63cdf269a5527e0fa00bbc924e20cfc06f563 | 10,638 | py | Python | pox/ext/run.py | calm11/TP1 | 69d49ba55a6146fc88df394d1f3ee1811ff7f4bf | [
"Apache-2.0"
] | null | null | null | pox/ext/run.py | calm11/TP1 | 69d49ba55a6146fc88df394d1f3ee1811ff7f4bf | [
"Apache-2.0"
] | null | null | null | pox/ext/run.py | calm11/TP1 | 69d49ba55a6146fc88df394d1f3ee1811ff7f4bf | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
import os
import sys
import networkx as nx
import matplotlib.pyplot as plt
import argparse
import random
from mininet.net import Mininet
from mininet.node import CPULimitedHost
from mininet.link import TCLink
from mininet.node import OVSController
from mininet.node import Controller
from mininet.node import RemoteController
from mininet.cli import CLI
from mininet.util import custom, pmonitor
sys.path.append("../../")
from subprocess import Popen
from time import sleep, time
# Construtores de classe para controladores e topologias
from pox.ext.controllers import JellyfishController
from topologies import topologies, dpid_to_ip_addr, dpid_to_mac_addr
def test_ping(net):
    """
    Simple test to make sure all hosts in a topology
    can ping each other.  Always drops into the Mininet CLI and then
    stops the network on the way out, even after Ctrl-C.
    """
    print("\n\n==== Running ping all test ...")
    try:
        net.start()
        sleep(3)  # give the controller/switches time to settle before pinging
        net.pingAll()
    except KeyboardInterrupt:
        pass  # let the operator abort the ping sweep cleanly
    finally:
        CLI(net)
        net.stop()
def get_permutation_traffic_dict(hosts):
    """
    Return a random permutation traffic pattern as a dict hx --> hy:
    every host sends to exactly one other host, every host receives from
    exactly one host, and no host sends to itself (a random derangement).

    Bug fix: the previous implementation assigned targets one at a time
    and could spin forever when the only remaining target for the final
    host was that host itself.  Resampling the whole permutation avoids
    that dead end; the expected number of reshuffles before a
    fixed-point-free permutation appears is about e (~2.72).

    Raises ValueError when fewer than two hosts are given, since no
    derangement exists (the old code would hang in that case).
    """
    if len(hosts) < 2:
        raise ValueError("need at least two hosts for permutation traffic")
    targets = hosts[:]
    while True:
        random.shuffle(targets)
        if all(src != dst for src, dst in zip(hosts, targets)):
            return dict(zip(hosts, targets))
def update_server_throughputs(host_lines, host_throughput, rounds):
    """
    Parse the captured iperf summary line of each host and fold its
    bandwidth reading into host_throughput, normalized to Mbits/sec and
    pre-divided by *rounds* so the totals accumulate to per-round averages.
    """
    for name, line in host_lines.items():
        tokens = line.split()
        # Second-to-last token is the bandwidth value, last is its unit.
        value = float(tokens[-2]) / rounds
        unit = tokens[-1]
        if unit.startswith("Gbits"):
            value *= 1000
        elif unit.startswith("Kbits"):
            value /= 1000
        host_throughput[name] = host_throughput.get(name, 0) + value
def monitor_throughput(popens, P, rounds, host_throughput):
    """
    Watch the per-host iperf processes and keep, for each host, the last
    output line that carries its bandwidth reading, then fold those
    readings into host_throughput.

    With a single flow (P == 1) iperf reports the bandwidth on the
    'Bytes' summary line; with parallel flows the aggregate appears on
    '[SUM]' lines instead.
    See: https://github.com/mininet/mininet/blob/master/examples/popen.py
    """
    marker = 'Bytes' if P == 1 else '[SUM]'
    latest_lines = {}
    for host, line in pmonitor(popens):
        if host and marker in line:
            latest_lines[host.name] = line.strip()
    # Accumulate this round's readings into the running per-host totals.
    update_server_throughputs(latest_lines, host_throughput, rounds)
NIC_RATE = 10
# NIC line rate in Mb/s.  We set this rate ourselves via the bandwidth field
# of each TCLink; used to report average throughput as a fraction of line rate.
def rand_perm_traffic(net, P=1, rounds=5):
    """
    Tests the topology using random permutation traffic,
    as described in the Jellyfish paper.
    P is the number of parallel flows to send from each host
    to another host; rounds is the number of iperf rounds to average over.
    Prints per-host and average throughput (Mbits/sec) at the end.
    """
    #send_dict = get_permutation_traffic_dict(net.topo.hosts())
    # At the end of the loop below,
    # will map [h] -> average throughput across rounds
    host_throughput = {}
    try:
        net.start()
        # For a certain number of rounds, run iperf on random permutation
        # pairs of hosts.
        #
        # TODO: should the random permutation matrix be recalculated after every
        # round, or once before the 5 rounds?
        for i in range(rounds):
            send_dict = get_permutation_traffic_dict(net.topo.hosts())
            print(" \n ROUND %d \n" % (i+1))
            popens = {}
            for h in send_dict:
                from_host_name = h
                to_host_name = send_dict[h]
                from_host, to_host = net.getNodeByName(from_host_name, to_host_name)
                from_ip = from_host.IP()
                to_ip = to_host.IP()
                # Set iperf server on receiver
                to_host.popen('iperf -s')
                # Set an iperf client on sender
                popens[from_host] = from_host.popen('iperf -c %s -P %s' % (to_ip, P))
            # Get the output from the iperf commands, and update the
            # host_throughput dictionary.
            print(" \n Throughput watch... ")
            monitor_throughput(popens, P, rounds, host_throughput)
            print(" \n End throughput watch... ")
    except KeyboardInterrupt:
        pass
    finally:
        net.stop()
    print("\n ~~ Results ~~ ")
    print("\n Individual host throughput averages, in Mbits/sec")
    print(host_throughput) # values in MBits/s
    # Sum first; only divide when there is at least one reading.
    avg_throughput = float(sum(host_throughput.values()))
    if len(host_throughput.items()) == 0:
        print("There weren't any throughput items!")
    else:
        avg_throughput /= len(host_throughput.items())
    # A reading can be missing when a host's iperf produced no summary line.
    if len(host_throughput.items()) != len(net.hosts):
        print("ERROR: incorrect number of host readings: %d/%d"
              % (len(host_throughput.items()), len(net.hosts)))
    # NOTE: we are setting the NIC rate by specifying the bandwidth
    # field of each TCLink object.
    print('Average server throughput: {}'.format(avg_throughput))
    print('Percentage of NIC rate: {:.1%}'.format(avg_throughput/NIC_RATE))
def print_switches(net, n_interfaces=3):
    """
    Print debugging info for each switch: repr, the MAC of each of its
    interfaces, its IP attribute and its dpid.

    n_interfaces is the number of interfaces available on the switch.
    """
    print(" --- Switches --- ")
    for switch in net.switches:
        print("\n---------")
        print(repr(switch))
        for port in range(1, n_interfaces + 1):
            print(switch.MAC(intf="%s-eth%d" % (switch, port)))
        print(switch.IP)
        print(switch.dpid)
def print_hosts(net):
    """Print debugging info (IP method, IP, MAC, config) for every host."""
    print(" --- Hosts --- ")
    for host in net.hosts:
        print("\n---------")
        print(host.IP)  # the bound method object itself, mirroring the original dump
        print(host.IP())
        print(host.MAC())
        print(host.config())
def set_switch_eths_ips(net, n_interfaces=3):
    """
    Assign a MAC and an IP address to every interface of every switch.

    Scheme: interface x gets MAC '<x as two digits>' + the dpid-derived MAC
    tail, and IP '<x>' + the dpid-derived IP tail.  n_interfaces is the
    number of interfaces on each switch.

    NOTE: kept for reference only -- nothing calls this function.
    """
    for switch in net.switches:
        base_mac = dpid_to_mac_addr(int(switch.dpid))
        base_ip = dpid_to_ip_addr(int(switch.dpid))
        for port in range(1, n_interfaces + 1):
            ifname = "%s-eth%d" % (switch, port)
            # Interface number replaces the leading MAC byte / IP octet.
            mac = "%02d" % port + base_mac[2:]
            ip = str(port) + base_ip[1:]
            switch.setMAC(mac=mac, intf=ifname)
            print("Setting " + ifname + " to %s" % mac)
            switch.setIP(ip=ip, intf=ifname)
def set_host_arps(net):
    """
    Statically populate every host's ARP table with the MAC address of
    every other host, so hosts can send traffic to each other without
    runtime ARP resolution.
    """
    hosts = net.hosts
    for receiver in hosts:
        for peer in hosts:
            if peer == receiver:
                continue
            receiver.setARP(peer.IP(), peer.MAC())
            print("Set arp on host %s for IP %s to mac %s" % (str(receiver), peer.IP(), peer.MAC()))
def print_topo_info(net):
    # Convenience wrapper: dump host info, then switch info.
    print_hosts(net)
    print_switches(net)
# Set up argument parser.
# Flags select the experiment to run; -t/-r/-s are required run parameters.
parser = argparse.ArgumentParser()
parser.add_argument('-display', action='store_true')
parser.add_argument('-pingtest', action='store_true')
parser.add_argument('-randpermtraffic', action='store_true')
parser.add_argument('-cli', action='store_true')
# Topology spec is comma-separated: name,n,k,r (e.g. jelly,10,3,2).
parser.add_argument('-t','--topology',
                    help='What topology from pox.ext.topologies to use with arguments', required=True)
parser.add_argument('-f','--flows',
                    help='Number of flows to test with random permutation traffic')
parser.add_argument('-r','--routing',
                    help='One of ecmp, kshort. What routing algorithm to use', required=True)
parser.add_argument('-s','--seed',
                    help='What random seed to use for this experiment.', required=True)
if __name__ == '__main__':
    args = vars(parser.parse_args())
    # We only support Jellyfish topologies.
    if not args['topology'].startswith("jelly"):
        print("We only support the 'Jelly' topology.")
        raise SystemExit
    # Topology spec format: name,n,k,r (all but the name are integers).
    topology_args = args['topology'].split(',')
    topo_name = topology_args[0]
    n = int(topology_args[1])
    k = int(topology_args[2])
    r = int(topology_args[3])
    seed = int(args['seed'])
    random.seed(seed)
    routing = args['routing']
    if routing not in ['ecmp', 'kshort']:
        print("We only know ECMP and KSHORT routing")
        raise SystemExit
    topo = topologies[topo_name](random_seed=seed,
                                 n=n,
                                 k=k,
                                 r=r)
    # Persist to file so controller can read this info.
    # NOTE(review): the third positional argument of open() is `buffering`;
    # passing os.O_NONBLOCK here sets an arbitrary buffer size -- it does not
    # make the file non-blocking.  Confirm intent.
    with open('__jellyconfig', 'w', os.O_NONBLOCK) as config_file:
        config_file.write('n=%d\n' % n)
        config_file.write('k=%d\n' % k)
        config_file.write('r=%d\n' % r)
        config_file.write('seed=%d\n' % seed)
        config_file.write('routing=%s\n' % routing)
        config_file.flush()
    # Create Mininet network with a custom controller
    net = Mininet(topo=topo, controller=JellyfishController, link=TCLink)
    # We need to tell each host the MAC address of every other host.
    set_host_arps(net)
    # Display the topology
    if args['display']:
        print("\n\n==== Displaying topology ...")
        g = nx.Graph()
        g.add_nodes_from(topo.nodes())
        g.add_edges_from(topo.links())
        nx.draw(g, with_labels=True)
        plt.show()
    #print(net.links[0])
    #print((net.links[0].intf1.MAC(), net.links[0].intf2.MAC()))
    # What experiment to run?
    if args['pingtest']:
        test_ping(net)
    elif args['randpermtraffic']:
        P = 1
        # NOTE(review): vars(parse_args()) always contains 'flows' (None when
        # -f is omitted), so this branch is always taken and int(None) would
        # raise.  Consider `if args['flows'] is not None:` -- confirm.
        if 'flows' in args:
            P = int(args['flows'])
        rand_perm_traffic(net, P=P)
    elif args['cli']:
        CLI(net)
| 31.850299 | 89 | 0.611205 |
acf63d308e40f774064598722629f911fa08ddb7 | 209 | py | Python | check_db.py | wangdashuai888/wechat_checkbook | 26b1e861fb4806f688eb8c303384080101c5b497 | [
"MIT"
] | 1 | 2022-01-10T13:31:42.000Z | 2022-01-10T13:31:42.000Z | check_db.py | wangdashuai888/wechat_checkbook | 26b1e861fb4806f688eb8c303384080101c5b497 | [
"MIT"
] | null | null | null | check_db.py | wangdashuai888/wechat_checkbook | 26b1e861fb4806f688eb8c303384080101c5b497 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Jan 12 16:19:16 2020
@author: taotao
"""
def status_check():
    """Stub hook: announce the bookkeeping-database status check."""
    print('进入数据库检测')
def item_entry(item_price):
    """Stub hook: announce writing one entry (price *item_price*) to the database."""
    print('开始数据写入')
| 13.0625 | 35 | 0.660287 |
acf63d5f4e77facea6cb55f959f9950319e16b56 | 6,252 | py | Python | mne_nirs/statistics/tests/test_glm_type.py | drammock/mne-nirs | 2deb73184b4609d0a72495e65565f430bbff0704 | [
"BSD-3-Clause"
] | null | null | null | mne_nirs/statistics/tests/test_glm_type.py | drammock/mne-nirs | 2deb73184b4609d0a72495e65565f430bbff0704 | [
"BSD-3-Clause"
] | null | null | null | mne_nirs/statistics/tests/test_glm_type.py | drammock/mne-nirs | 2deb73184b4609d0a72495e65565f430bbff0704 | [
"BSD-3-Clause"
] | null | null | null | # Authors: Robert Luke <mail@robertluke.net>
#
# License: BSD (3-clause)
import os
import pandas
import pytest
import numpy as np
import matplotlib
from matplotlib.pyplot import Axes
import mne
import nilearn
from mne_nirs.statistics import RegressionResults, read_glm
from mne_nirs.experimental_design import make_first_level_design_matrix
from mne_nirs.statistics import run_glm
def _get_minimal_haemo_data(tmin=0, tmax=60):
    """Load a cropped, downsampled haemoglobin Raw from the fNIRS motor dataset."""
    data_dir = os.path.join(mne.datasets.fnirs_motor.data_path(), 'Participant-1')
    raw = mne.io.read_raw_nirx(data_dir, preload=False)
    raw.crop(tmax=tmax, tmin=tmin)
    optical = mne.preprocessing.nirs.optical_density(raw)
    haemo = mne.preprocessing.nirs.beer_lambert_law(optical)
    haemo.resample(0.3)  # keep the fixture small/fast
    return haemo
def _get_glm_result(tmax=60, tmin=0, noise_model='ar1'):
    """Fit a first-level GLM over a crop of the motor dataset and return it."""
    haemo = _get_minimal_haemo_data(tmin=tmin, tmax=tmax)
    design = make_first_level_design_matrix(
        haemo, stim_dur=5., drift_order=1, drift_model='polynomial')
    return run_glm(haemo, design, noise_model=noise_model)
def _get_glm_contrast_result(tmin=60, tmax=400):
    """Fit a GLM on a data crop and return the '1.0' - '2.0' condition contrast."""
    raw = _get_minimal_haemo_data(tmin=tmin, tmax=tmax)
    design_matrix = make_first_level_design_matrix(raw, stim_dur=5.,
                                                   drift_order=1,
                                                   drift_model='polynomial')
    glm_est = run_glm(raw, design_matrix)
    # One unit contrast vector per design-matrix column.
    contrast_matrix = np.eye(design_matrix.shape[1])
    # Dict comprehension instead of dict([...]) (flake8-comprehensions C404).
    basic_conts = {column: contrast_matrix[i]
                   for i, column in enumerate(design_matrix.columns)}
    contrast_LvR = basic_conts['1.0'] - basic_conts['2.0']
    return glm_est.compute_contrast(contrast_LvR)
def test_create_results_glm():
    """RegressionResults construction must validate its data-dict argument."""
    # Create a relevant info structure
    raw = _get_minimal_haemo_data()
    # Create a minimal structure
    res = _get_glm_result()
    # Get arguments for type so we can test errors below
    info = raw.info
    minimal_structure = res._data
    # Test construction: anything that is not a dict must be rejected
    with pytest.raises(TypeError, match='must be a dictionary'):
        _ = RegressionResults(info, np.zeros((5, 2)), 1)
    with pytest.raises(TypeError, match='must be a dictionary'):
        _ = RegressionResults(info, 3.2, 1)
    with pytest.raises(TypeError, match='must be a dictionary'):
        _ = RegressionResults(info, [], 1)
    with pytest.raises(TypeError, match='must be a dictionary'):
        _ = RegressionResults(info, "hbo", 1)
    # A dict with too few channels must be rejected as well
    with pytest.raises(TypeError, match='keys must match'):
        _ = RegressionResults(info, _take(4, minimal_structure), 1)
    # A dict with the right size but one wrong channel name must be rejected
    onewrongname = _take(55, minimal_structure)
    onewrongname["test"] = onewrongname["S1_D1 hbo"]
    with pytest.raises(TypeError, match='must match ch_names'):
        _ = RegressionResults(info, onewrongname, 1)
    # Test properties
    assert len(res) == len(raw.ch_names)
def test_results_glm_properties():
    """Container, copy, pick and accessor behaviour of RegressionResults."""
    n_channels = 56  # channel count of the cropped motor-dataset fixture
    res = _get_glm_result()
    # Test ContainsMixin
    assert 'hbo' in res
    assert 'hbr' in res
    assert 'meg' not in res
    # Test copy
    assert len(res) == len(res.copy())
    # Test picks (by index range, by name, by list of names, by type);
    # unknown names are silently dropped, duplicates are de-duplicated
    assert len(res.copy().pick(picks=range(4))) == 4
    assert len(res.copy().pick(picks="S1_D1 hbr")) == 1
    assert len(res.copy().pick(picks=["S1_D1 hbr"])) == 1
    assert len(res.copy().pick(picks=["S1_D1 hbr", "S1_D1 hbo"])) == 2
    assert len(res.copy().pick(picks=["S1_D1 hbr", "S1_D1 XXX"])) == 1
    assert len(res.copy().pick(picks=["S1_D1 hbr", "S1_D1 hbr"])) == 1
    assert len(res.copy().pick(picks="fnirs")) == n_channels
    assert len(res.copy().pick(picks="hbo")) == n_channels / 2
    assert len(res.copy().pick(picks="hbr")) == n_channels / 2
    # Test results
    assert len(res.theta()) == n_channels
    assert len(res.copy().pick(picks=range(4)).theta()) == 4
    assert len(res.copy().pick(picks=3).theta()) == 1
    assert res.copy().pick(picks=3).theta()[0].shape == (3, 1)
    # Test models
    assert len(res.model()) == n_channels
    assert len(res.copy().pick(picks=range(8)).model()) == 8
    assert type(res.model()[0]) is nilearn.glm.regression.ARModel
    assert isinstance(res.to_dataframe(), pandas.DataFrame)
def test_glm_scatter():
    """scatter() returns Axes; surface_projection runs without error."""
    regression_axes = _get_glm_result().scatter()
    assert isinstance(regression_axes, Axes)
    contrast_axes = _get_glm_contrast_result().scatter()
    assert isinstance(contrast_axes, Axes)
    full_result = _get_glm_result(tmax=2974, tmin=0)
    full_result.surface_projection(condition="3.0", view="dorsal")
def test_results_glm_export_dataframe():
    """to_dataframe yields one row per channel/regressor pair, 12 columns."""
    n_channels = 56
    frame = _get_glm_result(tmax=400).to_dataframe()
    assert frame.shape == (6 * n_channels, 12)
def test_create_results_glm_contrast():
    """Basic type, copy, plotting and export behaviour of contrast results."""
    # Create a minimal structure
    res = _get_glm_contrast_result()
    assert isinstance(res._data, nilearn.glm.contrasts.Contrast)
    assert isinstance(res.info, mne.Info)
    # Test copy
    assert len(res) == len(res.copy())
    assert isinstance(res.plot_topo(), matplotlib.figure.Figure)
    # Export: one row per channel, 10 columns
    n_channels = 56
    assert isinstance(res.to_dataframe(), pandas.DataFrame)
    df = res.to_dataframe()
    assert df.shape == (n_channels, 10)
def test_results_glm_io():
    """Saved GLM results must round-trip through read_glm unchanged.

    The same save / reload / compare sequence was repeated three times
    inline; it is factored into a local helper.
    """
    def _assert_roundtrip(res, fname):
        # Save, reload, and verify both dataframe content and object equality.
        res.save(fname, overwrite=True)
        loaded_res = read_glm(fname)
        assert loaded_res.to_dataframe().equals(res.to_dataframe())
        assert res == loaded_res

    _assert_roundtrip(_get_glm_result(tmax=400), "test-regression-glm.h5")
    _assert_roundtrip(_get_glm_result(tmax=400, noise_model='ols'),
                      "test-regression-ols_glm.h5")
    res = _get_glm_contrast_result()
    _assert_roundtrip(res, "test-contrast-glm.h5")
    # File names that do not end in glm.h5 must be rejected.
    with pytest.raises(IOError, match='must end with glm.h5'):
        res.save("test-contrast-glX.h5", overwrite=True)
def _take(n, mydict):
"""Return first n items of the iterable as a list"""
return {k: mydict[k] for k in list(mydict)[:n]}
| 33.079365 | 78 | 0.664267 |
acf64037fd718e8b72f22482467a98a89d92368d | 9,291 | py | Python | losses/losses.py | blafabregue/TimeSeriesDeepClustering | 85f4ab2fd45bda3296c6b0861ee11e6c7a77c594 | [
"Apache-2.0"
] | 6 | 2021-03-05T18:48:22.000Z | 2022-03-23T08:42:16.000Z | losses/losses.py | blafabregue/TimeSeriesDeepClustering | 85f4ab2fd45bda3296c6b0861ee11e6c7a77c594 | [
"Apache-2.0"
] | null | null | null | losses/losses.py | blafabregue/TimeSeriesDeepClustering | 85f4ab2fd45bda3296c6b0861ee11e6c7a77c594 | [
"Apache-2.0"
] | 4 | 2021-06-29T00:53:14.000Z | 2022-03-10T11:55:32.000Z | """
Define loss used in the experiment
The triplet loss is based on torch implementation https://github.com/White-Link/UnsupervisedScalableRepresentationLearningTimeSeries
and article:
Franceschi, J. Y., Dieuleveut, A., & Jaggi, M. (2019).
Unsupervised scalable representation learning for multivariate time series
Author:
Baptiste Lafabregue 2019.25.04
"""
import numpy as np
from tensorflow.keras import backend as K
import tensorflow as tf
import utils
class TripletLoss(object):
    """Unsupervised triplet loss for time-series representations.

    Follows Franceschi et al. (2019): for every batch element an anchor
    sub-series and an overlapping positive sub-series are drawn, plus
    ``nb_random_samples`` negative sub-series drawn from ``train_set``.
    The encoder is pushed toward a high anchor/positive dot product and a
    low anchor/negative dot product.

    encoder: model mapping (batch, time, features) tensors to embeddings.
    train_set: array negatives are sampled from; assumed (N, T, F) from
        the shape[0]/shape[1] uses below.
    compared_length: upper bound on sampled sub-series length.
    nb_random_samples: number of negative samples per batch element.
    negative_penalty: weight of the negative part of the loss.
    fixed_time_dim: if True, zero-pad every sub-series back to the full
        series length (for encoders requiring a fixed time dimension).
    """
    def __init__(self, encoder, train_set, compared_length,
                 nb_random_samples, negative_penalty, fixed_time_dim=False):
        self.encoder = encoder
        self.train_set = train_set
        self.compared_length = compared_length
        self.nb_random_samples = nb_random_samples
        self.negative_penalty = negative_penalty
        self.fixed_time_dim = fixed_time_dim
    def compute_loss(self, batch, noisy_batch=None, training=True):
        """Return the scalar triplet loss for one batch.

        noisy_batch is accepted for interface parity with the other loss
        classes but is not used here.
        """
        batch_size = batch.shape[0]
        train_size = self.train_set.shape[0]
        length = min(self.compared_length, self.train_set.shape[1])
        fixed_length = self.train_set.shape[1]
        # For each batch element, we pick nb_random_samples possible random
        # time series in the training set (choice of batches from where the
        # negative examples will be sampled)
        samples = np.random.choice(
            train_size, size=(self.nb_random_samples, batch_size)
        )
        # Choice of length of positive and negative samples
        length_pos_neg = np.random.randint(1, high=length + 1)
        # We choose for each batch example a random interval in the time
        # series, which is the 'anchor'
        random_length = np.random.randint(
            length_pos_neg, high=length + 1
        ) # Length of anchors
        beginning_batches = np.random.randint(
            0, high=length - random_length + 1, size=batch_size
        ) # Start of anchors
        # The positive samples are chosen at random in the chosen anchors
        beginning_samples_pos = np.random.randint(
            0, high=random_length - length_pos_neg + 1, size=batch_size
        ) # Start of positive samples in the anchors
        # Start of positive samples in the batch examples
        beginning_positive = beginning_batches + beginning_samples_pos
        # End of positive samples in the batch examples
        end_positive = beginning_positive + length_pos_neg
        # We randomly choose nb_random_samples potential negative samples for
        # each batch example
        beginning_samples_neg = np.random.randint(
            0, high=length - length_pos_neg + 1,
            size=(self.nb_random_samples, batch_size)
        )
        anchor = K.concatenate([batch[j: j + 1,
                                beginning_batches[j]: beginning_batches[j] + random_length,
                                :,
                                ] for j in range(batch_size)], axis=0)
        if self.fixed_time_dim:
            # Zero-pad the time axis back to the full series length.
            anchor = tf.pad(anchor, tf.constant([[0, 0], [0, fixed_length-random_length], [0, 0]]))
        representation = self.encoder(
            anchor,
            training=training
        ) # Anchors representations
        positive = K.concatenate([batch[j: j + 1,
                                  end_positive[j] - length_pos_neg: end_positive[j],
                                  :,
                                  ] for j in range(batch_size)], axis=0)
        if self.fixed_time_dim:
            positive = tf.pad(positive, tf.constant([[0, 0], [0, fixed_length-length_pos_neg], [0, 0]]))
        positive_representation = self.encoder(
            positive,
            training=training
        ) # Positive samples representations
        size_representation = K.int_shape(representation)[1]
        # Positive loss: -logsigmoid of dot product between anchor and positive
        # representations
        loss = -K.mean(K.log(K.sigmoid(K.batch_dot(
            K.reshape(representation, (batch_size, 1, size_representation)),
            K.reshape(positive_representation, (batch_size, size_representation, 1)))
        )))
        # Spread the negative penalty evenly over the negative samples.
        multiplicative_ratio = self.negative_penalty / self.nb_random_samples
        for i in range(self.nb_random_samples):
            # Negative loss: -logsigmoid of minus the dot product between
            # anchor and negative representations
            negative = K.concatenate([self.train_set[samples[i, j]: samples[i, j] + 1]
                                      [:,
                                      beginning_samples_neg[i, j]:beginning_samples_neg[i, j] + length_pos_neg,
                                      :] for j in range(batch_size)], axis=0)
            if self.fixed_time_dim:
                negative = tf.pad(negative, tf.constant([[0, 0], [0, fixed_length-length_pos_neg], [0, 0]]))
            negative_representation = self.encoder(
                negative,
                training=training
            )
            loss += multiplicative_ratio * -K.mean(
                K.log(K.sigmoid(-K.batch_dot(
                    K.reshape(representation, (batch_size, 1, size_representation)),
                    K.reshape(negative_representation, (batch_size, size_representation, 1))
                )))
            )
        return loss
class MSELoss(object):
    """Plain autoencoder reconstruction loss (mean squared error).

    The reconstruction of the (possibly noisy) input is compared against
    the clean batch, so the same object also works for denoising
    autoencoders.
    """

    def __init__(self, autoencoder):
        self.autoencoder = autoencoder
        self.loss = tf.keras.losses.MeanSquaredError()

    def compute_loss(self, batch, noisy_batch=None, training=True):
        """Return the MSE between the clean batch and its reconstruction."""
        source = batch if noisy_batch is None else noisy_batch
        reconstruction = self.autoencoder(source, training=training)
        return self.loss(batch, reconstruction)
class JointLearningLoss(object):
    """Layer-wise reconstruction loss for joint (stacked) autoencoder training.

    Total loss = input-reconstruction MSE plus, for each hidden layer i,
    ``hlayer_loss_param`` times the MSE between encoder output i and the
    decoder activation mirroring it.
    """
    def __init__(self, layers_generator, hlayer_loss_param=0.1):
        # Fail fast on architectures that cannot expose per-layer outputs.
        if not layers_generator.support_joint_training:
            raise utils.CompatibilityException('architecture incompatible with Joint Learning loss')
        self.encoder = layers_generator.get_all_layers_encoder()
        self.decoder = layers_generator.get_all_layers_decoder()
        self.loss = tf.keras.losses.MeanSquaredError()
        self.hlayer_loss_param = hlayer_loss_param
    def compute_loss(self, batch, noisy_batch=None, training=True):
        """Return the joint loss for one batch (denoising when noisy_batch given)."""
        if noisy_batch is None:
            noisy_batch = batch
        # encoding/decoding are per-layer activation lists; encoding[-1] is the
        # bottleneck and decoding[-1] the final reconstruction.  The mirror of
        # encoder layer i is assumed to be decoding[-2 - i] -- TODO confirm
        # against the layers_generator ordering.
        encoding = self.encoder(noisy_batch, training=training)
        decoding = self.decoder(encoding[-1], training=training)
        loss = 0
        for i in range(len(encoding) - 1):
            loss += self.hlayer_loss_param*self.loss(encoding[i], decoding[-2 - i])
        loss += self.loss(batch, decoding[-1])
        return loss
class VAELoss(object):
    """Variational autoencoder loss: reconstruction MSE plus closed-form KL.

    The encoder is expected to output mean and log-variance concatenated
    along axis 1; sampling uses the reparameterization trick.
    """
    def __init__(self, encoder, decoder):
        self.encoder = encoder
        self.decoder = decoder
        # Match Keras' configured float precision for the casts below.
        if K.floatx() == 'float64':
            dtype = tf.dtypes.float64
        else:
            dtype = tf.dtypes.float32
        self.dtype = dtype
    def log_normal_pdf(self, sample, mean, logvar, raxis=1):
        """Log-density of *sample* under N(mean, exp(logvar)), summed over raxis.

        NOTE(review): not used by compute_loss, which applies the analytic
        KL divergence instead.
        """
        log2pi = tf.math.log(2. * np.pi)
        log2pi = tf.cast(log2pi, self.dtype)
        return tf.reduce_sum(-.5 * ((sample - mean) ** 2. * tf.cast(tf.exp(-logvar), self.dtype) + logvar + log2pi),
                             axis=raxis)
    def compute_loss(self, batch, noisy_batch=None, training=True):
        """Return per-sample reconstruction + KL loss (noisy_batch is unused)."""
        # Split the encoder output into mean / log-variance halves.
        mean, logvar = tf.split(self.encoder(batch), num_or_size_splits=2, axis=1)
        # Reparameterization trick: z = mu + sigma * epsilon.
        epsilon = K.random_normal(shape=mean.shape)
        z = mean + K.exp(logvar / 2) * epsilon
        x_logit = self.decoder(z, training=training)
        # we use the classic mse because values are z-normalized (so not necessarily between 0 and 1)
        recon = K.sum(tf.keras.losses.mean_squared_error(batch, x_logit), axis=1)
        # Closed-form KL(N(mu, sigma^2) || N(0, 1)) summed over latent dims.
        kl = 0.5 * K.sum(K.exp(logvar) + K.square(mean) - 1. - logvar, axis=1)
        return recon + kl
class SiameseTSLoss(object):
    """Sum of two reconstruction losses for a siamese pair of autoencoders.

    Each branch reconstructs the (possibly noisy) input and is compared,
    via MSE, against its own filtered view of the clean batch.
    """

    def __init__(self, autoencoder1, autoencoder2, filter1, filter2):
        self.autoencoder1 = autoencoder1
        self.autoencoder2 = autoencoder2
        self.filter1 = filter1
        self.filter2 = filter2
        self.loss = tf.keras.losses.MeanSquaredError()

    def compute_loss(self, batch, noisy_batch=None, training=True):
        """Return branch-1 loss plus branch-2 loss for one batch."""
        source = batch if noisy_batch is None else noisy_batch
        branch_losses = []
        for autoencoder, target_filter in ((self.autoencoder1, self.filter1),
                                           (self.autoencoder2, self.filter2)):
            reconstruction = autoencoder(source, training=training)
            branch_losses.append(self.loss(target_filter(batch), reconstruction))
        return branch_losses[0] + branch_losses[1]
class CombinedLoss(object):
    """Weighted sum of several loss objects sharing the compute_loss interface.

    losses: objects exposing compute_loss(batch, noisy_batch=..., training=...).
    weights: per-loss multipliers; defaults to 1.0 for every loss.

    Bug fix / robustness: the old default used np.ones_like(self.losses),
    which builds an object-dtype ndarray from the list of loss objects; a
    plain Python list of 1.0s is simpler, avoids the object-array detour,
    and iterates identically under zip.
    """
    def __init__(self, losses, weights=None):
        self.losses = losses
        self.weights = weights
        if self.weights is None:
            self.weights = [1.0] * len(self.losses)

    def compute_loss(self, batch, noisy_batch=None, training=True):
        """Return the weighted sum of every constituent loss on this batch."""
        total_loss = 0
        for loss, weight in zip(self.losses, self.weights):
            total_loss += weight * loss.compute_loss(batch, noisy_batch=noisy_batch, training=training)
        return total_loss
| 41.851351 | 132 | 0.63061 |
acf64120306f7336e96fb209770bc584ea49eac0 | 9,395 | py | Python | kga2c/representations.py | SkittlePox/KG-A2C | 4f0163abaef6fdf39ac8b6783c3abe84a96d5bad | [
"MIT"
] | 49 | 2020-01-24T19:52:58.000Z | 2022-02-18T01:06:57.000Z | kga2c/representations.py | SkittlePox/KG-A2C | 4f0163abaef6fdf39ac8b6783c3abe84a96d5bad | [
"MIT"
] | 1 | 2020-02-19T20:10:42.000Z | 2020-03-03T01:07:49.000Z | kga2c/representations.py | SkittlePox/KG-A2C | 4f0163abaef6fdf39ac8b6783c3abe84a96d5bad | [
"MIT"
] | 14 | 2020-01-28T02:38:03.000Z | 2022-01-05T17:31:43.000Z | import networkx as nx
import numpy as np
import openie
from fuzzywuzzy import fuzz
from jericho.util import clean
class StateAction(object):
    """Tracks the agent's knowledge-graph state and text encodings for KG-A2C.

    Maintains a directed graph of (subject, relation, object) triples
    extracted from game text via Stanford OpenIE, plus SentencePiece /
    vocabulary encodings of observations and actions.
    """
    def __init__(self, spm, vocab, vocab_rev, tsv_file, max_word_len):
        # spm: SentencePiece model; vocab / vocab_rev: action token maps;
        # tsv_file: entity/relation id table for the KG embedding vocab.
        self.graph_state = nx.DiGraph()
        self.max_word_len = max_word_len
        self.graph_state_rep = []
        self.visible_state = ""
        self.drqa_input = ""
        self.vis_pruned_actions = []
        self.pruned_actions_rep = []
        self.sp = spm
        self.vocab_act = vocab
        self.vocab_act_rev = vocab_rev
        self.vocab_kge = self.load_vocab_kge(tsv_file)
        self.adj_matrix = np.zeros((len(self.vocab_kge['entity']), len(self.vocab_kge['entity'])))
        self.room = ""
    def visualize(self):
        """Draw the current knowledge graph with relation edge labels."""
        # import matplotlib.pyplot as plt
        pos = nx.spring_layout(self.graph_state)
        edge_labels = {e: self.graph_state.edges[e]['rel'] for e in self.graph_state.edges}
        print(edge_labels)
        nx.draw_networkx_edge_labels(self.graph_state, pos, edge_labels)
        nx.draw(self.graph_state, pos=pos, with_labels=True, node_size=200, font_size=10)
        #plt.show()
    def load_vocab_kge(self, tsv_file):
        """Load name -> id maps from a tab-separated file.

        NOTE(review): both passes read the same file, so 'entity' and
        'relation' end up with identical contents -- confirm whether a
        separate relation file was intended.
        """
        ent = {}
        with open(tsv_file, 'r') as f:
            for line in f:
                e, eid = line.split('\t')
                ent[e.strip()] = int(eid.strip())
        rel = {}
        with open(tsv_file, 'r') as f:
            for line in f:
                r, rid = line.split('\t')
                rel[r.strip()] = int(rid.strip())
        return {'entity': ent, 'relation': rel}
    def update_state(self, visible_state, inventory_state, objs, prev_action=None, cache=None):
        """Extract triples from the observation text and merge them into the graph.

        cache, when given, is a previously computed OpenIE result so the
        Stanford server is not queried again.  Returns (added_rules, sents);
        NOTE(review): the early-exit path (`sents == ""`) returns a bare
        list instead of a 2-tuple -- callers unpacking two values would fail.
        """
        prev_room = self.room
        graph_copy = self.graph_state.copy()
        # Find the connected component whose node set contains the previous room.
        con_cs = [graph_copy.subgraph(c) for c in nx.weakly_connected_components(graph_copy)]
        prev_room_subgraph = None
        prev_you_subgraph = None
        for con_c in con_cs:
            for node in con_c.nodes:
                node = set(str(node).split())
                if set(prev_room.split()).issubset(node):
                    prev_room_subgraph = nx.induced_subgraph(graph_copy, con_c.nodes)
        # Drop stale facts about the agent ('you ...') before re-adding fresh ones.
        for edge in self.graph_state.edges:
            if 'you' in edge[0]:
                graph_copy.remove_edge(*edge)
        self.graph_state = graph_copy
        # First line of the observation is the room name; the rest is prose.
        visible_state = visible_state.split('\n')
        room = visible_state[0]
        visible_state = clean(' '.join(visible_state[1:]))
        dirs = ['north', 'south', 'east', 'west', 'southeast', 'southwest', 'northeast', 'northwest', 'up', 'down']
        self.visible_state = str(visible_state)
        rules = []
        if cache is None:
            sents = openie.call_stanford_openie(self.visible_state)['sentences']
        else:
            sents = cache
        if sents == "":
            return []
        # Relations fuzzy-matching one of these aliases are normalized to 'in'.
        in_aliases = ['are in', 'are facing', 'are standing', 'are behind', 'are above', 'are below', 'are in front']
        in_rl = []
        in_flag = False
        for i, ov in enumerate(sents):
            sent = ' '.join([a['word'] for a in ov['tokens']])
            triple = ov['openie']
            # Direction words mentioned after the first sentence imply exits.
            for d in dirs:
                if d in sent and i != 0:
                    rules.append((room, 'has', 'exit to ' + d))
            for tr in triple:
                h, r, t = tr['subject'].lower(), tr['relation'].lower(), tr['object'].lower()
                if h == 'you':
                    for rp in in_aliases:
                        if fuzz.token_set_ratio(r, rp) > 80:
                            r = "in"
                            in_rl.append((h, r, t))
                            in_flag = True
                            break
                if h == 'it':
                    break
                if not in_flag:
                    rules.append((h, r, t))
        if in_flag:
            # Keep the most specific 'you in X' triple (largest containing phrase).
            cur_t = in_rl[0]
            for h, r, t in in_rl:
                if set(cur_t[2].split()).issubset(set(t.split())):
                    cur_t = h, r, t
            rules.append(cur_t)
            room = cur_t[2]
        try:
            # Inventory text format assumed: header line, colon, one item per line.
            items = inventory_state.split(':')[1].split('\n')[1:]
            for item in items:
                rules.append(('you', 'have', str(' ' .join(item.split()[1:]))))
        except:
            # NOTE(review): bare except silently skips malformed/empty
            # inventory text; consider narrowing to IndexError.
            pass
        if prev_action is not None:
            # A movement action links the previous room to the new one.
            for d in dirs:
                if d in prev_action and self.room != "":
                    rules.append((prev_room, d + ' of', room))
                    if prev_room_subgraph is not None:
                        for ed in prev_room_subgraph.edges:
                            rules.append((ed[0], prev_room_subgraph[ed]['rel'], ed[1]))
                    break
        for o in objs:
            #if o != 'all':
            rules.append((str(o), 'in', room))
        add_rules = rules
        # Only add edges whose endpoints exist in the KG vocabulary.
        for rule in add_rules:
            u = '_'.join(str(rule[0]).split())
            v = '_'.join(str(rule[2]).split())
            if u in self.vocab_kge['entity'].keys() and v in self.vocab_kge['entity'].keys():
                if u != 'it' and v != 'it':
                    self.graph_state.add_edge(rule[0], rule[2], rel=rule[1])
        return add_rules, sents
    def get_state_rep_kge(self):
        """Rebuild the adjacency matrix and return the entity ids present in the graph.

        NOTE(review): `break` aborts on the first out-of-vocab edge instead
        of skipping it -- confirm whether `continue` was intended.
        """
        ret = []
        self.adj_matrix = np.zeros((len(self.vocab_kge['entity']), len(self.vocab_kge['entity'])))
        for u, v in self.graph_state.edges:
            u = '_'.join(str(u).split())
            v = '_'.join(str(v).split())
            if u not in self.vocab_kge['entity'].keys() or v not in self.vocab_kge['entity'].keys():
                break
            u_idx = self.vocab_kge['entity'][u]
            v_idx = self.vocab_kge['entity'][v]
            self.adj_matrix[u_idx][v_idx] = 1
            ret.append(self.vocab_kge['entity'][u])
            ret.append(self.vocab_kge['entity'][v])
        return list(set(ret))
    def get_state_kge(self):
        """Same as get_state_rep_kge but returns entity name strings, not ids."""
        ret = []
        self.adj_matrix = np.zeros((len(self.vocab_kge['entity']), len(self.vocab_kge['entity'])))
        for u, v in self.graph_state.edges:
            u = '_'.join(str(u).split())
            v = '_'.join(str(v).split())
            if u not in self.vocab_kge['entity'].keys() or v not in self.vocab_kge['entity'].keys():
                break
            u_idx = self.vocab_kge['entity'][u]
            v_idx = self.vocab_kge['entity'][v]
            self.adj_matrix[u_idx][v_idx] = 1
            ret.append(u)
            ret.append(v)
        return list(set(ret))
    def get_obs_rep(self, *args):
        """Encode each observation string and pad all of them to length 300."""
        ret = [self.get_visible_state_rep_drqa(ob) for ob in args]
        return pad_sequences(ret, maxlen=300)
    def get_visible_state_rep_drqa(self, state_description):
        """Strip markup/special tokens and SentencePiece-encode the text to ids."""
        remove = ['=', '-', '\'', ':', '[', ']', 'eos', 'EOS', 'SOS', 'UNK', 'unk', 'sos', '<', '>']
        for rm in remove:
            state_description = state_description.replace(rm, '')
        return self.sp.encode_as_ids(state_description)
    def get_action_rep_drqa(self, action):
        """Encode an action as a fixed list of 20 vocab ids (0 = pad/unknown)."""
        action_desc_num = 20 * [0]
        action = str(action)
        for i, token in enumerate(action.split()[:20]):
            # Tokens are truncated to max_word_len to match the vocab keys.
            short_tok = token[:self.max_word_len]
            action_desc_num[i] = self.vocab_act_rev[short_tok] if short_tok in self.vocab_act_rev else 0
        return action_desc_num
    def step(self, visible_state, inventory_state, objs, prev_action=None, cache=None, gat=True):
        """Advance the state: update the graph and refresh all cached encodings.

        gat is currently unused.  Returns the (rules, openie_cache) pair
        from update_state.
        """
        ret, ret_cache = self.update_state(visible_state, inventory_state, objs, prev_action, cache)
        self.pruned_actions_rep = [self.get_action_rep_drqa(a) for a in self.vis_pruned_actions]
        inter = self.visible_state #+ "The actions are:" + ",".join(self.vis_pruned_actions) + "."
        self.drqa_input = self.get_visible_state_rep_drqa(inter)
        self.graph_state_rep = self.get_state_rep_kge(), self.adj_matrix
        return ret, ret_cache
def pad_sequences(sequences, maxlen=None, dtype='int32', value=0.):
    """Pad/truncate a list of sequences into a 2-D numpy array.

    Partially borrowed from Keras.

    Args:
        sequences: list of lists where each element is a sequence.
        maxlen: int, maximum length; defaults to the longest sequence.
        dtype: type to cast the resulting array to.
        value: float, value used to pad short sequences.

    Returns:
        numpy array with dimensions (number_of_sequences, maxlen).
    """
    if maxlen is None:
        maxlen = np.max([len(seq) for seq in sequences])
    # Infer the per-timestep shape from the first non-empty sequence; every
    # other sequence is checked against it inside the copy loop below.
    sample_shape = tuple()
    for seq in sequences:
        if len(seq) > 0:
            sample_shape = np.asarray(seq).shape[1:]
            break
    out = np.full((len(sequences), maxlen) + sample_shape, value).astype(dtype)
    for row, seq in enumerate(sequences):
        if len(seq) == 0:
            continue  # empty sequence: leave the whole row as padding
        # Pre-truncate: keep the *last* maxlen steps of over-long sequences.
        truncated = np.asarray(seq[-maxlen:], dtype=dtype)
        if truncated.shape[1:] != sample_shape:
            raise ValueError('Shape of sample %s of sequence at position %s is different from expected shape %s' %
                             (truncated.shape[1:], row, sample_shape))
        # Post-pad: data goes at the front, padding value fills the tail.
        out[row, :len(truncated)] = truncated
    return out
| 35.05597 | 117 | 0.551144 |
acf6415416e34b2f383331ba2b7b810a27e9c14a | 3,201 | py | Python | PyOpenGL-3.0.2/OpenGL/raw/GL/SGIX/fragment_lighting.py | frederica07/Dragon_Programming_Process | c0dff2e20c1be6db5adc6f9977efae8f7f888ef5 | [
"BSD-2-Clause"
] | null | null | null | PyOpenGL-3.0.2/OpenGL/raw/GL/SGIX/fragment_lighting.py | frederica07/Dragon_Programming_Process | c0dff2e20c1be6db5adc6f9977efae8f7f888ef5 | [
"BSD-2-Clause"
] | null | null | null | PyOpenGL-3.0.2/OpenGL/raw/GL/SGIX/fragment_lighting.py | frederica07/Dragon_Programming_Process | c0dff2e20c1be6db5adc6f9977efae8f7f888ef5 | [
"BSD-2-Clause"
] | null | null | null | '''Autogenerated by get_gl_extensions script, do not edit!'''
from OpenGL import platform as _p, constants as _cs, arrays
from OpenGL.GL import glget
import ctypes
EXTENSION_NAME = 'GL_SGIX_fragment_lighting'
def _f( function ):
    # Decorator applied to each stub below: binds the Python signature to the
    # native GL entry point exposed by the GL_SGIX_fragment_lighting extension.
    return _p.createFunction( function,_p.GL,'GL_SGIX_fragment_lighting',False)
_p.unpack_constants( """GL_FRAGMENT_LIGHTING_SGIX 0x8400
GL_FRAGMENT_COLOR_MATERIAL_SGIX 0x8401
GL_FRAGMENT_COLOR_MATERIAL_FACE_SGIX 0x8402
GL_FRAGMENT_COLOR_MATERIAL_PARAMETER_SGIX 0x8403
GL_MAX_FRAGMENT_LIGHTS_SGIX 0x8404
GL_MAX_ACTIVE_LIGHTS_SGIX 0x8405
GL_CURRENT_RASTER_NORMAL_SGIX 0x8406
GL_LIGHT_ENV_MODE_SGIX 0x8407
GL_FRAGMENT_LIGHT_MODEL_LOCAL_VIEWER_SGIX 0x8408
GL_FRAGMENT_LIGHT_MODEL_TWO_SIDE_SGIX 0x8409
GL_FRAGMENT_LIGHT_MODEL_AMBIENT_SGIX 0x840A
GL_FRAGMENT_LIGHT_MODEL_NORMAL_INTERPOLATION_SGIX 0x840B
GL_FRAGMENT_LIGHT0_SGIX 0x840C
GL_FRAGMENT_LIGHT1_SGIX 0x840D
GL_FRAGMENT_LIGHT2_SGIX 0x840E
GL_FRAGMENT_LIGHT3_SGIX 0x840F
GL_FRAGMENT_LIGHT4_SGIX 0x8410
GL_FRAGMENT_LIGHT5_SGIX 0x8411
GL_FRAGMENT_LIGHT6_SGIX 0x8412
GL_FRAGMENT_LIGHT7_SGIX 0x8413""", globals())
@_f
@_p.types(None,_cs.GLenum,_cs.GLenum)
def glFragmentColorMaterialSGIX( face,mode ):pass
@_f
@_p.types(None,_cs.GLenum,_cs.GLenum,_cs.GLfloat)
def glFragmentLightfSGIX( light,pname,param ):pass
@_f
@_p.types(None,_cs.GLenum,_cs.GLenum,arrays.GLfloatArray)
def glFragmentLightfvSGIX( light,pname,params ):pass
@_f
@_p.types(None,_cs.GLenum,_cs.GLenum,_cs.GLint)
def glFragmentLightiSGIX( light,pname,param ):pass
@_f
@_p.types(None,_cs.GLenum,_cs.GLenum,arrays.GLintArray)
def glFragmentLightivSGIX( light,pname,params ):pass
@_f
@_p.types(None,_cs.GLenum,_cs.GLfloat)
def glFragmentLightModelfSGIX( pname,param ):pass
@_f
@_p.types(None,_cs.GLenum,arrays.GLfloatArray)
def glFragmentLightModelfvSGIX( pname,params ):pass
@_f
@_p.types(None,_cs.GLenum,_cs.GLint)
def glFragmentLightModeliSGIX( pname,param ):pass
@_f
@_p.types(None,_cs.GLenum,arrays.GLintArray)
def glFragmentLightModelivSGIX( pname,params ):pass
@_f
@_p.types(None,_cs.GLenum,_cs.GLenum,_cs.GLfloat)
def glFragmentMaterialfSGIX( face,pname,param ):pass
@_f
@_p.types(None,_cs.GLenum,_cs.GLenum,arrays.GLfloatArray)
def glFragmentMaterialfvSGIX( face,pname,params ):pass
@_f
@_p.types(None,_cs.GLenum,_cs.GLenum,_cs.GLint)
def glFragmentMaterialiSGIX( face,pname,param ):pass
@_f
@_p.types(None,_cs.GLenum,_cs.GLenum,arrays.GLintArray)
def glFragmentMaterialivSGIX( face,pname,params ):pass
@_f
@_p.types(None,_cs.GLenum,_cs.GLenum,arrays.GLfloatArray)
def glGetFragmentLightfvSGIX( light,pname,params ):pass
@_f
@_p.types(None,_cs.GLenum,_cs.GLenum,arrays.GLintArray)
def glGetFragmentLightivSGIX( light,pname,params ):pass
@_f
@_p.types(None,_cs.GLenum,_cs.GLenum,arrays.GLfloatArray)
def glGetFragmentMaterialfvSGIX( face,pname,params ):pass
@_f
@_p.types(None,_cs.GLenum,_cs.GLenum,arrays.GLintArray)
def glGetFragmentMaterialivSGIX( face,pname,params ):pass
@_f
@_p.types(None,_cs.GLenum,_cs.GLint)
def glLightEnviSGIX( pname,param ):pass
def glInitFragmentLightingSGIX():
    '''Return boolean indicating whether this extension is available'''
    # NOTE(review): hasGLExtension presumably needs a current GL context to
    # query the driver's extension list — confirm before calling at import time.
    from OpenGL import extensions
    return extensions.hasGLExtension( EXTENSION_NAME )
| 36.375 | 79 | 0.831615 |
acf642929333bebe1142b34a5315b7a8fafe1145 | 1,764 | py | Python | skypy/galaxies/tests/test_luminosity.py | itrharrison/skypy-itrharrison | cea1f02d1b2cd3b689266d7ae9bca1a4cfe986a2 | [
"BSD-3-Clause"
] | 88 | 2020-04-06T15:48:17.000Z | 2022-02-16T12:01:54.000Z | skypy/galaxies/tests/test_luminosity.py | itrharrison/skypy-itrharrison | cea1f02d1b2cd3b689266d7ae9bca1a4cfe986a2 | [
"BSD-3-Clause"
] | 332 | 2020-04-04T07:30:08.000Z | 2022-03-30T14:49:08.000Z | skypy/galaxies/tests/test_luminosity.py | itrharrison/skypy-itrharrison | cea1f02d1b2cd3b689266d7ae9bca1a4cfe986a2 | [
"BSD-3-Clause"
] | 41 | 2020-04-03T13:50:43.000Z | 2022-03-24T16:10:03.000Z | import numpy as np
import pytest
from scipy.stats import kstest
@pytest.mark.flaky
def test_schechter_lf_magnitude():
    """KS-test Schechter luminosity-function samples against the analytic CDF,
    then check shapes, the magnitude limit, and the not-implemented alpha-array path."""
    from skypy.galaxies.luminosity import schechter_lf_magnitude
    from astropy.cosmology import default_cosmology
    # NOTE: the redundant function-level `import pytest` was removed; pytest is
    # already imported at module level.

    # use default cosmology
    cosmo = default_cosmology.get()

    # Schechter function parameters for tests
    M_star = -20.5
    alpha = -1.3

    # sample 1000 galaxies at a fixed redshift of 1.0
    z = np.repeat(1.0, 1000)
    M = schechter_lf_magnitude(z, M_star, alpha, 30., cosmo)

    # get the distribution function
    log10_x_min = -0.4*(30. - cosmo.distmod(1.0).value - M_star)
    x = np.logspace(log10_x_min, log10_x_min + 3, 1000)
    pdf = x**(alpha+1)*np.exp(-x)
    # trapezoidal integration of the pdf in log-space, normalised to a CDF
    cdf = np.concatenate([[0.], np.cumsum((pdf[1:]+pdf[:-1])/2*np.diff(np.log(x)))])
    cdf /= cdf[-1]

    # test the samples against the CDF
    D, p = kstest(10.**(-0.4*(M - M_star)), lambda t: np.interp(t, x, cdf))
    assert p > 0.01, 'D = {}, p = {}'.format(D, p)

    # test for 1000 galaxies with Pareto redshift distribution
    z = np.random.pareto(3., size=1000)

    # for scalar parameters, sample galaxies with magnitude limit of 30
    M = schechter_lf_magnitude(z, M_star, alpha, 30., cosmo)

    # check that the output has the correct shape
    assert np.shape(M) == (1000,)

    # make sure magnitude limit was respected
    M_lim = 30. - cosmo.distmod(z).value
    assert np.all(M <= M_lim)

    # sample with array for alpha
    # not implemented at the moment
    with pytest.raises(NotImplementedError):
        M = schechter_lf_magnitude(z, M_star, np.broadcast_to(alpha, z.shape), 30., cosmo)

    # sample with an explicit size
    schechter_lf_magnitude(1.0, M_star, alpha, 30., cosmo, size=100)
acf6433f5b762b8ed8e285b70d860346234403f7 | 4,002 | py | Python | benchmark/startCirq1381.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | benchmark/startCirq1381.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | benchmark/startCirq1381.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 5/15/20 4:49 PM
# @File : grover.py
# qubit number=5
# total number=54
import cirq
import cirq.google as cg
from typing import Optional
import sys
from math import log2
import numpy as np
#thatsNoCode
from cirq.contrib.svg import SVGCircuit
# Symbols for the rotation angles in the QAOA circuit.
def make_circuit(n: int, input_qubit):
    """Build the fixed 5-qubit benchmark circuit with a final joint measurement.

    The gate sequence is machine-generated (the trailing ``# number=`` tags are
    generation ids and must be kept in this exact order). ``n`` is not used in
    the body — the circuit always acts on the qubits passed in ``input_qubit``.
    """
    c = cirq.Circuit()  # circuit begin
    c.append(cirq.H.on(input_qubit[0])) # number=3
    c.append(cirq.H.on(input_qubit[1])) # number=4
    c.append(cirq.H.on(input_qubit[2])) # number=5
    c.append(cirq.H.on(input_qubit[3])) # number=6
    c.append(cirq.H.on(input_qubit[0])) # number=38
    c.append(cirq.CZ.on(input_qubit[1],input_qubit[0])) # number=39
    c.append(cirq.H.on(input_qubit[0])) # number=40
    c.append(cirq.H.on(input_qubit[0])) # number=51
    c.append(cirq.CZ.on(input_qubit[1],input_qubit[0])) # number=52
    c.append(cirq.H.on(input_qubit[0])) # number=53
    c.append(cirq.Z.on(input_qubit[1])) # number=49
    c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=50
    c.append(cirq.H.on(input_qubit[0])) # number=32
    c.append(cirq.CZ.on(input_qubit[1],input_qubit[0])) # number=33
    c.append(cirq.H.on(input_qubit[0])) # number=34
    c.append(cirq.H.on(input_qubit[4])) # number=21
    # Two identical generated rounds of the main gate pattern.
    for i in range(2):
        c.append(cirq.H.on(input_qubit[0])) # number=1
        c.append(cirq.H.on(input_qubit[1])) # number=2
        c.append(cirq.H.on(input_qubit[2])) # number=7
        c.append(cirq.H.on(input_qubit[3])) # number=8
        c.append(cirq.CNOT.on(input_qubit[3],input_qubit[0])) # number=41
        c.append(cirq.Z.on(input_qubit[3])) # number=42
        c.append(cirq.CNOT.on(input_qubit[3],input_qubit[0])) # number=43
        c.append(cirq.CNOT.on(input_qubit[1],input_qubit[3])) # number=44
        c.append(cirq.CNOT.on(input_qubit[3],input_qubit[2])) # number=45
        c.append(cirq.H.on(input_qubit[0])) # number=17
        c.append(cirq.H.on(input_qubit[1])) # number=18
        c.append(cirq.H.on(input_qubit[2])) # number=19
        c.append(cirq.H.on(input_qubit[3])) # number=20
        c.append(cirq.X.on(input_qubit[0])) # number=9
        c.append(cirq.X.on(input_qubit[1])) # number=10
        c.append(cirq.X.on(input_qubit[2])) # number=11
        c.append(cirq.CNOT.on(input_qubit[0],input_qubit[3])) # number=35
        c.append(cirq.X.on(input_qubit[3])) # number=36
        c.append(cirq.CNOT.on(input_qubit[0],input_qubit[3])) # number=37
        c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=24
        c.append(cirq.X.on(input_qubit[0])) # number=25
        c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=26
        c.append(cirq.X.on(input_qubit[1])) # number=14
        c.append(cirq.X.on(input_qubit[2])) # number=15
        c.append(cirq.X.on(input_qubit[3])) # number=16
        c.append(cirq.X.on(input_qubit[3])) # number=46
        c.append(cirq.Y.on(input_qubit[1])) # number=47
        c.append(cirq.X.on(input_qubit[1])) # number=22
        c.append(cirq.X.on(input_qubit[1])) # number=23
    # circuit end
    c.append(cirq.measure(*input_qubit, key='result'))
    return c
def bitstring(bits):
    """Render an iterable of bit-like values as a string of '0'/'1' characters."""
    characters = [str(int(bit)) for bit in bits]
    return ''.join(characters)
if __name__ == '__main__':
    qubit_count = 5
    input_qubits = [cirq.GridQubit(i, 0) for i in range(qubit_count)]  # one column of grid qubits
    circuit = make_circuit(qubit_count,input_qubits)
    # Re-target the circuit for Google's Sycamore gate set (sqrt-iswap based).
    circuit = cg.optimized_for_sycamore(circuit, optimizer_type='sqrt_iswap')
    circuit_sample_count =2000
    simulator = cirq.Simulator()
    result = simulator.run(circuit, repetitions=circuit_sample_count)
    # Collapse each repetition's measured bits into a '0'/'1' string histogram.
    frequencies = result.histogram(key='result', fold_func=bitstring)
    # Persist the histogram, the circuit length and the circuit diagram.
    writefile = open("../data/startCirq1381.csv","w+")
    print(format(frequencies),file=writefile)
    print("results end", file=writefile)
    print(circuit.__len__(), file=writefile)
    print(circuit,file=writefile)
    writefile.close()
acf645295be8ed98daf512c2a2397a41037d6b47 | 5,804 | py | Python | src/send_email.py | testingautomated-usi/rl-plasticity-experiments | a32cebcee89f6f734477a1f1bdd8b7f8ef7aa99a | [
"MIT"
] | null | null | null | src/send_email.py | testingautomated-usi/rl-plasticity-experiments | a32cebcee89f6f734477a1f1bdd8b7f8ef7aa99a | [
"MIT"
] | null | null | null | src/send_email.py | testingautomated-usi/rl-plasticity-experiments | a32cebcee89f6f734477a1f1bdd8b7f8ef7aa99a | [
"MIT"
] | null | null | null | import argparse
import glob
import os
import smtplib
import ssl
import time
from email import encoders
from email.mime.base import MIMEBase
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from typing import List, Tuple
import yaml
from utilities import HOME, SUPPORTED_ENVS
def build_attachment(filepath: str, name: str) -> MIMEBase:
    """Read *filepath* and wrap it as a base64-encoded octet-stream MIME part.

    The returned part carries a ``Content-Disposition`` header naming the
    attachment *name*, ready to be attached to a MIMEMultipart message.
    """
    payload = MIMEBase("application", "octet-stream")
    with open(filepath, "rb") as source:
        payload.set_payload(source.read())
    # Base64-encode so binary content survives ASCII-only mail transports.
    encoders.encode_base64(payload)
    payload.add_header("Content-Disposition", f"attachment; filename= {name}")
    return payload
def get_account_details() -> Tuple[str, str, str]:
    """Load (sender_email, receiver_email, password) from ``<HOME>/gmail.yml``."""
    config_path = os.path.abspath(HOME) + "/gmail.yml"
    with open(config_path, "r") as stream:
        details = yaml.safe_load(stream)
    account = details["account"]
    return account["sender_email"], account["receiver_email"], account["password"]
def send_email(
    subject: str, password: str, from_field: str, to_field: str, body: str, attachments: List[MIMEBase] = None
) -> None:
    """Compose a multipart message and deliver it via Gmail's SSL SMTP endpoint.

    ``attachments``, when given, is a list of pre-built MIME parts appended
    after the plain-text body.
    """
    message = MIMEMultipart()
    message["Subject"] = subject
    message["From"] = from_field
    message["To"] = to_field
    message.attach(MIMEText("{} \n\n".format(body), "plain"))
    for attachment in (attachments or []):
        message.attach(attachment)
    # Authenticated TLS session; the default context enforces cert checking.
    ssl_context = ssl.create_default_context()
    with smtplib.SMTP_SSL("smtp.gmail.com", 465, context=ssl_context) as server:
        server.login(from_field, password)
        server.sendmail(from_field, to_field, message.as_string())
def convert_time(time_elapsed_s: float) -> Tuple[float, str]:
    """Convert a duration in seconds to a human-friendly (value, unit) pair.

    Returns seconds below one minute, minutes (rounded to 2 decimals) below
    one hour, and hours otherwise.

    Bug fix: the original chain of conditions had no branch for exactly
    3600 seconds and fell through returning None; one hour now correctly
    reports (1.0, "h").
    """
    if time_elapsed_s < 60:
        return time_elapsed_s, "s"
    if time_elapsed_s < 3600:
        return round(time_elapsed_s / 60, 2), "min"
    return round(time_elapsed_s / 3600, 2), "h"
class MonitorProgress:
    """Periodically emails a summary of an experiment's results directory.

    Keeps a running report counter plus the search start time so that each
    report includes both per-iteration and cumulative elapsed time.
    """

    def __init__(
        self,
        algo_name: str,
        env_name: str,
        results_dir: str,
        search_type: str,
        start_search_time: float,
        param_names_string: str = None,
        starting_progress_report_number: int = 0,
    ):
        self.algo_name = algo_name
        self.env_name = env_name
        self.param_names_string = param_names_string
        self.results_dir = results_dir
        self.progress_report_number = starting_progress_report_number
        self.search_type = search_type
        self.start_search_time = start_search_time
        # Credentials are read once up front from the gmail.yml config.
        self.sender_email, self.receiver_email, self.password = get_account_details()

    def send_progress_report(self, time_elapsed: float):
        """Email report #N listing the documents currently in the results dir."""
        experiment_parts = [self.search_type, self.env_name, self.algo_name]
        if self.param_names_string:
            experiment_parts.append(self.param_names_string)
        subject = "Progress report # {} for experiment {}".format(
            self.progress_report_number, "_".join(experiment_parts)
        )
        iter_value, iter_unit = convert_time(time_elapsed_s=time_elapsed)
        global_value, global_unit = convert_time(time_elapsed_s=(time.time() - self.start_search_time))
        lines = [
            "Time elapsed iteration {} {}. Time elapsed global {} {}\n".format(
                iter_value, iter_unit, global_value, global_unit
            ),
            "Documents in results dir {}: \n".format(self.results_dir),
        ]
        for document in os.listdir(self.results_dir):
            lines.append(document + "\n")
        send_email(
            subject=subject, password=self.password, from_field=self.sender_email,
            to_field=self.receiver_email, body="".join(lines)
        )
        self.progress_report_number += 1
if __name__ == "__main__":
    # CLI entry point: locate the .out/.err logs of one experiment run,
    # email them as attachments, then delete the local copies.
    parser = argparse.ArgumentParser()
    # NOTE(review): `default=` is dead on required flags — argparse never uses it.
    parser.add_argument("--subject", type=str, required=True, default="Test email subject")
    parser.add_argument("--body", type=str, required=True, default="No body")
    parser.add_argument("--experiment_type", choices=["alphatest", "random"], required=True)
    parser.add_argument("--env_name", choices=SUPPORTED_ENVS, required=True)
    parser.add_argument("--filename_prefix", type=str, required=True, default="filename")
    args, _ = parser.parse_known_args()
    sender_email, receiver_email, password = get_account_details()
    scripts_folder = os.path.join(HOME, "scripts")
    experiments_folder = os.path.join(scripts_folder, args.experiment_type)
    env_folder = os.path.join(experiments_folder, args.env_name)
    # assuming there is only one file that matches
    output_file = glob.glob(os.path.join(env_folder, "{}*.out".format(args.filename_prefix)))[0]
    error_file = glob.glob(os.path.join(env_folder, "{}*.err".format(args.filename_prefix)))[0]
    attachment_1 = build_attachment(filepath=output_file, name="{}_out.txt".format(args.filename_prefix))
    attachment_2 = build_attachment(filepath=error_file, name="{}_err.txt".format(args.filename_prefix))
    attachments = [attachment_1, attachment_2]
    send_email(
        subject=args.subject,
        password=password,
        from_field=sender_email,
        to_field=receiver_email,
        body=args.body,
        attachments=attachments,
    )
    # Logs were delivered by email; remove the local copies.
    os.remove(output_file)
    os.remove(error_file)
| 36.734177 | 122 | 0.685045 |
acf645bff3b1922133fbc9f261f8e0e27452862a | 980 | py | Python | ops/utils.py | noahzhy/temporal-shift-module | 41fbcca37e0148849d55ec581980cd12ad1386ee | [
"Apache-2.0"
] | 1,693 | 2019-04-09T03:41:31.000Z | 2022-03-31T08:18:53.000Z | ops/utils.py | noahzhy/temporal-shift-module | 41fbcca37e0148849d55ec581980cd12ad1386ee | [
"Apache-2.0"
] | 202 | 2019-05-10T19:39:02.000Z | 2022-03-29T01:41:48.000Z | ops/utils.py | noahzhy/temporal-shift-module | 41fbcca37e0148849d55ec581980cd12ad1386ee | [
"Apache-2.0"
] | 412 | 2019-04-14T09:26:16.000Z | 2022-03-31T08:18:55.000Z | import numpy as np
def softmax(scores):
    """Numerically stable softmax over the last axis of `scores`."""
    # Shift by the per-row maximum so exp() cannot overflow.
    shifted = scores - scores.max(axis=-1, keepdims=True)
    exponentials = np.exp(shifted)
    return exponentials / exponentials.sum(axis=-1, keepdims=True)
class AverageMeter(object):
    """Tracks the latest value plus a running sum, count and average."""

    def __init__(self):
        self.reset()

    def reset(self):
        """Clear all statistics back to zero."""
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1):
        """Record `val` observed `n` times and refresh the running average."""
        self.val = val
        self.count += n
        self.sum += val * n
        self.avg = self.sum / self.count
def accuracy(output, target, topk=(1,)):
    """Computes the precision@k for the specified values of k.

    Args:
        output: (batch, num_classes) score tensor.
        target: (batch,) tensor of ground-truth class indices.
        topk: tuple of k values to report.

    Returns:
        list of scalar tensors, one top-k accuracy percentage per k.
    """
    maxk = max(topk)
    batch_size = target.size(0)

    # pred: (maxk, batch) — row i holds every sample's i-th best prediction.
    _, pred = output.topk(maxk, 1, True, True)
    pred = pred.t()
    correct = pred.eq(target.view(1, -1).expand_as(pred))

    res = []
    for k in topk:
        # Bug fix: `correct[:k]` can be non-contiguous, so `.view(-1)` raises
        # a RuntimeError on modern PyTorch; `.reshape(-1)` handles both cases.
        correct_k = correct[:k].reshape(-1).float().sum(0)
        res.append(correct_k.mul_(100.0 / batch_size))
    return res
acf64694edeaccb56111a8dbab17badd70b87b33 | 1,746 | py | Python | scripts/examples/simulate_container_events.py | lalten/kreathon2018 | 61fd0cedc949f287ba8ff8ecd3e1f5841264d74b | [
"MIT"
] | null | null | null | scripts/examples/simulate_container_events.py | lalten/kreathon2018 | 61fd0cedc949f287ba8ff8ecd3e1f5841264d74b | [
"MIT"
] | null | null | null | scripts/examples/simulate_container_events.py | lalten/kreathon2018 | 61fd0cedc949f287ba8ff8ecd3e1f5841264d74b | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division
import random
import time
import numpy as np
import paho.mqtt.client as mqtt
class SendMissingMqttNodes:
    """Simulates fill-level sensors for bins 5..100 and publishes readings over MQTT."""
    measurement_topic = "awesome_bin" # Arduinos publish on this topic
    broker_url = "broker.mqttdashboard.com" # url and port of public MQTT broker (no security...)
    broker_port = 1883
    # initialize with weights
    def __init__(self):
        """Connect to the public broker and draw one random fill weight per sensor."""
        self.client = mqtt.Client()
        self.client.connect(SendMissingMqttNodes.broker_url, SendMissingMqttNodes.broker_port)
        self.client.loop_start()
        # NOTE(review): shape (101, 1) makes each self.fill_level[id] a
        # length-1 array rather than a scalar — confirm this is intentional.
        self.fill_level = np.zeros((101,1))
        self.fill_weights = 10 * np.random.rand(101)
    # def full_random(self):
    #     for sensor_id in range(5, 101):
    #         r = random.random()*1000.0
    def spin(self):
        """Publish one '<id>_<level>_<status>_<battery>' payload per sensor every 5 s until Ctrl-C."""
        try:
            while True:
                for sensor_id in range(5, 101):
                    # Grow each bin's level by a random step scaled by its weight,
                    # capped at 1000 (== "full", which resets the bin).
                    self.fill_level[sensor_id] += self.fill_weights[sensor_id] * random.random()
                    self.fill_level[sensor_id] = min(self.fill_level[sensor_id], 1000)
                    if self.fill_level[sensor_id] == 1000:
                        self.fill_level[sensor_id] = 0 # just reset to zero for now
                    # range_status / bat_mv are placeholders matching the real payload format.
                    range_status = 0
                    bat_mv = -1
                    this_fill_level = int(round(self.fill_level[sensor_id]))
                    payload_str = '{}_{}_{}_{}\n'.format(sensor_id, this_fill_level, range_status, bat_mv)
                    self.client.publish(SendMissingMqttNodes.measurement_topic, payload_str)
                time.sleep(5)
        except KeyboardInterrupt:
            pass
if __name__ == "__main__":
    # Run the simulator until interrupted with Ctrl-C.
    n = SendMissingMqttNodes()
    n.spin()
| 34.235294 | 106 | 0.606529 |
acf6470fcde684f0ae5ac5c3cf5dff8e4284adf2 | 36,034 | py | Python | onmt/train_utils/mp_trainer.py | quanpn90/SpeechGAN | b2aa923ac40474bd7a6fa2acfd290eb2770e0972 | [
"MIT"
] | null | null | null | onmt/train_utils/mp_trainer.py | quanpn90/SpeechGAN | b2aa923ac40474bd7a6fa2acfd290eb2770e0972 | [
"MIT"
] | null | null | null | onmt/train_utils/mp_trainer.py | quanpn90/SpeechGAN | b2aa923ac40474bd7a6fa2acfd290eb2770e0972 | [
"MIT"
] | null | null | null | from __future__ import division
import datetime
import gc
import inspect
import math
import os
import re
import time
import torch
import copy
import sys
from apex import amp
from apex.parallel import DistributedDataParallel as DDP
import onmt
import onmt.markdown
import onmt.modules
from onmt.data.data_iterator import DataIterator
from onmt.data.multidata_iterator import MultiDataIterator
from onmt.data.dataset import rewrap
from onmt.model_factory import build_model, build_language_model, optimize_model
from onmt.model_factory import init_model_parameters
from onmt.modules.loss import NMTLossFunc, NMTAndCTCLossFunc
from onmt.train_utils.stats import Logger
from onmt.utils import checkpoint_paths, normalize_gradients
from onmt.model_factory import build_model, optimize_model, init_model_parameters
import torch.distributed as dist
import warnings
warnings.filterwarnings("ignore", category=UserWarning)
def varname(p):
    """Best-effort: recover the variable name `p` was called with at the call site.

    Inspects the caller's source line(s) for a `varname(<identifier>)` pattern
    and returns the identifier. Returns None implicitly when no plain-identifier
    call is found (e.g. the argument was an expression) or source is unavailable.
    """
    # frame.f_back is the caller; index [3] of getframeinfo is code_context.
    for line in inspect.getframeinfo(inspect.currentframe().f_back)[3]:
        m = re.search(r'\bvarname\s*\(\s*([A-Za-z_][A-Za-z0-9_]*)\s*\)', line)
        if m:
            return m.group(1)
def prepare_sample(batch, device=None, fp16=False):
    """Unwrap a minibatch and move it onto the corresponding GPU.

    :param batch: a Batch object, or a singleton list containing one
    :param device: GPU id the batch should be placed on
    :param fp16: whether to cast the batch to half precision
    :return: the rewrapped batch, now resident on the GPU
    """
    sample = batch[0] if isinstance(batch, list) else batch
    sample = rewrap(sample)
    sample.cuda(fp16=fp16, device=device)
    return sample
def generate_data_iterator(dataset, rank, world_size, seed,
                           num_workers=1, epoch=1., buffer_size=0):
    """Create the sharded iterator appropriate for `dataset`.

    A list of datasets yields a MultiDataIterator; a single dataset yields a
    plain DataIterator over its own collater/batches. Sharding is controlled
    by (world_size, rank).
    """
    shared_kwargs = dict(seed=seed, num_workers=num_workers, epoch=epoch,
                         buffer_size=buffer_size, num_shards=world_size, shard_id=rank)
    if isinstance(dataset, list):
        # multiple datasets -> multi-dataset iterator
        return MultiDataIterator(dataset, **shared_kwargs)
    return DataIterator(dataset, dataset.collater, dataset.batches, **shared_kwargs)
def zero_tensor():
    """Return a fresh single-element float zero tensor on the current GPU."""
    zero = torch.zeros(1)
    return zero.cuda()
def all_reduce_and_rescale_tensors(tensors, rescale_denom,
                                   buffer_size=10485760):
    """All-reduce and rescale tensors in chunks of the specified size.

    Small tensors are packed into one flat buffer and reduced together to
    amortize communication overhead; tensors larger than the buffer are
    reduced individually. All tensors are modified in place.

    Args:
        tensors: list of Tensors to all-reduce
        rescale_denom: denominator for rescaling summed Tensors
        buffer_size: all-reduce chunk size in bytes

    NOTE(review): the flat buffer inherits the dtype of tensors[0] — this
    presumably assumes all tensors share one dtype; confirm with callers.
    """
    # buffer size in bytes, determine equiv. # of elements based on data type
    buffer_t = tensors[0].new(
        math.ceil(buffer_size / tensors[0].element_size())).zero_()
    buffer = []
    def all_reduce_buffer():
        # copy tensors into buffer_t
        offset = 0
        for t in buffer:
            numel = t.numel()
            buffer_t[offset:offset+numel].copy_(t.view(-1))
            offset += numel
        # all-reduce and rescale
        torch.distributed.all_reduce(buffer_t[:offset])
        buffer_t.div_(rescale_denom)
        # copy all-reduced buffer back into tensors
        offset = 0
        for t in buffer:
            numel = t.numel()
            t.view(-1).copy_(buffer_t[offset:offset+numel])
            offset += numel
    with torch.no_grad():
        filled = 0
        for t in tensors:
            sz = t.numel() * t.element_size()
            if sz > buffer_size:
                # tensor is bigger than buffer, all-reduce and rescale directly
                torch.distributed.all_reduce(t)
                t.div_(rescale_denom)
            elif filled + sz > buffer_size:
                # buffer is full, all-reduce and replace buffer with grad
                all_reduce_buffer()
                buffer = [t]
                filled = sz
            else:
                # add tensor to buffer
                buffer.append(t)
                filled += sz
        # flush any remaining packed tensors
        if len(buffer) > 0:
            all_reduce_buffer()
class Trainer(object):
    def __init__(self, device, train_data, valid_data, dicts, opt, setup_optimizer=True):
        """
        Set up one single-node distributed training process: init the NCCL
        process group, build model + loss, move them to the GPU, synchronize
        initial parameters across ranks, and (optionally) create the
        amp-initialized optimizer and wrap the model in apex DDP.
        :param device: int (GPU id; also used as the process rank)
        :param train_data: training dataset (deep-copied on non-main ranks)
        :param valid_data: validation dataset (deep-copied on non-main ranks)
        :param dicts: vocabulary dictionaries ('tgt', optionally 'langs', ...)
        :param opt: training options namespace
        :param setup_optimizer: whether to build the optimizer and run amp.initialize
        """
        # self.model = model
        # self.model = model
        # self.loss_function = loss_function
        self.device = device
        opt.node_rank = 0
        opt.nodes = 1
        self.world_size = len(opt.gpus)
        # in the case of single node distributed, it should equal self.device
        self.rank = self.device
        # make a group to later use with dist.all_reduce
        self.group = dist.group.WORLD
        self.print("[INFO] Training Options:", opt)
        dist.init_process_group(backend='nccl', init_method='env://', world_size=self.world_size, rank=self.rank)
        self.model = None
        # main rank keeps the original data objects; other ranks work on deep copies
        if self.rank == 0:
            self.train_data = train_data
            self.valid_data = valid_data
        else:
            self.train_data = copy.deepcopy(train_data)
            self.valid_data = copy.deepcopy(valid_data)
        self.dicts = dicts
        self.opt = opt
        self.cuda = (len(opt.gpus) >= 1 and opt.gpus[0] >= 0)
        assert self.cuda, "[ERROR] Training is only available on GPUs."
        self.start_time = 0
        # setting up models and others
        if opt.lfv_multilingual:
            from onmt.models.speech_recognizer.lid_loss import CrossEntropyLIDLoss
            lid_loss = CrossEntropyLIDLoss(opt.n_languages, opt.label_smoothing, opt.fast_xentropy)
            self.loss_function.add_loss_function(lid_loss, 'lid_loss')
        torch.manual_seed(self.opt.seed)
        # note: we must start creating models after ccreating the processes
        # for some reason passing a pre-created model to a process creates a "pickle" error
        if not opt.fusion:
            if self.is_main():
                print("BUILDING MODEL .... ", flush=True)
            model = build_model(opt, dicts)
            """ Building the loss function """
            if opt.ctc_loss != 0:
                loss_function = NMTAndCTCLossFunc(dicts['tgt'].size(),
                                                  label_smoothing=opt.label_smoothing,
                                                  ctc_weight=opt.ctc_loss)
            elif opt.nce:
                from onmt.modules.nce.nce_loss import NCELoss
                loss_function = NCELoss(opt.model_size, dicts['tgt'].size(), noise_ratio=opt.nce_noise,
                                        logz=9, label_smoothing=opt.label_smoothing)
            else:
                loss_function = NMTLossFunc(opt.model_size, dicts['tgt'].size(),
                                            label_smoothing=opt.label_smoothing,
                                            mirror=opt.mirror_loss,
                                            fast_xentropy=opt.fast_xentropy)
            # This function replaces modules with the more optimized counterparts so that it can run faster
            # Currently exp with LayerNorm
            if not opt.memory_profiling:
                # distributed is required to convert BatchNorm to SyncBatchNorm for DDP
                optimize_model(model, distributed=(self.world_size > 1))
                # optimize_model(model)
            init_model_parameters(model, opt)
            self.model = model
            self.loss_function = loss_function
        if self.cuda:
            torch.cuda.set_device(self.device)
            self.loss_function = self.loss_function.cuda(device=self.device)
            self.model = self.model.cuda(device=self.device)
        # Ensure that the distributed copies have the same initial parameters
        # Manual seed may not work the same for different GPU models.
        if self.world_size > 1:
            params = [p for p in self.model.parameters()]
            # zero all non-main ranks, then all-reduce so every rank ends up
            # with the main rank's parameters
            with torch.no_grad():
                if not self.is_main():
                    for p in params:
                        p.zero_()
                else:
                    for p in params:
                        p.add_(0)
        if self.world_size > 1:
            params = [p for p in self.model.parameters()]
            all_reduce_and_rescale_tensors(params, 1)
        if setup_optimizer:
            self.optim = onmt.Optim(opt)
            self.optim.set_parameters(self.model.parameters())
            if self.is_main():
                print("[INFO] Optimizer: ", self.optim.optimizer)
            # map the fp16 flags to apex amp opt levels (O0 = fp32, O1 = mixed, O2 = fp16)
            if not self.opt.fp16:
                opt_level = "O0"
                keep_batchnorm_fp32 = False
            elif self.opt.fp16_mixed:
                opt_level = "O1"
                keep_batchnorm_fp32 = None
            else:
                opt_level = "O2"
                keep_batchnorm_fp32 = False
            if self.cuda:
                self.model, self.optim.optimizer = amp.initialize(self.model,
                                                                  self.optim.optimizer,
                                                                  opt_level=opt_level,
                                                                  keep_batchnorm_fp32=keep_batchnorm_fp32,
                                                                  loss_scale="dynamic",
                                                                  verbosity=1 if self.opt.verbose else 0)
            # wrap the model into DDP after initializing by amp
            if self.world_size > 1:
                """
                delay_allreduce is required to avoid allreduce error during backward pass
                """
                self.model = DDP(self.model, delay_allreduce=True, gradient_average=False)
                # torch DDP is more likely to work with the official amp autocast
                # self.model = torch.nn.parallel.DistributedDataParallel(self.model, device_ids=[self.rank],
                #                                                        output_device=self.rank,
                #                                                        find_unused_parameters=True)
        print("[INFO] Process %d ready." % self.rank, flush=True)
def is_main(self):
return self.rank == 0
def print(self, *content, flush=False):
"""
A helper function to print only on the main process
:param flush:
:param content:
:return:
"""
if self.is_main():
print(*content, flush=flush)
else:
return
def load_encoder_weight(self, checkpoint_file):
print("Loading pretrained models from %s" % checkpoint_file)
checkpoint = torch.load(checkpoint_file, map_location=lambda storage, loc: storage)
pretrained_model = build_model(checkpoint['opt'], checkpoint['dicts'])
pretrained_model.load_state_dict(checkpoint['model'])
print("Loading pretrained encoder weights ...")
pretrained_model.encoder.language_embedding = None
enc_language_embedding = self.model.encoder.language_embedding
self.model.encoder.language_embedding = None
encoder_state_dict = pretrained_model.encoder.state_dict()
self.model.encoder.load_state_dict(encoder_state_dict)
self.model.encoder.language_embedding = enc_language_embedding
return
    def load_decoder_weight(self, checkpoint_file):
        """Load pretrained decoder weights from a checkpoint into ``self.model``.

        The word and language embeddings (which may differ in size between the
        two vocabularies) are detached before the state-dict transfer; their
        rows are then copied individually for every token/language shared by
        the two vocabularies. Generator bias rows are copied alongside the
        word embeddings.

        :param checkpoint_file: path to a torch checkpoint with 'opt', 'dicts'
            and 'model' entries
        """
        self.print("Loading pretrained models from %s" % checkpoint_file)
        checkpoint = torch.load(checkpoint_file, map_location=lambda storage, loc: storage)
        chkpoint_dict = checkpoint['dicts']
        pretrained_model = build_model(checkpoint['opt'], chkpoint_dict)
        pretrained_model.load_state_dict(checkpoint['model'])
        self.print("Loading pretrained decoder weights ...")
        # first we have to remove the embeddings which probably have difference size ...
        pretrained_word_emb = pretrained_model.decoder.word_lut
        pretrained_model.decoder.word_lut = None
        pretrained_lang_emb = pretrained_model.decoder.language_embeddings
        pretrained_model.decoder.language_embeddings = None
        # actually we assume that two decoders have the same language embeddings...
        untrained_word_emb = self.model.decoder.word_lut
        self.model.decoder.word_lut = None
        untrained_lang_emb = self.model.decoder.language_embeddings
        self.model.decoder.language_embeddings = None
        # transfer every remaining decoder parameter wholesale
        decoder_state_dict = pretrained_model.decoder.state_dict()
        self.model.decoder.load_state_dict(decoder_state_dict)
        # now we load the embeddings ....
        # copy rows token-by-token for the intersection of the two vocabularies
        n_copies = 0
        for token in self.dicts['tgt'].labelToIdx:
            untrained_id = self.dicts['tgt'].labelToIdx[token]
            if token in chkpoint_dict['tgt'].labelToIdx:
                pretrained_id = chkpoint_dict['tgt'].labelToIdx[token]
                untrained_word_emb.weight.data[untrained_id].copy_(pretrained_word_emb.weight.data[pretrained_id])
                self.model.generator[0].linear.bias.data[untrained_id].copy_(pretrained_model
                                                                             .generator[0].linear.bias.data[
                                                                                 pretrained_id])
                n_copies += 1
        self.print("Copied embedding for %d words" % n_copies)
        self.model.decoder.word_lut = untrained_word_emb
        # now we load the language embeddings ...
        if pretrained_lang_emb and untrained_lang_emb and 'langs' in chkpoint_dict:
            for lang in self.dicts['langs']:
                untrained_id = self.dicts['langs'][lang]
                if lang in chkpoint_dict['langs']:
                    pretrained_id = chkpoint_dict['langs'][lang]
                    untrained_lang_emb.weight.data[untrained_id].copy_(pretrained_lang_emb.weight.data[pretrained_id])
        # restore this model's own (row-patched) language embedding
        self.model.decoder.language_embeddings = untrained_lang_emb
    def warm_up(self):
        """
        Warmup the memory allocator, by attempting to fit the largest batch.

        Runs one full forward/backward pass on the single largest batch so the
        CUDA caching allocator reserves enough memory up front; a later OOM on a
        smaller batch then becomes unlikely.  Gradients are discarded afterwards
        (zero_grad), so model state is unchanged.
        :return: None
        """
        # if self.opt.memory_profiling:
        #     from pytorch_memlab import MemReporter
        #     reporter = MemReporter()
        #
        # Pick the largest batch from (possibly a list of) training datasets.
        batch = self.train_data[0].get_largest_batch() if isinstance(self.train_data, list) \
            else self.train_data.get_largest_batch()
        opt = self.opt
        if self.cuda:
            batch.cuda(fp16=self.opt.fp16 and not self.opt.fp16_mixed)
        self.model.train()
        self.loss_function.train()
        self.model.zero_grad()
        oom = False
        if self.opt.memory_profiling:
            self.print("Input size: ")
            self.print(batch.size, batch.src_size, batch.tgt_size)
        if opt.streaming:
            streaming_state = self.model.init_stream()
        else:
            streaming_state = None
        try:
            targets = batch.get('target_output')
            tgt_mask = None
            outputs = self.model(batch, streaming=opt.streaming, target_mask=tgt_mask,
                                 zero_encoder=opt.zero_encoder,
                                 mirror=opt.mirror_loss, streaming_state=streaming_state,
                                 nce=opt.nce)
            outputs['tgt_mask'] = tgt_mask
            loss_dict = self.loss_function(outputs, targets, model=self.model)
            loss_data = loss_dict['data']
            loss = loss_dict['loss']  # a little trick to avoid gradient overflow with fp16
            full_loss = loss
            # Optional auxiliary losses mirror the ones used in train_epoch().
            if opt.mirror_loss:
                rev_loss = loss_dict['rev_loss']
                mirror_loss = loss_dict['mirror_loss']
                full_loss = full_loss + rev_loss + mirror_loss
            # reconstruction loss
            if opt.reconstruct:
                rec_loss = loss_dict['rec_loss']
                rec_loss = rec_loss
                full_loss = full_loss + rec_loss
            if opt.lfv_multilingual:
                lid_logits = outputs['lid_logits']
                lid_labels = batch.get('target_lang')
                lid_loss_function = self.loss_function.get_loss_function('lid_loss')
                lid_loss = lid_loss_function(lid_logits, lid_labels)
                full_loss = full_loss + lid_loss
            optimizer = self.optim.optimizer
            if self.opt.memory_profiling:
                # NOTE(review): `reporter` is only created in the commented-out
                # pytorch_memlab block above, so this line raises NameError when
                # --memory_profiling is enabled — confirm before relying on it.
                reporter.report(verbose=True)
                # for obj in gc.get_objects():
                #     try:
                #         if torch.is_tensor(obj) or (hasattr(obj, 'data') and torch.is_tensor(obj.data)):
                #             # print(varname(obj))
                #             # we can rule out parameter cost later
                #             # if 'parameter' not in type(obj):
                #             # if len(obj.shape) == 3:
                #             # if not isinstance(obj, torch.nn.parameter.Parameter):
                #             #     tensor = obj
                #             #     numel = tensor.
                #             print(type(obj), obj.type(), obj.size())
                #     except:
                #         pass
                # print("Memory profiling complete.")
                # print(torch.cuda.memory_summary())
                # exit()
            if self.cuda:
                # apex.amp handles fp16 loss scaling for the backward pass.
                with amp.scale_loss(full_loss, optimizer) as scaled_loss:
                    scaled_loss.backward()
            else:
                loss.div_(batch.tgt_size).backward()
            if self.opt.memory_profiling:
                print('========= after backward =========')
                reporter.report(verbose=True)
            # Throw away the warm-up gradients; this pass only reserves memory.
            self.model.zero_grad()
            self.optim.zero_grad()
            # self.optim.step()
            # self.optim.reset()
        except RuntimeError as e:
            # Only swallow out-of-memory errors; anything else is a real bug.
            if 'out of memory' in str(e):
                oom = True
            else:
                raise e
        if oom:
            print("* Warning: out-of-memory in warming up. This is due to the largest batch is too big for the GPU.",
                  flush=True)
        else:
            print("* Warming up successfully.", flush=True)
        if self.opt.memory_profiling:
            if hasattr(torch.cuda, 'memory_summary'):
                print(torch.cuda.memory_summary())
            exit()
        pass
def save(self, epoch, valid_ppl, itr=None):
opt = self.opt
model = self.model
dicts = self.dicts
model_state_dict = self.model.state_dict()
optim_state_dict = self.optim.state_dict()
if itr:
itr_state_dict = itr.state_dict()
else:
itr_state_dict = None
# drop a checkpoint
checkpoint = {
'model': model_state_dict,
'dicts': dicts,
'opt': opt,
'epoch': epoch,
'itr': itr_state_dict,
'optim': optim_state_dict,
'amp': amp.state_dict()
}
file_name = '%s_ppl_%.6f_e%.2f.pt' % (opt.save_model, valid_ppl, epoch)
print('Writing to %s' % file_name)
torch.save(checkpoint, file_name)
# check the save directory here
checkpoint_dir = os.path.dirname(opt.save_model)
existed_save_files = checkpoint_paths(checkpoint_dir)
for save_file in existed_save_files[opt.keep_save_files:]:
print(" * Deleting old save file %s ...." % save_file)
os.remove(save_file)
    def eval(self, data):
        """
        Evaluate the model on `data` and return the average loss per target word.

        Runs a full pass without gradients, then all-reduces loss and word
        counts across distributed workers so every rank returns the same value.
        The model and loss function are restored to train mode before returning.
        """
        opt = self.opt
        rank = self.device
        world_size = self.world_size
        # the data iterator creates an epoch iterator
        data_iterator = generate_data_iterator(data, rank, world_size, seed=self.opt.seed,
                                               num_workers=opt.num_workers, epoch=1, buffer_size=opt.buffer_size)
        epoch_iterator = data_iterator.next_epoch_itr(False, pin_memory=False)
        data_size = len(epoch_iterator)  # NOTE(review): currently unused
        i = 0  # NOTE(review): batch counter, currently unused after the loop
        self.model.eval()
        self.loss_function.eval()
        # self.model.module.reset_states()
        total_loss = zero_tensor()
        total_words = zero_tensor()
        if opt.streaming:
            streaming_state = self.model.init_stream()
        else:
            streaming_state = None
        with torch.no_grad():
            while not data_iterator.end_of_epoch():
                samples = next(epoch_iterator)
                if samples:
                    batch = prepare_sample(samples, device=self.device, fp16=self.opt.fp16 and not self.opt.fp16_mixed)
                    targets = batch.get('target_output')
                    # Mask out padding tokens when computing the loss.
                    tgt_mask = targets.ne(onmt.constants.PAD)
                    outputs = self.model(batch, streaming=opt.streaming, target_mask=tgt_mask,
                                         mirror=opt.mirror_loss, streaming_state=streaming_state, nce=opt.nce)
                    outputs['tgt_mask'] = tgt_mask
                    loss_dict = self.loss_function(outputs, targets, model=self.model, eval=True)
                    loss_data = loss_dict['data']
                    total_loss.add_(loss_data)
                    total_words.add_(batch.tgt_size)
                    i = i + 1
        # allreduce the total loss and total words from other processes
        dist.all_reduce(total_loss, op=dist.ReduceOp.SUM, group=self.group)
        dist.all_reduce(total_words, op=dist.ReduceOp.SUM, group=self.group)
        # Restore training mode for the caller.
        self.model.train()
        self.loss_function.train()
        return total_loss / total_words
    def train_epoch(self, epoch, resume=False, itr_progress=None):
        """
        Train for one epoch and return the average loss per target word.

        Supports gradient accumulation (``opt.update_frequency`` mini-batches
        per optimizer step), fp16 via apex.amp, optional mirror/reconstruction/
        language-id auxiliary losses, periodic intra-epoch checkpointing
        (``opt.save_every``) and distributed logging via all-reduce.

        :param epoch: 1-based epoch number (also seeds the data iterator)
        :param resume: resume mid-epoch from `itr_progress` (currently buggy)
        :param itr_progress: iterator state dict to resume from
        """
        global rec_ppl
        opt = self.opt
        train_data = self.train_data
        streaming = opt.streaming
        # Clear the gradients of the model
        self.model.zero_grad()
        # self.model.module.reset_states()
        dataset = train_data
        data_iterator = generate_data_iterator(dataset, self.rank, self.world_size,
                                               seed=self.opt.seed, num_workers=opt.num_workers,
                                               epoch=epoch, buffer_size=opt.buffer_size)
        # TODO: fix resume which is currently buggy
        if resume:
            data_iterator.load_state_dict(itr_progress)
        epoch_iterator = data_iterator.next_epoch_itr(not streaming, pin_memory=opt.pin_memory)
        # Running totals for the whole epoch and for the current log window.
        total_tokens, total_loss, total_words = zero_tensor(), zero_tensor(), zero_tensor()
        total_non_pads = zero_tensor()
        report_loss, report_tgt_words = zero_tensor(), zero_tensor()
        report_src_words = zero_tensor()
        report_rec_loss, report_rev_loss, report_mirror_loss = zero_tensor(), zero_tensor(), zero_tensor()
        start = time.time()
        n_samples = len(data_iterator)
        counter = 0  # mini-batches accumulated since the last optimizer step
        num_accumulated_words = zero_tensor()
        num_accumulated_sents = zero_tensor()
        grad_scaler = 1
        nan = False  # NOTE(review): unused in this method
        nan_counter = zero_tensor()  # NOTE(review): unused in this method
        if opt.streaming:
            streaming_state = self.model.init_stream()
        else:
            streaming_state = None
        # `i` is the global (across ranks) batch index within the epoch.
        i = data_iterator.iterations_in_epoch if not isinstance(train_data, list) else epoch_iterator.n_yielded
        i = i * self.world_size
        while not data_iterator.end_of_epoch():
            curriculum = (epoch < opt.curriculum)  # NOTE(review): computed but unused
            # this batch generator is not very clean atm
            # TODO: move everything to the multiGPU trainer
            samples = next(epoch_iterator)
            batch = prepare_sample(samples, device=self.device, fp16=self.opt.fp16 and not self.opt.fp16_mixed)
            if opt.streaming:
                if train_data.is_new_stream():
                    streaming_state = self.model.init_stream()
            else:
                streaming_state = None
            # TODO: dealing with oom during distributed training
            oom = False
            try:
                # outputs is a dictionary containing keys/values necessary for loss function
                # can be flexibly controlled within models for easier extensibility
                counter = counter + 1
                # reduction_disabled = False if counter >= opt.update_frequency or i == (n_samples-1) else True
                # self.model.require_backward_grad_sync = not reduction_disabled
                targets = batch.get('target_output')
                tgt_mask = targets.ne(onmt.constants.PAD)
                outputs = self.model(batch, streaming=opt.streaming, target_mask=tgt_mask,
                                     zero_encoder=opt.zero_encoder,
                                     mirror=opt.mirror_loss, streaming_state=streaming_state,
                                     nce=opt.nce)
                batch_size = batch.size
                outputs['tgt_mask'] = tgt_mask
                loss_dict = self.loss_function(outputs, targets, model=self.model)
                loss_data = loss_dict['data']
                loss = loss_dict['loss']  # a little trick to avoid gradient overflow with fp16
                full_loss = loss
                if opt.mirror_loss:
                    rev_loss = loss_dict['rev_loss']
                    rev_loss_data = loss_dict['rev_loss_data']
                    mirror_loss = loss_dict['mirror_loss']
                    full_loss = full_loss + rev_loss + mirror_loss
                    mirror_loss_data = loss_dict['mirror_loss'].item()
                else:
                    rev_loss_data = None
                    mirror_loss_data = 0
                # reconstruction loss
                if opt.reconstruct:
                    rec_loss = loss_dict['rec_loss']
                    rec_loss = rec_loss
                    full_loss = full_loss + rec_loss
                    rec_loss_data = loss_dict['rec_loss_data']
                else:
                    rec_loss_data = None
                if opt.lfv_multilingual:
                    lid_logits = outputs['lid_logits']
                    lid_labels = batch.get('target_lang')
                    lid_loss_function = self.loss_function.get_loss_function('lid_loss')
                    lid_loss = lid_loss_function(lid_logits, lid_labels)
                    full_loss = full_loss + lid_loss
                optimizer = self.optim.optimizer
                # When the batch size is large, each gradient step is very easy to explode on fp16
                # Normalizing the loss to grad scaler ensures this will not happen
                full_loss.div_(grad_scaler)
                # reduction_disabled = False
                with amp.scale_loss(full_loss, optimizer) as scaled_loss:
                    scaled_loss.backward()
                del outputs
            except RuntimeError as e:
                if 'out of memory' in str(e):
                    print('[WARNING]: ran out of memory on GPU %d' % self.rank, flush=True)
                    oom = True
                    torch.cuda.empty_cache()
                    loss = 0
                    if opt.streaming:  # reset stream in this case ...
                        streaming_state = self.model.init_stream()
                    # NOTE(review): the OOM is re-raised, so the recovery code
                    # above never lets training continue — confirm intent.
                    raise e
                else:
                    raise e
            batch_size = batch.size
            src_size = batch.src_size
            tgt_size = batch.tgt_size
            num_accumulated_words.add_(tgt_size)
            num_accumulated_sents.add_(batch_size)
            # We only update the parameters after getting gradients from n mini-batches
            update_flag = False
            if counter >= opt.update_frequency:
                update_flag = True
            elif i == n_samples - 1:  # update for the last minibatch
                update_flag = True
            if update_flag:
                # accumulated gradient case, in this case the update frequency
                dist.all_reduce(num_accumulated_words, op=dist.ReduceOp.SUM, group=self.group)
                # if (counter == 1 and self.opt.update_frequency != 1) or counter > 1:
                grad_denom = 1 / grad_scaler
                if self.opt.normalize_gradient:
                    grad_denom = num_accumulated_words.item() * grad_denom
                else:
                    grad_denom = 1
                # When we accumulate the gradients, each gradient is already normalized by a constant grad_scaler
                normalize_gradients(amp.master_params(optimizer), grad_denom)
                # Update the parameters.
                self.optim.step()
                self.optim.zero_grad()
                self.model.zero_grad()
                counter = 0
                num_accumulated_words.zero_()
                num_accumulated_sents.zero_()
                num_updates = self.optim._step
                # Periodic mid-epoch validation + checkpoint every `save_every` updates.
                if opt.save_every > 0 and num_updates % opt.save_every == -1 % opt.save_every:
                    valid_loss = self.eval(self.valid_data)
                    valid_ppl = math.exp(min(valid_loss, 100))
                    if self.is_main():
                        print('Validation perplexity: %g' % valid_ppl)
                    ep = float(epoch) - 1. + ((float(i) + 1.) / n_samples)
                    self.save(ep, valid_ppl, itr=data_iterator)
            num_words = tgt_size
            report_loss.add_(loss_data)
            report_tgt_words.add_(num_words)
            report_src_words.add_(src_size)
            total_loss.add_(loss_data)
            total_words.add_(num_words)
            # total_tokens += batch.get('target_output').nelement()
            # total_non_pads += batch.get('target_output').ne(onmt.constants.PAD).sum().item()
            # batch_efficiency = total_non_pads / total_tokens
            if opt.reconstruct:
                report_rec_loss.add_(rec_loss_data)
            if opt.mirror_loss:
                report_rev_loss.add_(rev_loss_data)
                report_mirror_loss.add_(mirror_loss_data)
            # control the index a little bit to ensure the log is always printed
            if i == 0 or ((i+1) % opt.log_interval < self.world_size):
                dist.all_reduce(report_loss, op=dist.ReduceOp.SUM, group=self.group)
                dist.all_reduce(report_tgt_words, op=dist.ReduceOp.SUM, group=self.group)
                dist.all_reduce(report_src_words, op=dist.ReduceOp.SUM, group=self.group)
                if self.is_main():
                    log_string = ("Epoch %2d, %5d/%5d; ; ppl: %6.2f ; " %
                                  (epoch, i + 1, len(data_iterator),
                                   math.exp(report_loss.item() / report_tgt_words.item())))
                    if opt.reconstruct:
                        # NOTE(review): this all_reduce only runs on the main
                        # rank — other ranks would block; confirm against the
                        # upstream version of this trainer.
                        dist.all_reduce(report_rec_loss, op=dist.ReduceOp.SUM, group=self.group)
                        rec_ppl = math.exp(report_rec_loss.item() / report_src_words.item())
                        log_string += (" rec_ppl: %6.2f ; " % rec_ppl)
                    if opt.mirror_loss:
                        dist.all_reduce(report_rev_loss, op=dist.ReduceOp.SUM, group=self.group)
                        rev_ppl = math.exp(report_rev_loss.item() / report_tgt_words.item())
                        log_string += (" rev_ppl: %6.2f ; " % rev_ppl)
                        log_string += (" mir_loss: %6.2f ; " % (report_mirror_loss / report_tgt_words))
                    log_string += ("lr: %.7f ; updates: %7d; " %
                                   (self.optim.getLearningRate(),
                                    self.optim._step))
                    log_string += ("%5.0f src tok/s; %5.0f tgt tok/s; " %
                                   (report_src_words.item() / (time.time() - start),
                                    report_tgt_words.item() / (time.time() - start)))
                    log_string += ("%s elapsed" %
                                   str(datetime.timedelta(seconds=int(time.time() - self.start_time))))
                    self.print(log_string, flush=True)
                # Reset the per-window counters after logging.
                report_loss.zero_()
                report_tgt_words.zero_()
                report_src_words.zero_()
                report_rec_loss.zero_(); report_rev_loss.zero_(); report_mirror_loss.zero_()
                start = time.time()
            # increase i by world size
            i = i + self.world_size
        return total_loss / total_words
    # def run(self, save_file=None):
    def run(self, checkpoint=None):
        """
        Main training loop: optional warm-up, initial validation, then
        `opt.epochs` rounds of train_epoch()/eval()/save().

        :param checkpoint: resuming from a checkpoint is NOT implemented yet;
            passing a non-None value raises NotImplementedError.  The code
            after the raise is kept as a template for the future implementation
            and is currently unreachable.
        """
        opt = self.opt
        if checkpoint is not None:
            raise NotImplementedError
            # TODO: have loading checkpoints for each process
            # ---- everything below is dead code until the TODO is done ----
            self.model.load_state_dict(checkpoint['model'])
            prec_opt = checkpoint['opt'] if 'opt' in checkpoint else None
            opt.reset_optim = True
            if not opt.reset_optim:
                if self.is_main():
                    print("* Loading optimizer states ... ")
                self.optim.load_state_dict(checkpoint['optim'])
                if prec_opt is not None and hasattr(prec_opt, "fp16_mixed"):
                    # Only load amp information if the mode is the same
                    # Maybe its better to change between optimization mode?
                    if opt.fp16_mixed == prec_opt.fp16_mixed and opt.fp16 == prec_opt.fp16:
                        if 'amp' in checkpoint:
                            try:
                                amp.load_state_dict(checkpoint['amp'])
                            except Exception:
                                # loading the amp state can fail
                                pass
                # Only load the progress when we use the same optimizer
                if 'itr' in checkpoint:
                    itr_progress = checkpoint['itr']
                else:
                    itr_progress = None
                resume = True
                start_epoch = checkpoint['epoch'] if 'epoch' in checkpoint else 1
                if start_epoch is None:
                    start_epoch = 1
            else:
                itr_progress = None
                resume = False
                start_epoch = 1
            # Free the (large) checkpoint tensors as early as possible.
            del checkpoint['model']
            optim_state_dict = checkpoint['optim']
            # del checkpoint['optim']
            del checkpoint
        else:
            # Fresh run: start from epoch 1 with no iterator state.
            itr_progress = None
            resume = False
            start_epoch = 1
        if opt.load_encoder_from:
            self.load_encoder_weight(opt.load_encoder_from)
        #
        if opt.load_decoder_from:
            self.load_decoder_weight(opt.load_decoder_from)
        # if we are on a GPU: warm up the memory allocator
        if self.cuda:
            self.warm_up()
        # Baseline validation before any training.
        valid_loss = self.eval(self.valid_data)
        valid_ppl = math.exp(min(valid_loss, 100))
        if self.is_main():
            print('[INFO] Validation perplexity: %g' % valid_ppl, flush=True)
        self.start_time = time.time()
        for epoch in range(start_epoch, start_epoch + opt.epochs):
            self.print('')
            # (1) train for one epoch on the training set
            train_loss = self.train_epoch(epoch, resume=resume, itr_progress=itr_progress)
            train_ppl = math.exp(min(train_loss, 100))
            self.print('[INFO] Train perplexity: %g' % train_ppl)
            # (2) evaluate on the validation set
            valid_loss = self.eval(self.valid_data)
            valid_ppl = math.exp(min(valid_loss, 100))
            if self.is_main():
                print('[INFO] Validation perplexity: %g' % valid_ppl)
                self.save(epoch, valid_ppl)
            # Resume state only applies to the first epoch of a resumed run.
            itr_progress = None
            resume = False
| 39.040087 | 119 | 0.560249 |
acf6474e5f32147c022fe51cf27944cb26e9a90d | 1,364 | py | Python | GeekBrains_Python_Base_Work_With_Files_Practice/hw_5_4_generate_file_non_programm.py | MaximGasilin/Python_Education_Base | 811a4a8f32639f30f982b1f0e447f7b1eff06326 | [
"MIT"
] | null | null | null | GeekBrains_Python_Base_Work_With_Files_Practice/hw_5_4_generate_file_non_programm.py | MaximGasilin/Python_Education_Base | 811a4a8f32639f30f982b1f0e447f7b1eff06326 | [
"MIT"
] | null | null | null | GeekBrains_Python_Base_Work_With_Files_Practice/hw_5_4_generate_file_non_programm.py | MaximGasilin/Python_Education_Base | 811a4a8f32639f30f982b1f0e447f7b1eff06326 | [
"MIT"
] | 1 | 2020-05-17T17:22:46.000Z | 2020-05-17T17:22:46.000Z | # Задание # 4
#
# Task #4 (GeekBrains Python basics, lesson 5):
# Create a text file with lines of the form:
# One — 1
# Two — 2
# Three — 3
# Four — 4
from random import randint

# Lookup table mapping integers to their English names; tens (30..90) and 100
# are included so two-digit numbers can be composed as "<tens> <digit>".
mapping_int_to_word = {1: 'one', 2: 'two', 3: 'three', 4: 'four', 5: 'five', 6: 'six', 7: 'seven', 8: 'eight',
                       9: 'nine', 10: 'ten', 11: 'eleven', 12: 'twelve', 13: 'thirteen', 14: 'fourteen',
                       15: 'fifteen', 16: 'sixteen', 17: 'seventeen', 18: 'eighteen', 19: 'nineteen', 20: 'twenty',
                       30: 'thirty', 40: 'forty', 50: 'fifty', 60: 'sixty', 70: 'seventy', 80: 'eighty', 90: 'ninety',
                       100: 'hundred', 0: ''}
file_name = 'lesson_5_task_4_int_list.txt'
count_of_lines = 5
with open(file_name, 'w', encoding='utf-8') as file_obj:
    for el in range(count_of_lines):
        # rand_int = randint(1, 100)
        rand_int = randint(1, 9)
        # Numbers up to 20 (and exactly 100) have a single-word name ...
        if rand_int < 21 or rand_int == 100:
            # rand_str = mapping_int_to_word[rand_int] # Choose this code, if you want generate numbers from 1 to 100
            rand_str = mapping_int_to_word[rand_int]
        else:
            # ... otherwise compose "<tens> <digit>", e.g. 42 -> "forty two".
            rand_ten, rand_digit = divmod(rand_int, 10)
            rand_str = f'{mapping_int_to_word[rand_ten * 10]} {mapping_int_to_word[rand_digit]}'
        print(f'{rand_str.capitalize()} - {rand_int}', file=file_obj)
acf64753ce90d67b33e683929d10278e1796feac | 2,251 | py | Python | wasd_control.py | RoboClyde/leo_teleop | 53ce1b8b6c51186e08a355ab6ef129ac578c9f90 | [
"MIT"
] | null | null | null | wasd_control.py | RoboClyde/leo_teleop | 53ce1b8b6c51186e08a355ab6ef129ac578c9f90 | [
"MIT"
] | null | null | null | wasd_control.py | RoboClyde/leo_teleop | 53ce1b8b6c51186e08a355ab6ef129ac578c9f90 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import rospy
from geometry_msgs.msg import Twist
import tty
import sys
import termios
import signal
# Remember the terminal's original mode so it can be restored on exit.
orig_settings = termios.tcgetattr(sys.stdin)


def keyboardInterruptHandler(sig, frame):
    """SIGINT handler: restore the terminal settings, then exit cleanly.

    The parameters were renamed from (signal, frame) so the first argument no
    longer shadows the imported ``signal`` module; the signal machinery calls
    the handler positionally, so this is backward-compatible.
    """
    print("Caught interrupt")
    resetTerminal()
    exit(0)


def resetTerminal():
    """Restore the terminal attributes captured at startup."""
    tty.tcsetattr(sys.stdin, tty.TCSAFLUSH, orig_settings)


# Install the handler so Ctrl-C restores the terminal before exiting.
signal.signal(signal.SIGINT, keyboardInterruptHandler)
def controlls(letter, vel_msg, speed_l_f, speed_l_b, speed_a, vel_pub):
    """Dispatch a single key press to the matching drive command.

    'w'/'s' drive forwards/backwards, 'a'/'d' turn left/right, 'b' brakes;
    any other key is ignored.
    """
    actions = {
        "w": lambda: forwards(vel_msg, speed_l_f, vel_pub),
        "s": lambda: backwards(vel_msg, speed_l_b, vel_pub),
        "a": lambda: left(vel_msg, speed_a, vel_pub),
        "d": lambda: right(vel_msg, speed_a, vel_pub),
        "b": lambda: stop(vel_msg, vel_pub),
    }
    action = actions.get(letter)
    if action is not None:
        action()
def forwards(vel_msg, speed, vel_pub):
    """Command a straight forward drive at `speed` and publish the message."""
    vel_msg.angular.z = 0
    vel_msg.linear.x = speed
    vel_pub.publish(vel_msg)
def backwards(vel_msg, speed, vel_pub):
    """Command a straight reverse drive at `speed` and publish the message."""
    vel_msg.angular.z = 0
    vel_msg.linear.x = -speed
    vel_pub.publish(vel_msg)
def left(vel_msg, speed, vel_pub):
    """Command an in-place left turn at `speed` and publish the message."""
    vel_msg.angular.z = speed
    vel_msg.linear.x = 0
    vel_pub.publish(vel_msg)
def right(vel_msg, speed, vel_pub):
    """Command an in-place right turn at `speed` and publish the message."""
    vel_msg.angular.z = -speed
    vel_msg.linear.x = 0
    vel_pub.publish(vel_msg)
def stop(vel_msg, vel_pub):
    """Zero out both velocities (brake) and publish the message."""
    vel_msg.angular.z = 0
    vel_msg.linear.x = 0
    vel_pub.publish(vel_msg)
def main():
    """Read single key presses in cbreak mode and tele-operate the rover.

    Publishes Twist messages on the diff-drive topic until Esc is pressed,
    then restores the terminal settings.
    """
    rospy.init_node('move_forward',anonymous=True)
    velocity_publisher = rospy.Publisher('/controllers/diff_drive/cmd_vel', Twist, queue_size=10)
    vel_msg = Twist()
    # Fixed speeds for each command.
    linear_velocity_forwards = 10 # (m/s)
    linear_velocity_backwards = 5 # (m/s)
    angular_velocity = 5 # (rad/s)
    # Initialising the data to send to rover
    vel_msg.linear.x = 0
    vel_msg.linear.y = 0
    vel_msg.linear.z = 0
    vel_msg.angular.x = 0
    vel_msg.angular.y = 0
    vel_msg.angular.z = 0
    # cbreak: deliver key presses immediately, without waiting for Enter.
    tty.setcbreak(sys.stdin)
    x = 0
    print("Press Esc or Ctrl C to exit...")
    print("<--------------- Use the 'wasd' keys to control the rover, press 'b' to brake ---------------->")
    while x != '\x1b':  # Esc key
        x = sys.stdin.read(1)[0]
        controlls(x,vel_msg,linear_velocity_forwards,linear_velocity_backwards,angular_velocity,velocity_publisher)
    # Put the terminal back into its original mode before exiting.
    resetTerminal()
if __name__ == '__main__':
    try:
        main()
    except rospy.ROSInterruptException:
        # Raised by rospy when the node is shut down; exit quietly.
        pass
| 23.447917 | 109 | 0.721901 |
acf64777bbb1b87fe47e27fca1c931d480c7ac41 | 2,742 | py | Python | Examples/10_mof_swelling/prepare_mof.py | tamaswells/pysimm | 2586679a9eacdf1046baa2312c8f92c9247ac5be | [
"MIT"
] | 55 | 2017-01-14T09:11:54.000Z | 2022-01-28T17:17:34.000Z | Examples/10_mof_swelling/prepare_mof.py | tamaswells/pysimm | 2586679a9eacdf1046baa2312c8f92c9247ac5be | [
"MIT"
] | 13 | 2017-03-27T19:32:57.000Z | 2022-01-04T17:49:50.000Z | Examples/10_mof_swelling/prepare_mof.py | tamaswells/pysimm | 2586679a9eacdf1046baa2312c8f92c9247ac5be | [
"MIT"
] | 28 | 2016-08-29T12:30:58.000Z | 2021-12-04T07:24:07.000Z | import requests
import re
# NOTE(review): this script is Python 2 only (`StringIO` module, the `file()`
# builtin below) — confirm the intended interpreter before modernizing.
from StringIO import StringIO
from pysimm import system, lmps, forcefield
try:
    import pandas as pd
except ImportError:
    pd = None
# Check whether the pandas installed or not
if not pd:
    print('The script requires pandas to be installed. Exiting...')
    exit(1)
# Requesting the XYZ of the unit MOF cell from the web-resource
resp = requests.get('https://raw.githubusercontent.com/WMD-group/BTW-FF/master/structures/IRMOF-14.xyz')
xyz = StringIO(resp.text)
# Parsing the text stream to form the xyz-like pandas table
df = pd.read_table(xyz, sep='\s+', names=['tag', 'type', 'x', 'y', 'z'], usecols=[0, 1, 2, 3, 4], skiprows=1)
# Retyping the atom names: keep the element letter only ("C1" -> "C"),
# except zinc, which keeps its two-letter symbol.
df['type'] = df['type'].map(lambda vr: vr[0] if vr[0] != 'Z' else 'Zn')
# Writing XYZ
with file('irmof-14_clean.xyz', 'w') as f:
    f.write(str(len(df)) + '\nThis is the place for the header of your XYZ file\n')
    df[['type', 'x', 'y', 'z']].to_csv(f, sep='\t', header=False, index=False)
# Initial setup of the pysimm System with MOF: one Particle per atom line,
# with trailing columns interpreted as 1-based bond partner tags.
s = system.System()
tmp = resp.text.encode('ascii', 'ignore').split('\n')
for line in tmp[1:-1]:
    data = line.split()
    tag, ptype, x, y, z, restof = data[:6]
    elem = re.sub('\d+', '', ptype)
    bonds = map(int, data[6:])
    p = system.Particle(tag=int(tag), elem=elem, type_name=ptype, x=float(x), y=float(y), z=float(z), bonds=bonds)
    s.particles.add(p)
# Add each bond once (only when this particle's tag is the smaller one).
for p in s.particles:
    for pb in p.bonds:
        if p.tag < pb:
            s.bonds.add(system.Bond(a=p, b=s.particles[pb]))
s.add_particle_bonding()
# Assign Dreiding forcefield parameters to the atoms of the structure
f = forcefield.Dreiding()
o_3 = s.particle_types.add(f.particle_types.get('O_3')[0].copy())
o_r = s.particle_types.add(f.particle_types.get('O_R')[0].copy())
c_r = s.particle_types.add(f.particle_types.get('C_R')[0].copy())
zn = s.particle_types.add(f.particle_types.get('Zn')[0].copy())
h_ = s.particle_types.add(f.particle_types.get('H_')[0].copy())
for p in s.particles:
    if p.elem == 'O':
        # 4-coordinated oxygen is sp3 (O_3), otherwise aromatic/resonant (O_R).
        if p.bonds.count == 4:
            p.type = o_3
        else:
            p.type = o_r
    if p.elem == 'C':
        p.type = c_r
    if p.elem == 'Zn':
        p.type = zn
    if p.elem == 'H':
        p.type = h_
f.assign_btypes(s)
f.assign_atypes(s)
f.assign_dtypes(s)
f.assign_itypes(s)
# Assign the calculation box size assuming it is cubic
cc_bnd_lngth = 1.363
dim = cc_bnd_lngth / 2 + max(df['x'].values) - min(df['x'].values)
s.dim = system.Dimension(dx=dim, dy=dim, dz=dim, center=[dim/2, dim/2, dim/2])
s.forcefield = 'dreiding-lj'
s.pair_style = 'lj'
s.bond_style = 'harmonic'
s.angle_style = 'harmonic'
s.dihedral_style = 'harmonic'
s.improper_style = 'harmonic'
s.write_lammps('irmof-14.lmps')
acf6480d5cdbe48e7b3df26c092381a2a7b347a2 | 628 | py | Python | manage.py | death-finger/get2unix | 1ff6f729f076040d6493251471cc0ee9cdcdc661 | [
"MIT"
] | null | null | null | manage.py | death-finger/get2unix | 1ff6f729f076040d6493251471cc0ee9cdcdc661 | [
"MIT"
] | null | null | null | manage.py | death-finger/get2unix | 1ff6f729f076040d6493251471cc0ee9cdcdc661 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Run administrative tasks (standard Django manage.py entry point)."""
    # Point Django at this project's settings module before importing it.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'get2unix.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        # Re-raise with a friendlier hint while preserving the original cause.
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)


if __name__ == '__main__':
    main()
| 28.545455 | 73 | 0.683121 |
acf64877a22f4f8d5e3f1135036efe29404a2fb4 | 763 | py | Python | tests/issues/test_issue523.py | wjsi/mars-profiling | 1accb00c90da67b46ad98ea1592fecc524625454 | [
"MIT"
] | 1 | 2021-01-21T09:11:58.000Z | 2021-01-21T09:11:58.000Z | tests/issues/test_issue523.py | wjsi/mars-profiling | 1accb00c90da67b46ad98ea1592fecc524625454 | [
"MIT"
] | null | null | null | tests/issues/test_issue523.py | wjsi/mars-profiling | 1accb00c90da67b46ad98ea1592fecc524625454 | [
"MIT"
] | 1 | 2020-12-30T08:32:29.000Z | 2020-12-30T08:32:29.000Z | """
Test for issue 523:
https://github.com/pandas-profiling/pandas-profiling/issues/523
"""
import pandas as pd
import pytest
from mars_profiling import ProfileReport
@pytest.mark.skipif(
    int(pd.__version__.split(".")[0]) < 1, reason="requires pandas 1 or higher"
)
def test_issue523():
    """Profiling a nullable Int64 column containing pd.NA must not crash."""
    # https://github.com/pandas-dev/pandas/issues/33803
    data = [
        1871248,
        12522551,
        1489260,
        6657093,
        pd.NA,
        pd.NA,
        pd.NA,
        pd.NA,
        pd.NA,
        1489260,
        pd.NA,
        2468576,
    ]
    # Nullable integer dtype (pandas >= 1.0) is what triggered the bug.
    df = pd.DataFrame({"col": data}, dtype=pd.Int64Dtype())

    profile_report = ProfileReport(df, title="Test Report")
    # A non-empty HTML report is enough to prove profiling succeeded.
    assert len(profile_report.to_html()) > 0
| 21.8 | 80 | 0.576671 |
acf649864d2319c9067fc24551356ee821e5cf3e | 3,469 | py | Python | src/python/WMCore/BossAir/RunJob.py | vkuznet/WMCore | 001cc51651052405a7ecd811cde91da611b1dc57 | [
"Apache-2.0"
] | 21 | 2015-11-19T16:18:45.000Z | 2021-12-02T18:20:39.000Z | src/python/WMCore/BossAir/RunJob.py | vkuznet/WMCore | 001cc51651052405a7ecd811cde91da611b1dc57 | [
"Apache-2.0"
] | 5,671 | 2015-01-06T14:38:52.000Z | 2022-03-31T22:11:14.000Z | src/python/WMCore/BossAir/RunJob.py | vkuznet/WMCore | 001cc51651052405a7ecd811cde91da611b1dc57 | [
"Apache-2.0"
] | 67 | 2015-01-21T15:55:38.000Z | 2022-02-03T19:53:13.000Z | #!/usr/bin/env python
"""
_RunJob_
The runJob class object.
It is very simple.
"""
from WMCore.WMBS.Job import Job
class RunJob(dict):
    """
    _RunJob_

    Dictionary-backed record describing a single BossAir run job.  All known
    fields are pre-populated with defaults so later code can simply assign
    into them.
    """

    # Default value for every recognised field, in canonical insertion order.
    # Fields with no meaningful default start as None.
    _FIELD_DEFAULTS = (
        ('id', None),
        ('jobid', None),  # overridden by the constructor argument
        ('gridid', None),
        ('bulkid', None),
        ('retry_count', 0),
        ('status', None),
        ('location', None),
        ('site_cms_name', None),
        ('userdn', None),
        ('usergroup', ''),
        ('userrole', ''),
        ('plugin', None),
        ('cache_dir', None),
        ('status_time', None),
        ('packageDir', None),
        ('sandbox', None),
        ('wf_priority', None),
        ('task_type', None),
        ('possibleSites', None),
        ('swVersion', None),
        ('scramArch', None),
        ('siteName', None),
        ('name', None),
        ('proxyPath', None),
        ('request_name', None),
        ('estimatedJobTime', None),
        ('estimatedDiskUsage', None),
        ('estimatedMemoryUsage', None),
        ('numberOfCores', 1),
        ('taskPriority', None),
        ('task_name', None),
        ('task_id', None),
        ('potentialSites', None),
        ('inputDataset', None),
        ('inputDatasetLocations', None),
        ('inputPileup', None),
        ('allowOpportunistic', False),
        ('activity', None),
        ('requiresGPU', 'forbidden'),
        ('gpuRequirements', None),
        ('requestType', None),
    )

    def __init__(self, jobid=-1):
        """
        Pre-populate every known field with its default; `jobid` comes from
        the constructor argument so callers can seed the WMBS job id.
        """
        super(RunJob, self).__init__()
        for key, value in self._FIELD_DEFAULTS:
            self.setdefault(key, jobid if key == 'jobid' else value)
        return

    def buildFromJob(self, job):
        """
        _buildFromJob_

        Populate this RunJob from a WMBS Job: map the handful of renamed
        fields explicitly, then copy every key the two records share.
        """
        # Explicitly mapped / renamed fields.
        self['jobid'] = job.get('id', None)
        self['retry_count'] = job.get('retry_count', None)
        self['userdn'] = job.get('owner', None)
        self['usergroup'] = job.get('usergroup', '')
        self['userrole'] = job.get('userrole', '')
        self['siteName'] = job.get('custom', {}).get('location', None)
        # Shared keys copy straight across.
        self.update({key: job[key] for key in job if key in self})
        return

    def buildWMBSJob(self):
        """
        _buildWMBSJob_

        Build a WMBS Job mirroring this RunJob, attaching a (null)
        couch_record since downstream code usually expects one.
        """
        job = Job(id=self['jobid'])
        job['retry_count'] = self['retry_count']
        job['couch_record'] = None
        job['owner'] = self['userdn']
        job['usergroup'] = self['usergroup']
        job['userrole'] = self['userrole']
        for key in self:
            if key == 'id':
                continue  # 'id' already went into the Job constructor
            job[key] = self[key]
        return job
| 29.398305 | 70 | 0.589795 |
acf649f53ea012a787355158f0fe9a1b777f977f | 6,064 | py | Python | Travelling Salesman Problem/TSP.py | satvik-tiwari/Genetic-Algorithm | 496b285c1bf9f3634d2e565616b9188e6741b35e | [
"Apache-2.0"
] | null | null | null | Travelling Salesman Problem/TSP.py | satvik-tiwari/Genetic-Algorithm | 496b285c1bf9f3634d2e565616b9188e6741b35e | [
"Apache-2.0"
] | null | null | null | Travelling Salesman Problem/TSP.py | satvik-tiwari/Genetic-Algorithm | 496b285c1bf9f3634d2e565616b9188e6741b35e | [
"Apache-2.0"
] | 4 | 2019-10-20T15:32:56.000Z | 2022-01-05T15:27:03.000Z | #importing necessary libraries
import numpy as np
import random as rd
import pandas as pd
import matplotlib.pyplot as plt
# Load the inter-city distance matrix; column 0 holds city labels.
data = pd.read_csv('TSP.csv')
distances = data.iloc[:, 1:].values
distances.shape  # NOTE(review): no-op outside a notebook — likely a leftover
data.head()      # NOTE(review): no-op outside a notebook — likely a leftover
print(distances)
# Creating initial Population: each individual is a random permutation of the
# 15 city labels (1..15).
num_generations = 500
solutions_per_pop = 8
pop_size = (solutions_per_pop, distances.shape[1])
initial_pop = np.empty(pop_size)
for i in range(pop_size[0]):
    initial_pop[i, :] = (rd.sample(range(1,16), pop_size[1]))
initial_pop = initial_pop.astype(int)
print('Initial Population: \n {0}'.format(initial_pop))
print(initial_pop.shape)
print(distances.shape)
def cal_fitness(population, distances):
    """Return an (N, 1) array holding each individual's total route cost.

    Each row of `population` is a permutation of 1-based city labels; the
    cost is the sum of distances between consecutive cities, so LOWER values
    mean fitter individuals.
    """
    costs = np.empty((len(population), 1))
    for row, route in enumerate(population):
        total = 0
        for here, there in zip(route[:-1], route[1:]):
            total += distances[here - 1][there - 1]
        costs[row][0] = total
    return costs
def selection(population, fitness, num_parents):
parents = np.empty((num_parents, population.shape[1]))
for i in range(num_parents):
(individual_1, individual_2) = rd.sample(range(1,population.shape[0]+1), 2)
if fitness[individual_1 - 1][0] < fitness[individual_2 - 1][0]:
parent_idx = individual_1 - 1
else:
parent_idx = individual_2 - 1
parents[i, :] = population[parent_idx, :]
return parents
def search(swath, element):
    """Return True when `element` occurs anywhere in `swath`, else False."""
    return any(item == element for item in swath)
# Performing crossover using PMX (Partially Mapped Crossover): a random swath
# is copied from one parent, conflicting genes are placed via the PMX mapping,
# and remaining slots are filled from the other parent.  Each adjacent pair of
# parents produces two children (roles swapped for the second one).
def crossover(parents, num_offsprings):
    # -999 marks "not yet filled" slots in the offspring rows.
    offsprings = np.full((num_offsprings, parents.shape[1]), -999, dtype=int)
    i = 0
    while i < parents.shape[0]:
        parent1 = parents[i, :]
        parent2 = parents[i+1, :]
        num_children = 1
        while num_children < 3:
            # Random crossover swath [idx1, idx2] with idx1 < idx2.
            idx1 = rd.randint(0, parents.shape[1] - 2)
            idx2 = rd.randint(idx1+1, parents.shape[1] - 1)
            swath_p1 = parent1[idx1:idx2+1]
            swath_p2 = parent2[idx1:idx2+1]
            # Copy parent1's swath directly into the child.
            offsprings[i, idx1:idx2+1] = swath_p1
            # Place parent2's swath genes that conflict with the copied swath,
            # following the PMX mapping until a free position is found.
            for j in range(idx1, idx2+1, 1):
                p2_pos = p1_pos = j
                p2_element = parent2[j];
                if not search(swath_p1, p2_element):
                    flag = False
                    while not flag:
                        p1_element = parent1[p2_pos]
                        if search(swath_p2, p1_element):
                            # Still mapped inside the swath: follow the chain.
                            p2_element = p1_element
                            p2_pos = np.where(parent2 == p1_element)
                            continue
                        flag = True
                    offsprings[i, np.where(parent2 == p1_element)] = parent2[j]
            # Any slot still unfilled inherits parent2's gene at that position.
            for j in range(offsprings.shape[1]):
                if offsprings[i, j] == -999:
                    offsprings[i, j] = parent2[j]
            # Swap parent roles so the second child mirrors the first.
            parent1, parent2 = parent2, parent1
            i += 1
            num_children += 1
    return offsprings
def mutation(offsprings):
mutation_rate = 0.40
mutants = np.empty(offsprings.shape)
for i in range(len(offsprings)):
mutants[i, :] = offsprings[i, :]
random_value = rd.random()
if random_value > mutation_rate:
continue
idx1, idx2 = rd.sample(range(0, offsprings.shape[1]), 2)
mutants[i][idx1], mutants[i][idx2] = mutants[i][idx2], mutants[i][idx1]
return mutants
def new_population(curr_population, distances, mutants, fitness_curr_pop):
    """Elitist survivor selection.

    Pools the current population with its mutated offspring and keeps the
    lowest-cost individuals, preserving the population size. A selected
    slot's fitness is overwritten with a large sentinel so each individual
    is picked at most once.
    """
    n_curr = len(curr_population)
    mutant_fitness = cal_fitness(mutants, distances)
    pooled_fitness = np.empty((n_curr + len(mutants), 1))
    pooled_fitness[0:n_curr, 0] = fitness_curr_pop[:, 0]
    pooled_fitness[n_curr:, 0] = mutant_fitness[:, 0]
    survivors = np.empty(curr_population.shape)
    for slot in range(len(survivors)):
        best_idx = np.argmin(pooled_fitness)
        if best_idx < n_curr:
            survivors[slot, :] = curr_population[best_idx, :]
        else:
            survivors[slot, :] = mutants[best_idx - n_curr, :]
        # Knock this individual out of contention.
        pooled_fitness[best_idx] = 99999999
    return survivors
def genetic_algorithm(population, distances, pop_size, num_generations):
    """Run the GA loop for the travelling-salesman tour.

    Args:
        population: int array (pop_size, num_cities) of 1-based city tours.
        distances: pairwise city distance matrix.
        pop_size: population size (unused here; size is taken from population).
        num_generations: number of evolution iterations.

    Returns:
        (fitness_history, fittest_individual): per-generation fitness arrays
        and a one-element list holding the selected final individual.
    """
    fitness_history, fittest_individual = [], []
    num_offsprings = num_parents = len(population)
    for i in range(num_generations):
        fitness = cal_fitness(population, distances)
        fitness_history.append(fitness)
        parents = selection(population, fitness, num_parents)
        offsprings = crossover(parents, num_offsprings)
        mutants = mutation(offsprings)
        mutants = mutants.astype(int)
        population = new_population(population, distances, mutants, fitness)
        population = population.astype(int)
    print('Last generation: \n{}\n'.format(population))
    fitness_last_gen = cal_fitness(population, distances)
    print('Fitness of the last generation: \n{}\n'.format(fitness_last_gen.astype(int)))
    # NOTE(review): fitness here is a route *cost* that the GA minimises
    # (selection/new_population both prefer the minimum), yet np.max picks
    # the highest-cost tour of the last generation -- this looks like it
    # should be np.min; confirm intent before changing.
    max_fitness = np.where(fitness_last_gen == np.max(fitness_last_gen))
    fittest_individual.append(population[max_fitness[0][0],:])
    return fitness_history, fittest_individual
# --- Driver: run the GA and visualise convergence ---------------------------
# NOTE(review): initial_pop, distances, pop_size and num_generations are
# defined earlier in this script (outside this excerpt).
fitness_history, calculated_path = genetic_algorithm(initial_pop, distances, pop_size, num_generations)
print('The path to be taken by salesman as calculated by Genetic Algorithm:\n{}'.format(list(calculated_path)))
# Mean and max fitness per generation, for the convergence plot below.
fitness_history_mean = [np.mean(fitness) for fitness in fitness_history]
fitness_history_max = [np.max(fitness) for fitness in fitness_history]
plt.plot(list(range(num_generations)), fitness_history_mean, label = 'Mean Fitness')
plt.plot(list(range(num_generations)), fitness_history_max, label = 'Max Fitness')
plt.legend()
plt.title('Fitness through the generations')
plt.xlabel('Generations')
plt.ylabel('Fitness')
plt.show()
| 38.138365 | 111 | 0.638522 |
acf64a4574cea0415ec5c08e8b313587dc1ed550 | 7,743 | py | Python | Image Inpainting/Globally and Locally Consistent Image Completion/models.py | Xiefan-Guo/Paper-PyTorch | 5dbb68ba78f427b56e75ddbd95a68475951e1514 | [
"MIT"
] | 2 | 2021-12-29T03:02:15.000Z | 2021-12-29T06:31:18.000Z | Image Inpainting/Globally and Locally Consistent Image Completion/models.py | Xiefan-Guo/Paper-PyTorch | 5dbb68ba78f427b56e75ddbd95a68475951e1514 | [
"MIT"
] | null | null | null | Image Inpainting/Globally and Locally Consistent Image Completion/models.py | Xiefan-Guo/Paper-PyTorch | 5dbb68ba78f427b56e75ddbd95a68475951e1514 | [
"MIT"
] | null | null | null | import torch
import torch.nn as nn
class CompletionNetwork(nn.Module):
    """Completion (generator) network of GLCIC.

    Input:  (batch, 4, H, W)  -- RGB image concatenated with a binary mask.
    Output: (batch, 3, H, W)  -- completed RGB image in [0, 1] (sigmoid).

    The trunk downsamples twice (H/4), applies dilated convolutions to grow
    the receptive field without further downsampling, then upsamples back.
    """

    def __init__(self):
        super(CompletionNetwork, self).__init__()

        def conv_bn_relu(c_in, c_out, k, s, p, d=1):
            # Conv -> BatchNorm -> ReLU, optionally dilated (d > 1).
            return [
                nn.Conv2d(c_in, c_out, k, s, p, dilation=d),
                nn.BatchNorm2d(c_out),
                nn.ReLU(),
            ]

        def deconv_bn_relu(c_in, c_out, k, s, p):
            # Transposed conv -> BatchNorm -> ReLU (2x upsampling stage).
            return [
                nn.ConvTranspose2d(c_in, c_out, k, s, p),
                nn.BatchNorm2d(c_out),
                nn.ReLU(),
            ]

        layers = []
        layers += conv_bn_relu(4, 64, 5, 1, 2)
        layers += conv_bn_relu(64, 128, 3, 2, 1)        # -> H/2
        layers += conv_bn_relu(128, 128, 3, 1, 1)
        layers += conv_bn_relu(128, 256, 3, 2, 1)       # -> H/4
        layers += conv_bn_relu(256, 256, 3, 1, 1)
        layers += conv_bn_relu(256, 256, 3, 1, 1)
        layers += conv_bn_relu(256, 256, 3, 1, 2, d=2)  # dilated block
        layers += conv_bn_relu(256, 256, 3, 1, 4, d=4)
        layers += conv_bn_relu(256, 256, 3, 1, 8, d=8)
        layers += conv_bn_relu(256, 256, 3, 1, 16, d=16)
        layers += conv_bn_relu(256, 256, 3, 1, 1)
        layers += conv_bn_relu(256, 256, 3, 1, 1)
        layers += deconv_bn_relu(256, 128, 4, 2, 1)     # -> H/2
        layers += conv_bn_relu(128, 128, 3, 1, 1)
        layers += deconv_bn_relu(128, 64, 4, 2, 1)      # -> H
        layers += conv_bn_relu(64, 32, 3, 1, 1)
        layers += [nn.Conv2d(32, 3, 3, 1, 1), nn.Sigmoid()]
        self.model = nn.Sequential(*layers)

    def forward(self, imgs):
        """Run completion; output is shaped like the RGB part of the input."""
        return self.model(imgs)
class LocalDiscriminator(nn.Module):
    """Discriminator over the local patch around the completed region.

    input_size: (channels, height, width) of the local crop; height and
    width are divided by 32 across five stride-2 convolutions.
    Output: (batch, 1024) feature vector.
    """

    def __init__(self, input_size):
        super(LocalDiscriminator, self).__init__()

        def down(c_in, c_out):
            # 5x5 stride-2 conv halves the spatial size each stage.
            return [
                nn.Conv2d(c_in, c_out, 5, 2, 2),
                nn.BatchNorm2d(c_out),
                nn.ReLU(),
            ]

        self.in_channels, self.img_h, self.img_w = input_size[0], input_size[1], input_size[2]
        stages = []
        for c_in, c_out in [(self.in_channels, 64), (64, 128), (128, 256),
                            (256, 512), (512, 512)]:
            stages += down(c_in, c_out)
        self.conv = nn.Sequential(*stages)
        # Flattened size after five halvings of the spatial dimensions.
        in_features = 512 * (self.img_h // 32) * (self.img_w // 32)
        self.fc = nn.Sequential(
            nn.Linear(in_features, 1024),
            nn.ReLU(),
        )

    def forward(self, imgs):
        feats = self.conv(imgs)
        feats = feats.view(feats.size(0), -1)
        return self.fc(feats)
class GlobalDiscriminator(nn.Module):
    """Discriminator over the whole (global) image.

    input_size: (channels, height, width) of the full image.
    dataset: "celeba" (five stride-2 convs, /32) or "places2" (one extra
    stride-2 conv, /64). Output: (batch, 1024) feature vector.

    Raises:
        ValueError: if *dataset* is neither "celeba" nor "places2".
    """

    def __init__(self, input_size, dataset="celeba"):
        super(GlobalDiscriminator, self).__init__()

        def down(c_in, c_out):
            # 5x5 stride-2 conv halves the spatial size each stage.
            return [
                nn.Conv2d(c_in, c_out, 5, 2, 2),
                nn.BatchNorm2d(c_out),
                nn.ReLU(),
            ]

        self.in_channels, self.img_h, self.img_w = input_size[0], input_size[1], input_size[2]
        self.dataset = dataset
        stages = []
        for c_in, c_out in [(self.in_channels, 64), (64, 128), (128, 256),
                            (256, 512), (512, 512)]:
            stages += down(c_in, c_out)
        self.conv1 = nn.Sequential(*stages)
        if dataset == "celeba":
            in_features = 512 * (self.img_h // 32) * (self.img_w // 32)
            self.fc = nn.Sequential(
                nn.Linear(in_features, 1024),
                nn.ReLU(),
            )
        elif dataset == "places2":
            # One additional downsampling stage for the larger Places2 images.
            self.conv2 = nn.Sequential(*down(512, 512))
            in_features = 512 * (self.img_h // 64) * (self.img_w // 64)
            self.fc = nn.Sequential(
                nn.Linear(in_features, 1024),
                nn.ReLU(),
            )
        else:
            raise ValueError("Unsupported architecture for %s" % (dataset))

    def forward(self, imgs):
        feats = self.conv1(imgs)
        if self.dataset == "places2":
            feats = self.conv2(feats)
        feats = feats.view(feats.size(0), -1)
        return self.fc(feats)
class ContextDiscriminator(nn.Module):
    """Joint discriminator combining local-patch and global-image branches.

    forward() takes a pair (local_imgs, global_imgs) and returns a
    (batch, 1) real/fake probability via a sigmoid head over the two
    concatenated 1024-dim branch features.
    """

    def __init__(self, local_img_size, global_img_size, dataset="celeba"):
        super(ContextDiscriminator, self).__init__()
        self.local_model = LocalDiscriminator(local_img_size)
        self.global_model = GlobalDiscriminator(global_img_size, dataset=dataset)
        # 1024 (local) + 1024 (global) concatenated features -> scalar score.
        self.fc = nn.Sequential(
            nn.Linear(2048, 1),
            nn.Sigmoid(),
        )

    def forward(self, imgs):
        patch_imgs, whole_imgs = imgs
        joint = torch.cat(
            (self.local_model(patch_imgs), self.global_model(whole_imgs)), -1)
        return self.fc(joint)
# Smoke test when run as a script: instantiate the generator (printed below).
net = CompletionNetwork()
print(net) | 40.328125 | 110 | 0.563993 |
acf64abacd5c47f15979931437ca915fed51e17a | 1,449 | py | Python | examples/gtk.py | zonbrisad/pyplate | 177255d35d2c82cafdd33f96e4bc4229317705d4 | [
"FTL",
"RSA-MD"
] | null | null | null | examples/gtk.py | zonbrisad/pyplate | 177255d35d2c82cafdd33f96e4bc4229317705d4 | [
"FTL",
"RSA-MD"
] | null | null | null | examples/gtk.py | zonbrisad/pyplate | 177255d35d2c82cafdd33f96e4bc4229317705d4 | [
"FTL",
"RSA-MD"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#----------------------------------------------------------------------------
#
# Example of gtk bindings.
#
# File: gtk.py
# Author: Peter Malmberg <peter.malmberg@gmail.com>
# Date: 2022-05-25
# License: MIT
# Python: 3
#
#----------------------------------------------------------------------------
# Pyplate
# This file is generated from pyplate Python template generator.
#
# Pyplate is developed by:
# Peter Malmberg <peter.malmberg@gmail.com>
#
# Available at:
# https://github.com/zobrisad/pyplate.git
#
# ---------------------------------------------------------------------------
#
# Imports -------------------------------------------------------------------
import sys
import os
import traceback
import gi
gi.require_version("Gtk", "3.0")
from gi.repository import Gtk
# Code ----------------------------------------------------------------------
def main():
    """Open a minimal GTK window and block in the GTK main loop.

    The loop exits (and this function returns) when the window is closed.
    """
    window = Gtk.Window(title="Hello World")
    window.show()
    # Quit the main loop when the window's close button is pressed.
    window.connect("delete-event", Gtk.main_quit)
    Gtk.main()
# Main program handle
if __name__ == "__main__":
    try:
        main()
        sys.exit(0)
    except KeyboardInterrupt as e: # Ctrl-C
        raise e
    except SystemExit as e: # sys.exit()
        raise e
    except Exception as e:
        # Catch-all for unexpected failures: report and hard-exit.
        print('ERROR, UNEXPECTED EXCEPTION')
        print(str(e))
        traceback.print_exc()
        # os._exit skips interpreter cleanup (atexit handlers, buffers).
        os._exit(1)
| 24.15 | 77 | 0.462388 |
acf64acd91d0226ab2959ba1faef20b12c75722e | 6,343 | py | Python | tools/json_schema_compiler/compiler.py | pozdnyakov/chromium-crosswalk | 0fb25c7278bf1d93e53a3b0bcb75aa8b99d4b26e | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 9 | 2018-09-21T05:36:12.000Z | 2021-11-15T15:14:36.000Z | tools/json_schema_compiler/compiler.py | pozdnyakov/chromium-crosswalk | 0fb25c7278bf1d93e53a3b0bcb75aa8b99d4b26e | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null | tools/json_schema_compiler/compiler.py | pozdnyakov/chromium-crosswalk | 0fb25c7278bf1d93e53a3b0bcb75aa8b99d4b26e | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 3 | 2018-11-28T14:54:13.000Z | 2020-07-02T07:36:07.000Z | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generator for C++ structs from api json files.
The purpose of this tool is to remove the need for hand-written code that
converts to and from base::Value types when receiving javascript api calls.
Originally written for generating code for extension apis. Reference schemas
are in chrome/common/extensions/api.
Usage example:
compiler.py --root /home/Work/src --namespace extensions windows.json
tabs.json
compiler.py --destdir gen --root /home/Work/src
--namespace extensions windows.json tabs.json
"""
import optparse
import os
import sys
from cpp_bundle_generator import CppBundleGenerator
from cpp_generator import CppGenerator
from cpp_type_generator import CppTypeGenerator
from dart_generator import DartGenerator
import json_schema
from model import Model, UnixName
from schema_loader import SchemaLoader
# Names of supported code generators, as specified on the command-line.
# First is default.
GENERATORS = ['cpp', 'cpp-bundle', 'dart']
def GenerateSchema(generator,
                   filenames,
                   root,
                   destdir,
                   root_namespace,
                   dart_overrides_dir):
  """Compile the given schema files with the selected generator.

  Args:
    generator: one of GENERATORS ('cpp', 'cpp-bundle', 'dart').
    filenames: schema file paths (several only in 'cpp-bundle' mode).
    root: logical include root; schema paths are made relative to it.
    destdir: if set, generated files are written under this directory.
    root_namespace: C++ namespace for the generated code.
    dart_overrides_dir: directory of custom Dart overrides (Dart only).

  Returns:
    The concatenated generated output as a single string.
  """
  schema_loader = SchemaLoader(os.path.dirname(os.path.relpath(
      os.path.normpath(filenames[0]), root)))
  # Merge the source files into a single list of schemas.
  api_defs = []
  for filename in filenames:
    schema = os.path.normpath(filename)
    schema_filename, schema_extension = os.path.splitext(schema)
    path, short_filename = os.path.split(schema_filename)
    api_def = schema_loader.LoadSchema(schema)

    # If compiling the C++ model code, delete 'nocompile' nodes.
    if generator == 'cpp':
      api_def = json_schema.DeleteNodes(api_def, 'nocompile')
    api_defs.extend(api_def)

  api_model = Model()

  # For single-schema compilation make sure that the first (i.e. only) schema
  # is the default one.
  default_namespace = None

  # Load the actual namespaces into the model.
  for target_namespace, schema_filename in zip(api_defs, filenames):
    relpath = os.path.relpath(os.path.normpath(schema_filename), root)
    namespace = api_model.AddNamespace(target_namespace,
                                       relpath,
                                       include_compiler_options=True)
    if default_namespace is None:
      default_namespace = namespace

    path, filename = os.path.split(schema_filename)
    short_filename, extension = os.path.splitext(filename)

    # Filenames are checked against the unix_names of the namespaces they
    # generate because the gyp uses the names of the JSON files to generate
    # the names of the .cc and .h files. We want these to be using unix_names.
    if namespace.unix_name != short_filename:
      sys.exit("Filename %s is illegal. Name files using unix_hacker style." %
               schema_filename)

  # The output filename must match the input filename for gyp to deal with it
  # properly.
  out_file = namespace.unix_name

  # Construct the type generator with all the namespaces in this model.
  type_generator = CppTypeGenerator(api_model,
                                    schema_loader,
                                    default_namespace=default_namespace)
  if generator == 'cpp-bundle':
    cpp_bundle_generator = CppBundleGenerator(root,
                                              api_model,
                                              api_defs,
                                              type_generator,
                                              root_namespace)
    generators = [
      ('generated_api.cc', cpp_bundle_generator.api_cc_generator),
      ('generated_api.h', cpp_bundle_generator.api_h_generator),
      ('generated_schemas.cc', cpp_bundle_generator.schemas_cc_generator),
      ('generated_schemas.h', cpp_bundle_generator.schemas_h_generator)
    ]
  elif generator == 'cpp':
    cpp_generator = CppGenerator(type_generator, root_namespace)
    generators = [
      ('%s.h' % namespace.unix_name, cpp_generator.h_generator),
      ('%s.cc' % namespace.unix_name, cpp_generator.cc_generator)
    ]
  elif generator == 'dart':
    generators = [
      ('%s.dart' % namespace.unix_name, DartGenerator(
          dart_overrides_dir))
    ]
  else:
    raise Exception('Unrecognised generator %s' % generator)

  output_code = []
  for filename, generator in generators:
    code = generator.Generate(namespace).Render()
    if destdir:
      with open(os.path.join(destdir, namespace.source_file_dir,
                             filename), 'w') as f:
        f.write(code)
    output_code += [filename, '', code, '']

  return '\n'.join(output_code)
# NOTE: this script targets Python 2 (see the `print result` statement below).
if __name__ == '__main__':
  parser = optparse.OptionParser(
      description='Generates a C++ model of an API from JSON schema',
      usage='usage: %prog [option]... schema')
  parser.add_option('-r', '--root', default='.',
      help='logical include root directory. Path to schema files from specified'
      'dir will be the include path.')
  parser.add_option('-d', '--destdir',
      help='root directory to output generated files.')
  parser.add_option('-n', '--namespace', default='generated_api_schemas',
      help='C++ namespace for generated files. e.g extensions::api.')
  parser.add_option('-g', '--generator', default=GENERATORS[0],
      choices=GENERATORS,
      help='The generator to use to build the output code. Supported values are'
      ' %s' % GENERATORS)
  parser.add_option('-D', '--dart-overrides-dir', dest='dart_overrides_dir',
      help='Adds custom dart from files in the given directory (Dart only).')

  (opts, filenames) = parser.parse_args()

  if not filenames:
    sys.exit(0) # This is OK as a no-op

  # Unless in bundle mode, only one file should be specified.
  if opts.generator != 'cpp-bundle' and len(filenames) > 1:
    # TODO(sashab): Could also just use filenames[0] here and not complain.
    raise Exception(
        "Unless in bundle mode, only one file can be specified at a time.")

  result = GenerateSchema(opts.generator, filenames, opts.root, opts.destdir,
                          opts.namespace, opts.dart_overrides_dir)
  # Without --destdir, dump the generated code to stdout (Python 2 print).
  if not opts.destdir:
    print result
| 39.64375 | 80 | 0.672395 |
acf64b910771903914418fc094b8206c04315b64 | 12,797 | py | Python | clients/client/python/ory_client/rest.py | GRoguelon/sdk | b0f50192b90047e5b7cac9e3fb565c050be95403 | [
"Apache-2.0"
] | null | null | null | clients/client/python/ory_client/rest.py | GRoguelon/sdk | b0f50192b90047e5b7cac9e3fb565c050be95403 | [
"Apache-2.0"
] | null | null | null | clients/client/python/ory_client/rest.py | GRoguelon/sdk | b0f50192b90047e5b7cac9e3fb565c050be95403 | [
"Apache-2.0"
] | null | null | null | """
Ory APIs
Documentation for all public and administrative Ory APIs. Administrative APIs can only be accessed with a valid Personal Access Token. Public APIs are mostly used in browsers. # noqa: E501
The version of the OpenAPI document: v0.0.1-alpha.46
Contact: support@ory.sh
Generated by: https://openapi-generator.tech
"""
import io
import json
import logging
import re
import ssl
from urllib.parse import urlencode
import urllib3
from ory_client.exceptions import ApiException, UnauthorizedException, ForbiddenException, NotFoundException, ServiceException, ApiValueError
logger = logging.getLogger(__name__)
class RESTResponse(io.IOBase):
    """Lightweight wrapper over a urllib3 response.

    Exposes status, reason and the raw body bytes as attributes, and
    delegates header access to the wrapped response object.
    """

    def __init__(self, resp):
        self.urllib3_response = resp
        self.status, self.reason, self.data = resp.status, resp.reason, resp.data

    def getheaders(self):
        """Returns a dictionary of the response headers."""
        return self.urllib3_response.getheaders()

    def getheader(self, name, default=None):
        """Returns a given response header."""
        return self.urllib3_response.getheader(name, default)
class RESTClientObject(object):
    """urllib3-backed HTTP client used by the generated ApiClient.

    Builds a (Proxy)PoolManager from the SDK Configuration (TLS verification,
    client certificates, proxy, retries, socket options) and exposes one
    helper per HTTP verb, all of which funnel into request().
    """

    def __init__(self, configuration, pools_size=4, maxsize=None):
        """Create the urllib3 pool manager from *configuration*.

        :param configuration: SDK Configuration object providing TLS/proxy
            settings read below.
        :param pools_size: number of connection pools to cache.
        :param maxsize: max parallel connections per host (falls back to
            configuration.connection_pool_maxsize, then 4).
        """
        # urllib3.PoolManager will pass all kw parameters to connectionpool
        # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75  # noqa: E501
        # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680  # noqa: E501
        # maxsize is the number of requests to host that are allowed in parallel  # noqa: E501
        # Custom SSL certificates and client certificates: http://urllib3.readthedocs.io/en/latest/advanced-usage.html  # noqa: E501

        # cert_reqs
        if configuration.verify_ssl:
            cert_reqs = ssl.CERT_REQUIRED
        else:
            cert_reqs = ssl.CERT_NONE

        addition_pool_args = {}
        if configuration.assert_hostname is not None:
            addition_pool_args['assert_hostname'] = configuration.assert_hostname  # noqa: E501

        if configuration.retries is not None:
            addition_pool_args['retries'] = configuration.retries

        if configuration.socket_options is not None:
            addition_pool_args['socket_options'] = configuration.socket_options

        if maxsize is None:
            if configuration.connection_pool_maxsize is not None:
                maxsize = configuration.connection_pool_maxsize
            else:
                maxsize = 4

        # https pool manager
        if configuration.proxy:
            self.pool_manager = urllib3.ProxyManager(
                num_pools=pools_size,
                maxsize=maxsize,
                cert_reqs=cert_reqs,
                ca_certs=configuration.ssl_ca_cert,
                cert_file=configuration.cert_file,
                key_file=configuration.key_file,
                proxy_url=configuration.proxy,
                proxy_headers=configuration.proxy_headers,
                **addition_pool_args
            )
        else:
            self.pool_manager = urllib3.PoolManager(
                num_pools=pools_size,
                maxsize=maxsize,
                cert_reqs=cert_reqs,
                ca_certs=configuration.ssl_ca_cert,
                cert_file=configuration.cert_file,
                key_file=configuration.key_file,
                **addition_pool_args
            )

    def request(self, method, url, query_params=None, headers=None,
                body=None, post_params=None, _preload_content=True,
                _request_timeout=None):
        """Perform requests.

        :param method: http request method
        :param url: http request url
        :param query_params: query parameters in the url
        :param headers: http request headers
        :param body: request json body, for `application/json`
        :param post_params: request post parameters,
                            `application/x-www-form-urlencoded`
                            and `multipart/form-data`
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        """
        method = method.upper()
        assert method in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT',
                          'PATCH', 'OPTIONS']

        if post_params and body:
            raise ApiValueError(
                "body parameter cannot be used with post_params parameter."
            )

        post_params = post_params or {}
        headers = headers or {}

        timeout = None
        if _request_timeout:
            # Single number = total timeout; 2-tuple = (connect, read).
            if isinstance(_request_timeout, (int, float)):  # noqa: E501,F821
                timeout = urllib3.Timeout(total=_request_timeout)
            elif (isinstance(_request_timeout, tuple) and
                  len(_request_timeout) == 2):
                timeout = urllib3.Timeout(
                    connect=_request_timeout[0], read=_request_timeout[1])

        try:
            # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE`
            if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']:
                # Only set a default Content-Type for POST, PUT, PATCH and OPTIONS requests
                if (method != 'DELETE') and ('Content-Type' not in headers):
                    headers['Content-Type'] = 'application/json'
                if query_params:
                    url += '?' + urlencode(query_params)
                if ('Content-Type' not in headers) or (re.search('json', headers['Content-Type'], re.IGNORECASE)):
                    request_body = None
                    if body is not None:
                        request_body = json.dumps(body)
                    r = self.pool_manager.request(
                        method, url,
                        body=request_body,
                        preload_content=_preload_content,
                        timeout=timeout,
                        headers=headers)
                elif headers['Content-Type'] == 'application/x-www-form-urlencoded':  # noqa: E501
                    r = self.pool_manager.request(
                        method, url,
                        fields=post_params,
                        encode_multipart=False,
                        preload_content=_preload_content,
                        timeout=timeout,
                        headers=headers)
                elif headers['Content-Type'] == 'multipart/form-data':
                    # must del headers['Content-Type'], or the correct
                    # Content-Type which generated by urllib3 will be
                    # overwritten.
                    del headers['Content-Type']
                    r = self.pool_manager.request(
                        method, url,
                        fields=post_params,
                        encode_multipart=True,
                        preload_content=_preload_content,
                        timeout=timeout,
                        headers=headers)
                # Pass a `string` parameter directly in the body to support
                # other content types than Json when `body` argument is
                # provided in serialized form
                elif isinstance(body, str) or isinstance(body, bytes):
                    request_body = body
                    r = self.pool_manager.request(
                        method, url,
                        body=request_body,
                        preload_content=_preload_content,
                        timeout=timeout,
                        headers=headers)
                else:
                    # Cannot generate the request from given parameters
                    msg = """Cannot prepare a request message for provided
                             arguments. Please check that your arguments match
                             declared content type."""
                    raise ApiException(status=0, reason=msg)
            # For `GET`, `HEAD`
            else:
                r = self.pool_manager.request(method, url,
                                              fields=query_params,
                                              preload_content=_preload_content,
                                              timeout=timeout,
                                              headers=headers)
        except urllib3.exceptions.SSLError as e:
            msg = "{0}\n{1}".format(type(e).__name__, str(e))
            raise ApiException(status=0, reason=msg)

        if _preload_content:
            r = RESTResponse(r)

            # log response body
            logger.debug("response body: %s", r.data)

        # Map HTTP error classes onto the SDK exception hierarchy.
        if not 200 <= r.status <= 299:
            if r.status == 401:
                raise UnauthorizedException(http_resp=r)

            if r.status == 403:
                raise ForbiddenException(http_resp=r)

            if r.status == 404:
                raise NotFoundException(http_resp=r)

            if 500 <= r.status <= 599:
                raise ServiceException(http_resp=r)

            raise ApiException(http_resp=r)

        return r

    def GET(self, url, headers=None, query_params=None, _preload_content=True,
            _request_timeout=None):
        """Issue a GET request (see request() for parameter semantics)."""
        return self.request("GET", url,
                            headers=headers,
                            _preload_content=_preload_content,
                            _request_timeout=_request_timeout,
                            query_params=query_params)

    def HEAD(self, url, headers=None, query_params=None, _preload_content=True,
             _request_timeout=None):
        """Issue a HEAD request (see request() for parameter semantics)."""
        return self.request("HEAD", url,
                            headers=headers,
                            _preload_content=_preload_content,
                            _request_timeout=_request_timeout,
                            query_params=query_params)

    def OPTIONS(self, url, headers=None, query_params=None, post_params=None,
                body=None, _preload_content=True, _request_timeout=None):
        """Issue an OPTIONS request (see request() for parameter semantics)."""
        return self.request("OPTIONS", url,
                            headers=headers,
                            query_params=query_params,
                            post_params=post_params,
                            _preload_content=_preload_content,
                            _request_timeout=_request_timeout,
                            body=body)

    def DELETE(self, url, headers=None, query_params=None, body=None,
               _preload_content=True, _request_timeout=None):
        """Issue a DELETE request (see request() for parameter semantics)."""
        return self.request("DELETE", url,
                            headers=headers,
                            query_params=query_params,
                            _preload_content=_preload_content,
                            _request_timeout=_request_timeout,
                            body=body)

    def POST(self, url, headers=None, query_params=None, post_params=None,
             body=None, _preload_content=True, _request_timeout=None):
        """Issue a POST request (see request() for parameter semantics)."""
        return self.request("POST", url,
                            headers=headers,
                            query_params=query_params,
                            post_params=post_params,
                            _preload_content=_preload_content,
                            _request_timeout=_request_timeout,
                            body=body)

    def PUT(self, url, headers=None, query_params=None, post_params=None,
            body=None, _preload_content=True, _request_timeout=None):
        """Issue a PUT request (see request() for parameter semantics)."""
        return self.request("PUT", url,
                            headers=headers,
                            query_params=query_params,
                            post_params=post_params,
                            _preload_content=_preload_content,
                            _request_timeout=_request_timeout,
                            body=body)

    def PATCH(self, url, headers=None, query_params=None, post_params=None,
              body=None, _preload_content=True, _request_timeout=None):
        """Issue a PATCH request (see request() for parameter semantics)."""
        return self.request("PATCH", url,
                            headers=headers,
                            query_params=query_params,
                            post_params=post_params,
                            _preload_content=_preload_content,
                            _request_timeout=_request_timeout,
                            body=body)
| 43.527211 | 194 | 0.54755 |
acf64c5bf7815e5d312eeb63334a7691b0a5424c | 3,415 | bzl | Python | apple/internal/partials/framework_import.bzl | cclauss/rules_apple | 09308d581b6fb5df939dfccfdff21358d0f807d8 | [
"Apache-2.0"
] | 2 | 2019-09-01T06:06:40.000Z | 2020-11-10T00:37:01.000Z | apple/internal/partials/framework_import.bzl | cclauss/rules_apple | 09308d581b6fb5df939dfccfdff21358d0f807d8 | [
"Apache-2.0"
] | null | null | null | apple/internal/partials/framework_import.bzl | cclauss/rules_apple | 09308d581b6fb5df939dfccfdff21358d0f807d8 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Partial implementation for framework import file processing."""
load(
"@build_bazel_rules_apple//apple/internal:apple_framework_import.bzl",
"AppleFrameworkImportInfo",
)
load(
"@build_bazel_rules_apple//apple/internal:processor.bzl",
"processor",
)
load(
"@build_bazel_rules_apple//common:path_utils.bzl",
"path_utils",
)
load(
"@bazel_skylib//lib:partial.bzl",
"partial",
)
load(
"@bazel_skylib//lib:paths.bzl",
"paths",
)
def _framework_import_partial_impl(ctx, targets, targets_to_avoid):
    """Implementation for the framework import file processing partial."""
    # ctx is not needed here; reference it so the partial signature stays
    # uniform with the other partials.
    _ignored = [ctx]

    # Collect framework files propagated by framework_import_aspect.
    transitive_sets = [
        x[AppleFrameworkImportInfo].framework_imports
        for x in targets
        if AppleFrameworkImportInfo in x
    ]
    files_to_bundle = depset(transitive = transitive_sets).to_list()

    if targets_to_avoid:
        avoid_transitive_sets = [
            x[AppleFrameworkImportInfo].framework_imports
            for x in targets_to_avoid
            if AppleFrameworkImportInfo in x
        ]
        if avoid_transitive_sets:
            avoid_files = depset(transitive = avoid_transitive_sets).to_list()

            # Remove any files present in the targets to avoid from framework files that need to be
            # bundled.
            files_to_bundle = [x for x in files_to_bundle if x not in avoid_files]

    bundle_files = []
    for file in files_to_bundle:
        # Compute the file's location relative to its .framework directory so
        # it lands at the same relative path inside the bundle.
        framework_path = path_utils.farthest_directory_matching(file.short_path, "framework")
        framework_relative_path = paths.relativize(file.short_path, framework_path)

        parent_dir = paths.basename(framework_path)
        framework_relative_dir = paths.dirname(framework_relative_path).strip("/")
        if framework_relative_dir:
            parent_dir = paths.join(parent_dir, framework_relative_dir)

        bundle_files.append(
            (processor.location.framework, parent_dir, depset([file])),
        )

    return struct(bundle_files = bundle_files)
def framework_import_partial(targets, targets_to_avoid = []):
    """Constructor for the framework import file processing partial.

    This partial propagates framework import file bundle locations. The files are collected through
    the framework_import_aspect aspect.

    Args:
        targets: The list of targets through which to collect the framework import files.
        targets_to_avoid: The list of targets that may already be bundling some of the frameworks,
            to be used when deduplicating frameworks already bundled.

    Returns:
        A partial that returns the bundle location of the framework import files.
    """
    # Bind the arguments now; the partial is invoked later by the processor.
    return partial.make(
        _framework_import_partial_impl,
        targets = targets,
        targets_to_avoid = targets_to_avoid,
    )
| 35.206186 | 99 | 0.712152 |
acf64ca4e60c247ce2390e319a8c90e850b4273f | 204 | py | Python | pyramda/__init__.py | sergiors/pyramda | 5bf200888809b1bc946e813e29460f204bccd13e | [
"MIT"
] | 124 | 2015-07-30T21:34:25.000Z | 2022-02-19T08:45:50.000Z | pyramda/__init__.py | sergiors/pyramda | 5bf200888809b1bc946e813e29460f204bccd13e | [
"MIT"
] | 37 | 2015-08-31T23:02:20.000Z | 2022-02-04T04:45:28.000Z | pyramda/__init__.py | sergiors/pyramda | 5bf200888809b1bc946e813e29460f204bccd13e | [
"MIT"
] | 20 | 2015-08-04T18:59:09.000Z | 2021-12-13T08:08:59.000Z | from .dictionary import *
from .function import *
from .iterable import *
from .logic import *
from .math import *
from .relation import *
from .getattr import getattr
from .isinstance import isinstance
| 20.4 | 34 | 0.769608 |
acf64d0447667fd32cc8f4d19ab98fdb7e8d7b90 | 4,506 | py | Python | loggers_control/scripts/train_de_dqn_homo.py | linZHank/two_loggers | 34b02e443681ddabe796d73863b24b5499168895 | [
"MIT"
] | 4 | 2019-06-09T08:25:25.000Z | 2020-09-19T01:04:49.000Z | loggers_control/scripts/train_de_dqn_homo.py | linZHank/two_loggers | 34b02e443681ddabe796d73863b24b5499168895 | [
"MIT"
] | 7 | 2019-02-01T21:52:27.000Z | 2020-11-02T03:46:21.000Z | loggers_control/scripts/train_de_dqn_homo.py | linZHank/two_loggers | 34b02e443681ddabe796d73863b24b5499168895 | [
"MIT"
] | 5 | 2019-05-06T15:21:25.000Z | 2021-09-22T02:48:33.000Z | #! /usr/bin/env python
from __future__ import absolute_import, division, print_function
import sys
import os
import numpy as np
import time
from datetime import datetime
import matplotlib.pyplot as plt
import rospy
import tensorflow as tf
from envs.de import DoubleEscape
from agents.dqn import ReplayBuffer, DeepQNet
if __name__=='__main__':
    # Build the two-logger escape environment and read its dimensions.
    env = DoubleEscape()
    # Each agent's state: its own row of the observation stacked with the last
    # row, flattened below — hence twice the per-row width. (assumes row -1 is
    # shared/global state — TODO confirm against envs.de)
    dim_obs = env.observation_space_shape[1]*2
    num_act = env.action_reservoir.shape[0]
    # Single homogeneous Q-network shared by both agents (parameter sharing).
    agent = DeepQNet(
        dim_obs=dim_obs,
        num_act=num_act,
        lr=1e-3,
        polyak=0.98
    )
    replay_buffer = ReplayBuffer(dim_obs=dim_obs, size=int(1e6))
    model_dir = os.path.join(sys.path[0], 'saved_models', env.name, agent.name, datetime.now().strftime("%Y-%m-%d-%H-%M"))
    # tensorboard
    # NOTE(review): train_loss is created but never updated below.
    train_loss = tf.keras.metrics.Mean('train_loss', dtype=tf.float32)
    summary_writer = tf.summary.create_file_writer(model_dir)
    summary_writer.set_as_default()
    # params
    batch_size = 128
    train_freq = 100
    train_after = 20000
    warmup_episodes = 500
    decay_period = 1500
    # NOTE(review): replay_buffer is constructed a second time here, making the
    # assignment above redundant (agent.dim_obs should equal dim_obs).
    replay_buffer = ReplayBuffer(dim_obs=agent.dim_obs, size=int(1e6))
    total_steps = int(5e6)
    episodic_returns = []
    sedimentary_returns = []
    episodic_steps = []
    step_counter = 0
    episode_counter = 0
    success_counter = 0
    save_freq = 1000
    obs, done, ep_ret, ep_len = env.reset(), False, 0, 0
    start_time = time.time()
    # start training
    while step_counter <= total_steps:
        # Re-reset until the env is in a usable state (not 'blown').
        while 'blown' in env.status:
            obs, ep_ret, ep_len = env.reset(), 0, 0
        # Per-agent flattened states: own row plus the last observation row.
        s0 = obs[[0,-1]].flatten()
        s1 = obs[[1,-1]].flatten()
        # Both agents act with the same (epsilon-greedy) policy network.
        a0 = np.squeeze(agent.act(np.expand_dims(s0, axis=0)))
        a1 = np.squeeze(agent.act(np.expand_dims(s1, axis=0)))
        n_obs, rew, done, info = env.step(np.array([int(a0), int(a1)]))
        n_s0 = n_obs[[0,-1]].flatten()
        n_s1 = n_obs[[1,-1]].flatten()
        rospy.logdebug("\nstate: {} \naction: {} \nreward: {} \ndone: {} \nn_state: {}".format(obs, (a0, a1), rew, done, n_obs))
        ep_ret += np.sum(rew)
        ep_len += 1
        # Store both agents' transitions in the shared buffer; both receive the
        # summed team reward.
        replay_buffer.store(s0, a0, np.sum(rew), done, n_s0)
        replay_buffer.store(s1, a1, np.sum(rew), done, n_s1)
        obs = n_obs.copy() # SUPER CRITICAL
        step_counter += 1
        # train one batch
        if not step_counter%train_freq and step_counter>train_after:
            # Catch up with train_freq gradient steps every train_freq env steps.
            for _ in range(train_freq):
                minibatch = replay_buffer.sample_batch(batch_size=batch_size)
                loss_q = agent.train_one_batch(data=minibatch)
                print("\nloss_q: {}".format(loss_q))
        # handle episode termination
        if done or (ep_len==env.max_episode_steps):
            episode_counter +=1
            episodic_returns.append(ep_ret)
            # Running average of all episode returns so far.
            sedimentary_returns.append(sum(episodic_returns)/episode_counter)
            episodic_steps.append(step_counter)
            # Success only when both loggers escaped.
            if info.count('escaped')==2:
                success_counter += 1
            rospy.loginfo("\n====\nEpisode: {} \nEpisodeLength: {} \nTotalSteps: {} \nEpisodeReturn: {} \nSucceeded: {} \nSedimentaryReturn: {} \nTimeElapsed: {} \n====\n".format(episode_counter, ep_len, step_counter, ep_ret, success_counter, sedimentary_returns[-1], time.time()-start_time))
            tf.summary.scalar("episode return", ep_ret, step=episode_counter)
            # save model
            if not episode_counter%save_freq or step_counter==total_steps:
                model_path = os.path.join(model_dir, str(episode_counter))
                if not os.path.exists(os.path.dirname(model_path)):
                    os.makedirs(os.path.dirname(model_path))
                agent.q.q_net.save(model_path)
                # Save returns
                np.save(os.path.join(model_dir, 'episodic_returns.npy'), episodic_returns)
                np.save(os.path.join(model_dir, 'sedimentary_returns.npy'), sedimentary_returns)
                np.save(os.path.join(model_dir, 'episodic_steps.npy'), episodic_steps)
                with open(os.path.join(model_dir, 'training_time.txt'), 'w') as f:
                    f.write("{}".format(time.time()-start_time))
            # reset env
            obs, done, ep_ret, ep_len = env.reset(), False, 0, 0
            # Anneal exploration epsilon after the warmup period.
            agent.linear_epsilon_decay(episode_counter, decay_period, warmup_episodes)
    # plot returns
    fig, ax = plt.subplots(figsize=(8, 6))
    fig.suptitle('Averaged Returns')
    ax.plot(sedimentary_returns)
    plt.show()
| 41.722222 | 292 | 0.630715 |
acf64e438843ed57e73e8b5e6ff7499c476a3f31 | 3,708 | py | Python | ichnaea/data/tests/test_stats.py | JaredKerim-Mozilla/ichnaea | cfaef2b903960374050be3ea2e4c1520687de56b | [
"Apache-1.1"
] | null | null | null | ichnaea/data/tests/test_stats.py | JaredKerim-Mozilla/ichnaea | cfaef2b903960374050be3ea2e4c1520687de56b | [
"Apache-1.1"
] | null | null | null | ichnaea/data/tests/test_stats.py | JaredKerim-Mozilla/ichnaea | cfaef2b903960374050be3ea2e4c1520687de56b | [
"Apache-1.1"
] | null | null | null | from datetime import timedelta
from ichnaea.cache import redis_pipeline
from ichnaea.data.tasks import update_statcounter
from ichnaea.models.content import (
Stat,
StatCounter,
StatKey,
)
from ichnaea.tests.base import CeleryTestCase
from ichnaea import util
class TestStatCounter(CeleryTestCase):
    """Tests for the ``update_statcounter`` task, which folds Redis-backed
    StatCounter increments into persistent ``Stat`` rows."""
    def setUp(self):
        """Fix the three reference dates used throughout the tests."""
        super(TestStatCounter, self).setUp()
        self.today = util.utcnow().date()
        self.yesterday = self.today - timedelta(1)
        self.two_days = self.today - timedelta(2)
    def add_counter(self, stat_key, time, value):
        """Increment the Redis counter for ``stat_key`` on ``time`` by ``value``."""
        stat_counter = StatCounter(stat_key, time)
        with redis_pipeline(self.redis_client) as pipe:
            stat_counter.incr(pipe, value)
    def check_stat(self, stat_key, time, value):
        """Assert the persisted Stat row for (stat_key, time) equals ``value``."""
        hashkey = Stat.to_hashkey(key=stat_key, time=time)
        stat = Stat.getkey(self.session, hashkey)
        self.assertEqual(stat.value, value)
    def test_first_run(self):
        # With no prior Stat rows, the counter value is stored as-is.
        self.add_counter(StatKey.cell, self.yesterday, 3)
        update_statcounter.delay(ago=1).get()
        self.check_stat(StatKey.cell, self.yesterday, 3)
    def test_update_from_yesterday(self):
        # Counter (3) is added on top of the most recent prior Stat (2) -> 5.
        self.add_counter(StatKey.cell, self.yesterday, 3)
        self.add_counter(StatKey.cell, self.today, 4)
        self.session.add(Stat(key=StatKey.cell, time=self.two_days, value=2))
        self.session.flush()
        update_statcounter.delay(ago=1).get()
        self.check_stat(StatKey.cell, self.yesterday, 5)
    def test_multiple_updates_for_today(self):
        # Running the task twice for today accumulates both counter batches.
        self.add_counter(StatKey.cell, self.today, 4)
        self.session.add(Stat(key=StatKey.cell, time=self.yesterday, value=5))
        self.session.flush()
        update_statcounter.delay(ago=0).get()
        self.check_stat(StatKey.cell, self.today, 9)
        self.add_counter(StatKey.cell, self.today, 3)
        update_statcounter.delay(ago=0).get()
        self.check_stat(StatKey.cell, self.today, 12)
    def test_update_with_gap(self):
        # A gap of several days still sums onto the last existing Stat (7+3).
        a_week_ago = self.today - timedelta(days=7)
        self.add_counter(StatKey.cell, self.yesterday, 3)
        self.add_counter(StatKey.cell, self.today, 4)
        self.session.add(Stat(key=StatKey.cell, time=a_week_ago, value=7))
        self.session.flush()
        update_statcounter.delay(ago=1).get()
        self.check_stat(StatKey.cell, self.yesterday, 10)
    def test_update_does_not_overwrite(self):
        # An existing Stat row for the same day is incremented (3+5), not replaced.
        self.add_counter(StatKey.cell, self.yesterday, 5)
        self.add_counter(StatKey.cell, self.today, 7)
        self.session.add(Stat(key=StatKey.cell, time=self.two_days, value=1))
        self.session.add(Stat(key=StatKey.cell, time=self.yesterday, value=3))
        self.session.flush()
        update_statcounter.delay(ago=1).get()
        self.check_stat(StatKey.cell, self.yesterday, 8)
    def test_update_all_keys(self):
        # Every StatKey is processed in a single task run.
        self.add_counter(StatKey.cell, self.yesterday, 2)
        self.add_counter(StatKey.wifi, self.yesterday, 3)
        self.add_counter(StatKey.unique_cell, self.yesterday, 4)
        self.add_counter(StatKey.unique_wifi, self.yesterday, 5)
        self.add_counter(StatKey.unique_ocid_cell, self.yesterday, 6)
        self.session.add(Stat(key=StatKey.cell, time=self.two_days, value=7))
        self.session.add(Stat(key=StatKey.wifi, time=self.two_days, value=8))
        self.session.flush()
        update_statcounter.delay(ago=1).get()
        self.check_stat(StatKey.cell, self.yesterday, 9)
        self.check_stat(StatKey.wifi, self.yesterday, 11)
        self.check_stat(StatKey.unique_cell, self.yesterday, 4)
        self.check_stat(StatKey.unique_wifi, self.yesterday, 5)
        self.check_stat(StatKey.unique_ocid_cell, self.yesterday, 6)
| 39.031579 | 78 | 0.687972 |
acf64f7d87672840f78469e6efca124c8c12630b | 6,571 | py | Python | utilities/utils.py | ttocs167/FF-Bingo | 21c561d038fe358412ecea2a9b415632690ab89c | [
"MIT"
] | 1 | 2021-06-27T11:58:30.000Z | 2021-06-27T11:58:30.000Z | utilities/utils.py | ttocs167/FF-Bingo | 21c561d038fe358412ecea2a9b415632690ab89c | [
"MIT"
] | null | null | null | utilities/utils.py | ttocs167/FF-Bingo | 21c561d038fe358412ecea2a9b415632690ab89c | [
"MIT"
] | 5 | 2021-06-27T11:45:35.000Z | 2021-07-19T19:38:16.000Z | import random
import re
import shutil
import csv
import requests
from difflib import SequenceMatcher
riddle_answer_pairs = []
current_riddle_answer = ""
def random_animal_emoji():
    """Return one animal emoji shortcode, chosen uniformly at random."""
    options = (
        ":frog:", ":pig:", ":rabbit:", ":dog:", ":cat:", ":mouse:", ":hamster:",
        ":fox:", ":bear:", ":panda_face:", ":hatching_chick:", ":chicken:",
        ":penguin:",
    )
    return random.choice(options)
def random_8ball_response():
    """Return a Magic-8-ball style answer wrapped in bold-italic markdown.

    The joke answers are weighted so they appear less often than the
    straight ones.
    """
    answers = ["Yes", "No", "Maybe", "Certainly", "Surely not", "Of Course", "No way",
               "Who Cares?", "Fo Sho Dawg", ":frog:"]
    odds = [1, 1, 1, 1, 1, 1, 1, .5, .5, .05]
    picked = random.choices(answers, weights=odds)[0]
    return "_**{}**_".format(picked)
async def add_to_list(new_line, guild):
    """Append ``new_line`` to the guild's main bingo list file."""
    path = "lists/" + guild + "/list.txt"
    with open(path, 'a') as handle:
        handle.write(new_line + "\n")
async def add_to_free_list(new_line, guild):
    """Append ``new_line`` to the guild's free-space list file."""
    path = "lists/" + guild + "/free_list.txt"
    with open(path, 'a') as handle:
        handle.write(new_line + "\n")
async def list_all_lines(guild):
    """Return the guild's list entries interleaved with their indices.

    Produces a flat sequence ``["0", line0, "1", line1, ...]`` (every element
    stringified, newlines kept) and splits it into chunks of at most 50
    elements, i.e. 25 index/line pairs per chunk.
    """
    with open("lists/" + guild + "/list.txt", 'r') as handle:
        flat = []
        for number, text in enumerate(handle):
            flat.append(str(number))
            flat.append(str(text))
    return [flat[start:start + 50] for start in range(0, len(flat), 50)]
async def list_all_free_lines(guild):
    """Return the guild's free-space entries interleaved with their indices.

    Same layout as the main list: a flat ``[index, line, ...]`` sequence with
    every element stringified, split into chunks of at most 50 elements.
    """
    with open("lists/" + guild + "/free_list.txt", 'r') as handle:
        flat = []
        for number, text in enumerate(handle):
            flat.append(str(number))
            flat.append(str(text))
    return [flat[start:start + 50] for start in range(0, len(flat), 50)]
async def get_line(index, guild):
    """Return the line at ``index`` (newline included) from the guild's list."""
    with open("lists/" + guild + "/list.txt", "r") as source:
        return list(source)[index]
async def get_free_line(index, guild):
    """Return the line at ``index`` (newline included) from the free-space list."""
    with open("lists/" + guild + "/free_list.txt", "r") as source:
        return list(source)[index]
async def delete_line(index, guild):
    """Remove the line at position ``index`` from the guild's main list file.

    Indices strictly past the end of the file leave the file untouched.
    """
    path = "lists/" + guild + "/list.txt"
    with open(path, "r") as handle:
        entries = handle.readlines()
    if index > len(entries):
        return
    with open(path, "w") as handle:
        handle.writelines(text for pos, text in enumerate(entries) if pos != index)
async def delete_free_line(index, guild):
    """Remove the line at position ``index`` from the guild's free-space list.

    Indices strictly past the end of the file leave the file untouched.
    """
    path = "lists/" + guild + "/free_list.txt"
    with open(path, "r") as handle:
        entries = handle.readlines()
    if index > len(entries):
        return
    with open(path, "w") as handle:
        handle.writelines(text for pos, text in enumerate(entries) if pos != index)
async def reset_list(guild):
    """Restore the guild's main list from the default template.

    The current list is first copied to ``list_OLD.txt`` as a backup; if no
    list exists yet (fresh server), the backup step is skipped with a notice.
    """
    target = "lists/" + guild + "/list.txt"
    try:
        shutil.copy(target, "lists/" + guild + "/list_OLD.txt")
    except Exception:
        print("There is no old list to backup. New server being initialised.")
    with open("./lists/default_list.txt", "r") as template:
        contents = template.read()
    with open(target, "w") as outfile:
        outfile.write(contents)
async def reset_free_list(guild):
    """Restore the guild's free-space list from the default template.

    The current file is first backed up to ``free_list_OLD.txt``; for a brand
    new server (no file yet) the backup step is skipped with a notice.
    """
    target = "lists/" + guild + "/free_list.txt"
    try:
        shutil.copy(target, "lists/" + guild + "/free_list_OLD.txt")
    except Exception:
        print("There is no old free list to backup. New server being initialised.")
    with open("./lists/default_free_list.txt", "r") as template:
        contents = template.read()
    with open(target, "w") as outfile:
        outfile.write(contents)
def load_riddles():
    """Load riddle/answer rows from the bundled CSV into the module cache.

    Each CSV row becomes a tuple in the module-level ``riddle_answer_pairs``.
    """
    global riddle_answer_pairs
    with open('./resources/riddles/more riddles.csv', 'r', encoding='utf-8') as source:
        riddle_answer_pairs = [tuple(row) for row in csv.reader(source)]
    print("riddles loaded!")
async def random_riddle_answer():
    """Pick a random riddle and return it with its spoiler-wrapped answer.

    Also remembers the (quote-stripped) answer in the module-level
    ``current_riddle_answer`` so ``check_riddle`` can verify guesses later.
    """
    global current_riddle_answer
    picked = random.choice(riddle_answer_pairs)
    riddle = str(picked[0])
    answer = str(picked[1]).strip(' "')
    text = "_{}_\n{}".format(riddle, pad_spoiler_with_spaces(answer))
    current_riddle_answer = answer
    return text
async def check_riddle(text):
    """Return True when ``text`` fuzzily matches the current riddle answer.

    Comparison is case-insensitive and uses difflib similarity with a 0.8
    threshold, so close-enough spellings still count.
    """
    global current_riddle_answer
    similarity = SequenceMatcher(None, text.lower(), current_riddle_answer.lower()).ratio()
    return similarity > 0.8
def random_wipe_reason(caller):
    """Return a joke raid-wipe blame message, occasionally pinging ``caller``.

    When the caller themselves is drawn, the mention is wrapped in angle
    brackets so Discord renders it as a ping.
    """
    mention = "@" + caller
    candidates = ["Tank", "Off-Tank", "DPS", "Healer", "Yuki", mention, "🐸"]
    culprit = random.choices(candidates, weights=[1, 1, 1, 1, 1, 1, 0.05])[0]
    if culprit == mention:
        culprit = "<" + mention + ">"
    return "_It was the **" + culprit + "**_"
def yolo_response(img_url):
    """POST ``img_url`` to the local YOLOv5 detection service.

    Returns the fixed path of the annotated output image when the service
    answers 200, otherwise an empty string. (assumes the service writes its
    result to ./yolo/out.jpg — TODO confirm against the detection server)
    """
    endpoint = "http://localhost:5000/v1/object-detection/yolov5s"
    reply = requests.post(endpoint, json={"image_url": img_url})
    return "./yolo/out.jpg" if reply.status_code == 200 else ""
def pad_spoiler_with_spaces(text):
    """Wrap ``text`` in Discord spoiler bars, space-padded to at least 10
    characters so the spoiler's width does not leak very short answers."""
    return "||" + text.ljust(10) + "||"
def emoji_free_text(text):
    """Return ``text`` with emoji and related pictographic codepoints removed."""
    pictographs = re.compile("["
                             u"\U0001F600-\U0001F64F"  # emoticons
                             u"\U0001F300-\U0001F5FF"  # symbols & pictographs
                             u"\U0001F680-\U0001F6FF"  # transport & map symbols
                             u"\U0001F1E0-\U0001F1FF"  # flags (iOS)
                             u"\U0001F1F2-\U0001F1F4"  # Macau flag
                             u"\U0001F1E6-\U0001F1FF"  # flags
                             u"\U0001F600-\U0001F64F"
                             u"\U00002702-\U000027B0"
                             u"\U000024C2-\U0001F251"
                             u"\U0001f926-\U0001f937"
                             u"\U0001F1F2"
                             u"\U0001F1F4"
                             u"\U0001F620"
                             u"\u200d"
                             u"\u2640-\u2642"
                             "]+", flags=re.UNICODE)
    return pictographs.sub(r'', text)
| 31.440191 | 105 | 0.570537 |
acf6502af717c1f3c0617a81fc171a2850de245a | 6,868 | py | Python | cirq/optimizers/merge_interactions.py | rickyHong/Cirq-repl | 5b31440d5b5bf5a66ee85ef5e44373ac89aa0eaf | [
"Apache-2.0"
] | 2 | 2019-04-02T09:16:28.000Z | 2019-05-25T18:35:19.000Z | cirq/optimizers/merge_interactions.py | babbush/Cirq | 447b2c762cc2820dd28abb3bd2bc785d36bae39a | [
"Apache-2.0"
] | 36 | 2019-04-03T23:03:51.000Z | 2019-05-15T23:49:01.000Z | cirq/optimizers/merge_interactions.py | babbush/Cirq | 447b2c762cc2820dd28abb3bd2bc785d36bae39a | [
"Apache-2.0"
] | 2 | 2019-04-03T22:55:05.000Z | 2019-04-24T23:24:53.000Z | # Copyright 2018 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""An optimization pass that combines adjacent single-qubit rotations."""
from typing import Callable, List, Optional, Sequence, Tuple, cast
import numpy as np
from cirq import circuits, ops, protocols
from cirq.optimizers import two_qubit_decompositions
class MergeInteractions(circuits.PointOptimizer):
    """Combines series of adjacent one and two-qubit gates operating on a pair
    of qubits."""
    def __init__(self,
                 tolerance: float = 1e-8,
                 allow_partial_czs: bool = True,
                 post_clean_up: Callable[
                     [Sequence[ops.Operation]], ops.OP_TREE
                 ] = lambda op_list: op_list) -> None:
        """Args:
            tolerance: Numeric tolerance passed to the two-qubit decomposition.
            allow_partial_czs: Whether fractional-exponent CZ gates may appear
                in the rewritten circuit.
            post_clean_up: Hook applied to the replacement operations.
        """
        super().__init__(post_clean_up=post_clean_up)
        self.tolerance = tolerance
        self.allow_partial_czs = allow_partial_czs
    def optimization_at(self,
                        circuit: circuits.Circuit,
                        index: int,
                        op: ops.Operation
                        ) -> Optional[circuits.PointOptimizationSummary]:
        """Rewrite the run of 1/2-qubit ops starting at (index, op.qubits)."""
        if len(op.qubits) != 2:
            return None
        # Accumulate the run of operations on this qubit pair into one 4x4 matrix.
        old_operations, indices, matrix = (
            self._scan_two_qubit_ops_into_matrix(circuit, index, op.qubits))
        old_interaction_count = len([old_op for old_op in old_operations
                                     if len(old_op.qubits) == 2])
        switch_to_new = False
        # Rewrite if any 2-qubit op in the run is not already a CZPowGate.
        switch_to_new |= any(
            len(old_op.qubits) == 2 and
            not ops.op_gate_of_type(old_op, ops.CZPowGate)
            for old_op in old_operations)
        # When partial CZs are disallowed, fractional CZs also force a rewrite.
        if not self.allow_partial_czs:
            switch_to_new |= any(isinstance(old_op, ops.GateOperation) and
                                 isinstance(old_op.gate, ops.CZPowGate)
                                 and old_op.gate.exponent != 1
                                 for old_op in old_operations)
        # This point cannot be optimized using this method
        if not switch_to_new and old_interaction_count <= 1:
            return None
        # Find a max-3-cz construction.
        new_operations = (
            two_qubit_decompositions.two_qubit_matrix_to_operations(
                op.qubits[0],
                op.qubits[1],
                matrix,
                self.allow_partial_czs,
                self.tolerance,
                False))
        new_interaction_count = len([new_op for new_op in new_operations
                                     if len(new_op.qubits) == 2])
        # Also rewrite purely to reduce the number of 2-qubit interactions.
        switch_to_new |= new_interaction_count < old_interaction_count
        if not switch_to_new:
            return None
        return circuits.PointOptimizationSummary(
            clear_span=max(indices) + 1 - index,
            clear_qubits=op.qubits,
            new_operations=new_operations)
    def _op_to_matrix(self,
                      op: Optional[ops.Operation],
                      qubits: Tuple[ops.Qid, ...]
                      ) -> Optional[np.ndarray]:
        """Determines the effect of an operation on the given qubits.
        If the operation is a 1-qubit operation on one of the given qubits,
        or a 2-qubit operation on both of the given qubits, and also the
        operation has a known matrix, then a matrix is returned. Otherwise None
        is returned.
        Args:
            op: The operation to understand.
            qubits: The qubits we care about. Order determines matrix tensor
                order.
        Returns:
            None, or else a matrix equivalent to the effect of the operation.
        """
        q1, q2 = qubits
        matrix = protocols.unitary(op, None)
        if matrix is None:
            return None
        assert op is not None
        if op.qubits == qubits:
            return matrix
        # Same pair but reversed order: reorder the tensor factors.
        if op.qubits == (q2, q1):
            return MergeInteractions._flip_kron_order(matrix)
        # Single-qubit op: pad with identity on the untouched qubit.
        if op.qubits == (q1,):
            return np.kron(matrix, np.eye(2))
        if op.qubits == (q2,):
            return np.kron(np.eye(2), matrix)
        return None
    def _scan_two_qubit_ops_into_matrix(
            self,
            circuit: circuits.Circuit,
            index: Optional[int],
            qubits: Tuple[ops.Qid, ...]
    ) -> Tuple[List[ops.Operation], List[int], np.ndarray]:
        """Accumulates operations affecting the given pair of qubits.
        The scan terminates when it hits the end of the circuit, finds an
        operation without a known matrix, or finds an operation that interacts
        the given qubits with other qubits.
        Args:
            circuit: The circuit to scan for operations.
            index: The index to start scanning forward from.
            qubits: The pair of qubits we care about.
        Returns:
            A tuple containing:
                0. The operations.
                1. The moment indices those operations were on.
                2. A matrix equivalent to the effect of the scanned operations.
        """
        product = np.eye(4, dtype=np.complex128)
        all_operations = []
        touched_indices = []
        while index is not None:
            # The set de-duplicates a 2-qubit op that appears under both qubits.
            operations = list({circuit.operation_at(q, index) for q in qubits})
            op_data = [
                self._op_to_matrix(op, qubits)
                for op in operations
                if op is not None
            ]
            # Stop at any non-constant or non-local interaction.
            if any(e is None for e in op_data):
                break
            present_ops = [op for op in operations if op]
            present_op_data = cast(List[np.ndarray], op_data)
            # Left-multiply: later operations compose on the left of the product.
            for op_mat in present_op_data:
                product = np.dot(op_mat, product)
            all_operations.extend(present_ops)
            touched_indices.append(index)
            index = circuit.next_moment_operating_on(qubits, index + 1)
        return all_operations, touched_indices, product
    @staticmethod
    def _flip_kron_order(mat4x4: np.ndarray) -> np.ndarray:
        """Given M = sum(kron(a_i, b_i)), returns M' = sum(kron(b_i, a_i))."""
        # Permuting basis states 01 <-> 10 (indices 1 and 2) swaps the qubits.
        result = np.array([[0] * 4] * 4, dtype=np.complex128)
        order = [0, 2, 1, 3]
        for i in range(4):
            for j in range(4):
                result[order[i], order[j]] = mat4x4[i, j]
        return result
| 36.727273 | 79 | 0.589691 |
acf651285248d613f2f417c5fcb3402f3bf12191 | 4,040 | py | Python | scipy/misc/doccer.py | seberg/scipy | d8081cdd40ed8cbebd5905c0ad6c323c57d5da6e | [
"BSD-3-Clause"
] | 2 | 2015-10-30T10:04:46.000Z | 2017-03-11T00:58:21.000Z | scipy/misc/doccer.py | seberg/scipy | d8081cdd40ed8cbebd5905c0ad6c323c57d5da6e | [
"BSD-3-Clause"
] | null | null | null | scipy/misc/doccer.py | seberg/scipy | d8081cdd40ed8cbebd5905c0ad6c323c57d5da6e | [
"BSD-3-Clause"
] | null | null | null | ''' Utilities to allow inserting docstring fragments for common
parameters into function and method docstrings'''
import sys
__all__ = ['docformat', 'indentcount_lines', 'filldoc',
'unindent_dict', 'unindent_string']
def docformat(docstring, docdict=None):
    ''' Fill a function docstring from variables in dictionary

    Each inserted fragment is re-indented so that its continuation lines
    match the minimum indentation of ``docstring`` (measured after its
    first line).

    Parameters
    ----------
    docstring : string
        docstring from function, possibly with dict formatting strings
    docdict : dict
        dictionary with keys that match the dict formatting strings
        and values that are docstring fragments to be inserted

    Returns
    -------
    outstring : string
        string with requested ``docdict`` strings inserted

    Examples
    --------
    >>> docformat(' Test string with %(value)s', {'value':'inserted value'})
    ' Test string with inserted value'
    >>> docstring = 'First line\\n    Second line\\n    %(value)s'
    >>> inserted_string = "indented\\nstring"
    >>> docformat(docstring, {'value': inserted_string})
    'First line\\n    Second line\\n    indented\\n    string'
    '''
    if not docstring or not docdict:
        return docstring
    body_lines = docstring.expandtabs().splitlines()
    # Minimum indent of the target docstring, ignoring its first line.
    indent_width = indentcount_lines(body_lines[1:]) if len(body_lines) > 1 else 0
    pad = ' ' * indent_width
    reindented = {}
    for key, fragment in docdict.items():
        fragment_lines = fragment.expandtabs().splitlines()
        if fragment_lines:
            reindented[key] = '\n'.join(
                [fragment_lines[0]] + [pad + ln for ln in fragment_lines[1:]])
        else:
            # Empty fragment: nothing to re-indent.
            reindented[key] = fragment
    return docstring % reindented
def indentcount_lines(lines):
    ''' Minimum indent for all lines in line list

    Whitespace-only lines are ignored.  Returns 0 for an empty list or a
    list of only-blank lines.

    >>> lines = ['  one', '   two', '   three']
    >>> indentcount_lines(lines)
    2
    >>> indentcount_lines([])
    0
    >>> indentcount_lines([' one'])
    1
    >>> indentcount_lines(['    '])
    0
    '''
    # sys.maxint was removed in Python 3; sys.maxsize exists on both 2.6+
    # and 3.x and serves the same "larger than any real indent" purpose.
    indentno = sys.maxsize
    for line in lines:
        stripped = line.lstrip()
        if stripped:
            indentno = min(indentno, len(line) - len(stripped))
    if indentno == sys.maxsize:
        # No non-blank line was seen.
        return 0
    return indentno
def filldoc(docdict, unindent_params=True):
    ''' Return docstring decorator using docdict variable dictionary

    Parameters
    ----------
    docdict : dictionary
        dictionary containing name, docstring fragment pairs
    unindent_params : {False, True}, boolean, optional
        If True, strip common indentation from all fragments in
        docdict before substitution

    Returns
    -------
    decfunc : function
        decorator that applies dictionary to input function docstring
    '''
    fragments = unindent_dict(docdict) if unindent_params else docdict
    def decorate(func):
        func.__doc__ = docformat(func.__doc__, fragments)
        return func
    return decorate
def unindent_dict(docdict):
    ''' Return a copy of *docdict* with every value unindented '''
    return dict((key, unindent_string(value)) for key, value in docdict.items())
def unindent_string(docstring):
    ''' Strip the common leading indentation from every line, including first

    >>> unindent_string('  two')
    'two'
    >>> unindent_string('  two\\n   three')
    'two\\n three'
    '''
    lines = docstring.expandtabs().splitlines()
    margin = indentcount_lines(lines)
    if margin == 0:
        # Nothing to strip; return the input untouched (tabs preserved).
        return docstring
    return '\n'.join(line[margin:] for line in lines)
| 28.857143 | 76 | 0.622277 |
acf65155f48239e4fd6cc0ace3bf8d7c820faacd | 3,647 | py | Python | tests/test_animals_class.py | talha-naveed97/BioSim | a5a7bf4e31433ce8f2e70904f0c119f6bb7d58b1 | [
"MIT"
] | null | null | null | tests/test_animals_class.py | talha-naveed97/BioSim | a5a7bf4e31433ce8f2e70904f0c119f6bb7d58b1 | [
"MIT"
] | null | null | null | tests/test_animals_class.py | talha-naveed97/BioSim | a5a7bf4e31433ce8f2e70904f0c119f6bb7d58b1 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Test set for Animals class for INF200 June 2021.
"""
from biosim import animals
from biosim.animals import Herbivore, Carnivore, set_animal_params
from biosim.cells import Lowland
import math
import pytest
def test_update_params():
    """
    Test that parameters of animals are updated correctly.
    """
    new_zeta = 7.5
    herb = Herbivore(10, 20)
    Herbivore.update_defaults({'zeta': new_zeta})
    assert herb.guideline_params['zeta'] == new_zeta
def test_calculate_fitness():
    """
    Test that animal fitness is computed correctly.
    """
    age, weight = 10, 20
    herb = Herbivore(age, weight)
    params = herb.guideline_params
    # Logistic factors for age (decreasing) and weight (increasing).
    q_age = 1 / (1 + math.exp(params["phi_age"] * (age - params["a_half"])))
    q_weight = 1 / (1 + math.exp(-params["phi_weight"] * (weight - params["w_half"])))
    herb.calculate_fitness()
    assert herb.fitness == q_age * q_weight
def test_migration(mocker):
    """
    Test that the animal migrates when the random draw falls below
    mu * fitness.
    """
    carn = Carnivore(10, 20)
    carn.calculate_fitness()
    threshold = carn.guideline_params["mu"] * carn.fitness
    mocker.patch('random.random', return_value=threshold - 0.01)
    carn.migration()
    assert carn.can_migrate is True
def test_aging():
    """
    Test that animal age increases by 1 in one year.
    """
    herb = Herbivore(5, 5)
    herb.commence_aging()
    assert herb.age == 6
def test_death(mocker):
    """
    Test that the animal dies when the random draw falls below
    omega * (1 - fitness).
    """
    herb = Herbivore(6, 12)
    herb.calculate_fitness()
    threshold = herb.guideline_params["omega"] * (1 - herb.fitness)
    mocker.patch('random.random', return_value=threshold - 0.01)
    herb.death()
    assert herb.dead is True
def test_set_animal_params():
    """
    Test that a ValueError is raised when an unknown species name is given
    to set_animal_params().
    """
    with pytest.raises(ValueError):
        animals.set_animal_params('wrong_species', {'age': 10, 'weight': 20})
def test_herbivore_feeds():
    """
    Test that herbivores feed properly.
    """
    herb = Herbivore(6, 12)
    cell = Lowland((6, 6))
    # An animal eats at most F, capped by the fodder available in the cell.
    portion = min(herb.guideline_params["F"], cell.food_status)
    herb.weight += portion * herb.guideline_params["beta"]
    herb.calculate_fitness()
    expected_remaining = cell.food_status - portion
    assert herb.feeds(cell.food_status) == expected_remaining
def test_carnivore_feeds(mocker):
    """
    Test that carnivores feed properly.
    """
    set_animal_params('Carnivore', {'F': 20})
    prey = [Herbivore(1, 10) for _ in range(2)]
    hunter = Carnivore(10, 10)
    # random.random() == 0 guarantees every hunt succeeds.
    mocker.patch('random.random', return_value=0)
    hunter.feeds(prey)
    assert all(animal.dead for animal in prey)
def test_carnivore_weight_change(mocker):
    """
    Test that carnivore weight changes correctly after feeding.
    """
    set_animal_params('Carnivore', {'F': 20})
    prey = [Herbivore(1, 10) for _ in range(2)]
    hunter = Carnivore(10, 10)
    # random.random() == 0 guarantees every hunt succeeds.
    mocker.patch('random.random', return_value=0)
    expected = hunter.weight
    hunter.feeds(prey)
    # Each kill adds beta * prey_weight to the hunter.
    for eaten in prey:
        expected += hunter.guideline_params["beta"] * eaten.weight
    assert expected == hunter.weight
| 28.716535 | 84 | 0.66164 |
acf6522d98bee43dcbf702e410e10369b5c16288 | 7,270 | py | Python | tests/sentry/auth/test_superuser.py | pierredup/sentry | 0145e4b3bc0e775bf3482fe65f5e1a689d0dbb80 | [
"BSD-3-Clause"
] | null | null | null | tests/sentry/auth/test_superuser.py | pierredup/sentry | 0145e4b3bc0e775bf3482fe65f5e1a689d0dbb80 | [
"BSD-3-Clause"
] | null | null | null | tests/sentry/auth/test_superuser.py | pierredup/sentry | 0145e4b3bc0e775bf3482fe65f5e1a689d0dbb80 | [
"BSD-3-Clause"
] | null | null | null | from __future__ import absolute_import
import six
from datetime import timedelta
from django.contrib.auth.models import AnonymousUser
from django.core import signing
from django.utils import timezone
from sentry.utils.compat.mock import Mock
from sentry.auth.superuser import (
COOKIE_DOMAIN,
COOKIE_HTTPONLY,
COOKIE_NAME,
COOKIE_PATH,
COOKIE_SALT,
COOKIE_SECURE,
IDLE_MAX_AGE,
MAX_AGE,
SESSION_KEY,
Superuser,
)
from sentry.middleware.superuser import SuperuserMiddleware
from sentry.models import User
from sentry.testutils import TestCase
from sentry.utils.auth import mark_sso_complete
UNSET = object()
class SuperuserTestCase(TestCase):
def setUp(self):
super(SuperuserTestCase, self).setUp()
self.current_datetime = timezone.now()
self.default_token = "abcdefghjiklmnog"
def build_request(
self,
cookie_token=UNSET,
session_token=UNSET,
expires=UNSET,
idle_expires=UNSET,
uid=UNSET,
session_data=True,
user=None,
):
if user is None:
user = self.create_user("foo@example.com", is_superuser=True)
current_datetime = self.current_datetime
request = self.make_request(user=user)
if cookie_token is not None:
request.COOKIES[COOKIE_NAME] = signing.get_cookie_signer(
salt=COOKIE_NAME + COOKIE_SALT
).sign(self.default_token if cookie_token is UNSET else cookie_token)
if session_data:
request.session[SESSION_KEY] = {
"exp": (
current_datetime + timedelta(hours=6) if expires is UNSET else expires
).strftime("%s"),
"idl": (
current_datetime + timedelta(minutes=15)
if idle_expires is UNSET
else idle_expires
).strftime("%s"),
"tok": self.default_token if session_token is UNSET else session_token,
"uid": six.text_type(user.id) if uid is UNSET else uid,
}
return request
def test_ips(self):
user = User(is_superuser=True)
request = self.make_request(user=user)
request.META["REMOTE_ADDR"] = "10.0.0.1"
# no ips = any host
superuser = Superuser(request, allowed_ips=())
superuser.set_logged_in(request.user)
assert superuser.is_active is True
superuser = Superuser(request, allowed_ips=("127.0.0.1",))
superuser.set_logged_in(request.user)
assert superuser.is_active is False
superuser = Superuser(request, allowed_ips=("10.0.0.1",))
superuser.set_logged_in(request.user)
assert superuser.is_active is True
def test_sso(self):
user = User(is_superuser=True)
request = self.make_request(user=user)
# no ips = any host
superuser = Superuser(request, org_id=None)
superuser.set_logged_in(request.user)
assert superuser.is_active is True
superuser = Superuser(request, org_id=1)
superuser.set_logged_in(request.user)
assert superuser.is_active is False
mark_sso_complete(request, 1)
superuser = Superuser(request, org_id=1)
superuser.set_logged_in(request.user)
assert superuser.is_active is True
def test_valid_data(self):
request = self.build_request()
superuser = Superuser(request, allowed_ips=())
assert superuser.is_active is True
def test_missing_cookie(self):
request = self.build_request(cookie_token=None)
superuser = Superuser(request, allowed_ips=())
assert superuser.is_active is False
def test_invalid_cookie_token(self):
request = self.build_request(cookie_token="foobar")
superuser = Superuser(request, allowed_ips=())
assert superuser.is_active is False
def test_invalid_session_token(self):
request = self.build_request(session_token="foobar")
superuser = Superuser(request, allowed_ips=())
assert superuser.is_active is False
def test_missing_data(self):
request = self.build_request(session_data=False)
superuser = Superuser(request, allowed_ips=())
assert superuser.is_active is False
def test_invalid_uid(self):
request = self.build_request(uid=-1)
superuser = Superuser(request, allowed_ips=())
assert superuser.is_active is False
def test_expired(self):
request = self.build_request(expires=self.current_datetime)
superuser = Superuser(request, allowed_ips=())
assert superuser.is_active is False
def test_idle_expired(self):
request = self.build_request(idle_expires=self.current_datetime)
superuser = Superuser(request, allowed_ips=())
assert superuser.is_active is False
def test_login_saves_session(self):
user = self.create_user("foo@example.com", is_superuser=True)
request = self.make_request()
superuser = Superuser(request, allowed_ips=(), current_datetime=self.current_datetime)
superuser.set_logged_in(user, current_datetime=self.current_datetime)
# request.user wasn't set
assert not superuser.is_active
request.user = user
assert superuser.is_active
data = request.session.get(SESSION_KEY)
assert data
assert data["exp"] == (self.current_datetime + MAX_AGE).strftime("%s")
assert data["idl"] == (self.current_datetime + IDLE_MAX_AGE).strftime("%s")
assert len(data["tok"]) == 12
assert data["uid"] == six.text_type(user.id)
def test_logout_clears_session(self):
request = self.build_request()
superuser = Superuser(request, allowed_ips=(), current_datetime=self.current_datetime)
superuser.set_logged_out()
assert not superuser.is_active
assert not request.session.get(SESSION_KEY)
def test_middleware_as_superuser(self):
        """SuperuserMiddleware attaches superuser state and re-signs the cookie.

        process_request() must populate ``request.superuser`` /
        ``request.is_superuser``; process_response() must write the signed
        superuser cookie with the configured security attributes.
        """
        request = self.build_request()
        # Remove attributes the request fixture pre-populates so the
        # middleware is what actually sets them.
        delattr(request, "superuser")
        delattr(request, "is_superuser")
        middleware = SuperuserMiddleware()
        middleware.process_request(request)
        assert request.superuser.is_active
        assert request.is_superuser()
        response = Mock()
        middleware.process_response(request, response)
        # Cookie must carry the active token and all configured flags.
        response.set_signed_cookie.assert_called_once_with(
            COOKIE_NAME,
            request.superuser.token,
            salt=COOKIE_SALT,
            max_age=None,
            # COOKIE_SECURE=None means "mirror the request's scheme".
            secure=request.is_secure() if COOKIE_SECURE is None else COOKIE_SECURE,
            httponly=COOKIE_HTTPONLY,
            path=COOKIE_PATH,
            domain=COOKIE_DOMAIN,
        )
def test_changed_user(self):
        """Swapping request.user after validation deactivates superuser."""
        request = self.build_request()
        superuser = Superuser(request, allowed_ips=())
        assert superuser.is_active
        # anonymous
        request.user = AnonymousUser()
        assert not superuser.is_active
        # a non-superuser
        request.user = self.create_user("baz@example.com")
        assert not superuser.is_active
        # a superuser
        request.user.update(is_superuser=True)
        # NOTE(review): still inactive even with is_superuser=True —
        # presumably because the session uid no longer matches this new
        # user; confirm against the Superuser implementation.
        assert not superuser.is_active
| 34.131455 | 94 | 0.657772 |
acf652a8a74d7c4fac2e548464e53429eaefd1f2 | 66 | py | Python | Chapter08/mod3.py | LuisPereda/Learning_Python | e89e69346c5584be10d991010f39b59329793ba5 | [
"MIT"
] | null | null | null | Chapter08/mod3.py | LuisPereda/Learning_Python | e89e69346c5584be10d991010f39b59329793ba5 | [
"MIT"
] | null | null | null | Chapter08/mod3.py | LuisPereda/Learning_Python | e89e69346c5584be10d991010f39b59329793ba5 | [
"MIT"
] | null | null | null | from module1 import sum1
x = 12
y = 34
print "Sum is ", sum1(x,y)
| 13.2 | 26 | 0.651515 |
acf652d3fb9743d69b7f7e248ff7a3ee83fc4c50 | 3,817 | py | Python | python/paddle/fluid/tests/unittests/test_elementwise_sub_op.py | limeng357/Paddle | dbd25805c88c48998eb9dc0f4b2ca1fd46326482 | [
"ECL-2.0",
"Apache-2.0"
] | 4 | 2019-04-28T13:29:41.000Z | 2022-01-09T16:54:20.000Z | python/paddle/fluid/tests/unittests/test_elementwise_sub_op.py | limeng357/Paddle | dbd25805c88c48998eb9dc0f4b2ca1fd46326482 | [
"ECL-2.0",
"Apache-2.0"
] | 3 | 2018-04-11T10:25:51.000Z | 2018-04-12T01:17:22.000Z | python/paddle/fluid/tests/unittests/test_elementwise_sub_op.py | limeng357/Paddle | dbd25805c88c48998eb9dc0f4b2ca1fd46326482 | [
"ECL-2.0",
"Apache-2.0"
] | 2 | 2020-11-04T08:01:39.000Z | 2020-11-06T08:33:28.000Z | # Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy as np
from op_test import OpTest
class TestElementwiseOp(OpTest):
    """Base case for elementwise_sub: two same-shape float32 operands."""

    def setUp(self):
        self.op_type = "elementwise_sub"
        lhs = np.random.uniform(0.1, 1, [13, 17]).astype("float32")
        rhs = np.random.uniform(0.1, 1, [13, 17]).astype("float32")
        self.inputs = {'X': lhs, 'Y': rhs}
        # Reference output is plain numpy subtraction.
        self.outputs = {'Out': self.inputs['X'] - self.inputs['Y']}

    def test_check_output(self):
        # Forward pass must match the numpy reference.
        self.check_output()

    def test_check_grad_normal(self):
        # Gradients w.r.t. both operands.
        self.check_grad(['X', 'Y'], 'Out', max_relative_error=0.005)

    def test_check_grad_ingore_x(self):
        # Gradient w.r.t. Y only; X excluded via no_grad_set.
        self.check_grad(
            ['Y'], 'Out', max_relative_error=0.005, no_grad_set=set("X"))

    def test_check_grad_ingore_y(self):
        # Gradient w.r.t. X only; Y excluded via no_grad_set.
        self.check_grad(
            ['X'], 'Out', max_relative_error=0.005, no_grad_set=set('Y'))
class TestElementwiseSubOp_scalar(TestElementwiseOp):
    """Tensor minus a single-element (scalar-like) operand."""

    def setUp(self):
        self.op_type = "elementwise_sub"
        lhs = np.random.rand(2, 3, 4).astype(np.float32)
        rhs = np.random.rand(1).astype(np.float32)
        self.inputs = {'X': lhs, 'Y': rhs}
        self.outputs = {'Out': self.inputs['X'] - self.inputs['Y']}
class TestElementwiseSubOp_Vector(TestElementwiseOp):
    """1-D vector minus 1-D vector of the same length."""

    def setUp(self):
        self.op_type = "elementwise_sub"
        lhs = np.random.random((32, )).astype("float32")
        rhs = np.random.random((32, )).astype("float32")
        self.inputs = {'X': lhs, 'Y': rhs}
        self.outputs = {'Out': self.inputs['X'] - self.inputs['Y']}
class TestElementwiseSubOp_broadcast_0(TestElementwiseOp):
    """Broadcast Y (shape [2]) along axis 0 of X (shape [2, 3, 4])."""

    def setUp(self):
        self.op_type = "elementwise_sub"
        lhs = np.random.rand(2, 3, 4).astype(np.float32)
        rhs = np.random.rand(2).astype(np.float32)
        self.inputs = {'X': lhs, 'Y': rhs}
        self.attrs = {'axis': 0}
        # Reference: Y is expanded to [2, 1, 1] before subtraction.
        self.outputs = {
            'Out': self.inputs['X'] - self.inputs['Y'].reshape(2, 1, 1)
        }
class TestElementwiseSubOp_broadcast_1(TestElementwiseOp):
    """Broadcast Y (shape [3]) along axis 1 of X (shape [2, 3, 4])."""

    def setUp(self):
        self.op_type = "elementwise_sub"
        lhs = np.random.rand(2, 3, 4).astype(np.float32)
        rhs = np.random.rand(3).astype(np.float32)
        self.inputs = {'X': lhs, 'Y': rhs}
        self.attrs = {'axis': 1}
        # Reference: Y is expanded to [1, 3, 1] before subtraction.
        self.outputs = {
            'Out': self.inputs['X'] - self.inputs['Y'].reshape(1, 3, 1)
        }
class TestElementwiseSubOp_broadcast_2(TestElementwiseOp):
    """Broadcast Y (shape [4]) along the trailing axis (no explicit axis attr)."""

    def setUp(self):
        self.op_type = "elementwise_sub"
        lhs = np.random.rand(2, 3, 4).astype(np.float32)
        rhs = np.random.rand(4).astype(np.float32)
        self.inputs = {'X': lhs, 'Y': rhs}
        # Reference: Y is expanded to [1, 1, 4] before subtraction.
        self.outputs = {
            'Out': self.inputs['X'] - self.inputs['Y'].reshape(1, 1, 4)
        }
class TestElementwiseSubOp_broadcast_3(TestElementwiseOp):
    """Broadcast a 2-D Y (shape [3, 4]) into the middle axes of a 4-D X."""

    def setUp(self):
        self.op_type = "elementwise_sub"
        lhs = np.random.rand(2, 3, 4, 5).astype(np.float32)
        rhs = np.random.rand(3, 4).astype(np.float32)
        self.inputs = {'X': lhs, 'Y': rhs}
        self.attrs = {'axis': 1}
        # Reference: Y is expanded to [1, 3, 4, 1] before subtraction.
        self.outputs = {
            'Out': self.inputs['X'] - self.inputs['Y'].reshape(1, 3, 4, 1)
        }
if __name__ == '__main__':
    # Run the whole elementwise_sub test suite when executed as a script.
    unittest.main()
| 31.808333 | 74 | 0.596542 |
Subsets and Splits
No community queries yet.
The top public SQL queries from the community will appear here once available.