hexsha
stringlengths 40
40
| size
int64 1
1.03M
| ext
stringclasses 10
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 3
239
| max_stars_repo_name
stringlengths 5
130
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
239
| max_issues_repo_name
stringlengths 5
130
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
67k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
239
| max_forks_repo_name
stringlengths 5
130
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 1
1.03M
| avg_line_length
float64 1
958k
| max_line_length
int64 1
1.03M
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4a0551500b3c2d01b7a509b573d09f5adffb97a6
| 2,982
|
py
|
Python
|
test/calibration/experiments/test_rough_frequency.py
|
LaurinFischer/qiskit-experiments
|
d8202f4eeb76df04855b024c1127a7d901a34da3
|
[
"Apache-2.0"
] | 1
|
2022-03-31T06:56:04.000Z
|
2022-03-31T06:56:04.000Z
|
test/calibration/experiments/test_rough_frequency.py
|
matteoacrossi/qiskit-experiments
|
44d249ff2ec2e90cd630431f66e560528572815e
|
[
"Apache-2.0"
] | null | null | null |
test/calibration/experiments/test_rough_frequency.py
|
matteoacrossi/qiskit-experiments
|
44d249ff2ec2e90cd630431f66e560528572815e
|
[
"Apache-2.0"
] | null | null | null |
# This code is part of Qiskit.
#
# (C) Copyright IBM 2021.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""Rough frequency calibration tests."""
from test.base import QiskitExperimentsTestCase
from test.test_qubit_spectroscopy import SpectroscopyBackend
import numpy as np
from qiskit.test.mock import FakeArmonk
from qiskit_experiments.library import RoughFrequencyCal
from qiskit_experiments.calibration_management import BackendCalibrations
from qiskit_experiments.calibration_management.basis_gate_library import FixedFrequencyTransmon
class TestRoughFrequency(QiskitExperimentsTestCase):
    """Tests for the rough frequency calibration experiment."""

    def test_init(self):
        """Verify that constructor arguments are stored on the experiment."""
        target_qubit = 1
        calibrations = BackendCalibrations(FakeArmonk())
        scan_values = [1, 2, 3]
        experiment = RoughFrequencyCal(
            target_qubit,
            calibrations,
            scan_values,
            unit="kHz",
            auto_update=False,
            absolute=False,
        )
        # Frequencies given in kHz should be stored internally in Hz.
        self.assertEqual(experiment.physical_qubits, (target_qubit,))
        self.assertEqual(experiment._frequencies, [1000, 2000, 3000])
        self.assertEqual(experiment._absolute, False)
        self.assertEqual(experiment.auto_update, False)

    def test_update_calibrations(self):
        """Verify that running the experiment updates BackendCalibrations."""
        armonk_freq = FakeArmonk().defaults().qubit_freq_est[0]
        backend = SpectroscopyBackend(freq_offset=5e6, line_width=2e6)
        backend.defaults().qubit_freq_est = [armonk_freq, armonk_freq]
        library = FixedFrequencyTransmon(basis_gates=["x", "sx"])
        cals = BackendCalibrations(FakeArmonk(), library=library)
        # Before running, the calibrated frequency matches the backend default.
        initial_freq = cals.get_parameter_value(cals.__qubit_freq_parameter__, (0,))
        self.assertEqual(initial_freq, armonk_freq)
        scan = np.linspace(armonk_freq - 10.0e6, armonk_freq + 10.0e6, 21)
        RoughFrequencyCal(0, cals, scan).run(backend).block_for_results()
        # The spectroscopy backend shifts the resonance by 5 MHz.
        updated_freq = cals.get_parameter_value(cals.__qubit_freq_parameter__, (0,))
        self.assertTrue(abs(updated_freq - armonk_freq - 5e6) < 1e6)

    def test_experiment_config(self):
        """Verify round-tripping the experiment through its config."""
        cals = BackendCalibrations(FakeArmonk())
        exp = RoughFrequencyCal(0, cals, [1, 2, 3])
        loaded_exp = RoughFrequencyCal.from_config(exp.config)
        self.assertNotEqual(exp, loaded_exp)
        self.assertTrue(self.experiments_equiv(exp, loaded_exp))
| 38.230769
| 95
| 0.716968
|
4a0551ba8e8175e4ae15d924ce4f3e54f3e57618
| 7,996
|
py
|
Python
|
week5/retrieval.py
|
IanRiera/MCV_Museum-Painting-Retrieval
|
655db26107b81810d485de7d308eefb7322992fc
|
[
"MIT"
] | null | null | null |
week5/retrieval.py
|
IanRiera/MCV_Museum-Painting-Retrieval
|
655db26107b81810d485de7d308eefb7322992fc
|
[
"MIT"
] | 1
|
2020-10-21T16:08:40.000Z
|
2020-10-21T16:08:40.000Z
|
week5/retrieval.py
|
IanRiera/MCV_Museum-Painting-Retrieval
|
655db26107b81810d485de7d308eefb7322992fc
|
[
"MIT"
] | null | null | null |
import os
import cv2 as cv
import numpy as np
from tqdm import tqdm
import os
import multiprocessing.dummy as mp
from functools import partial
from itertools import repeat
import week5.masks as masks
import week5.histograms as histograms
import week5.text_boxes as text_boxes_detection
import week5.noise_removal as noise
import week5.utils as utils
import week5.feature_descriptors as feature_descriptors
import week5.image_to_text as text_detection
def image_to_paintings(image_path, params):
    """Split one query image into its paintings and optionally clean them up.

    Depending on ``params['augmentation']`` the background is removed
    (possibly with rotation handling), each painting is denoised, and text
    boxes are removed and recorded.  Any recognized text is appended to a
    per-image ``.txt`` file in the results directory.

    Returns ``[paintings, paintings_coords_angle, text_boxes]``.
    """
    img = cv.imread(image_path)
    image_id = utils.get_image_id(image_path)

    # Defaults when no augmentation is configured: the whole image is one
    # painting, with no text box and a dummy bounding box.
    paintings = [img]
    text_boxes = [None]
    paintings_coords = [[0, 0, 0, 0]]
    paintings_coords_angle = None

    augmentation = params['augmentation']
    if augmentation is not None:
        if augmentation['bg']:
            if augmentation['rotated']:
                paintings, paintings_coords, paintings_coords_angle = \
                    masks.remove_bg_rotate(img, params, image_id)
            else:
                paintings, paintings_coords = masks.remove_bg(img, params, image_id)
        if augmentation['noise']:
            paintings = noise.denoise_paintings(paintings, params, image_id)
        if augmentation['text']:
            paintings, text_boxes = text_boxes_detection.remove_text(paintings, params, image_id)

    # Append any recognized text to the per-image results file.
    for idx, painting in enumerate(paintings):
        box = text_boxes[idx]
        if box is None:
            continue
        recognized = text_detection.get_text(painting, box)
        out_path = os.path.join(params['paths']['results'], '{}.txt'.format(image_id))
        with open(out_path, "a+") as f:
            f.write(recognized + "\n")

    return [paintings, paintings_coords_angle, text_boxes]
def get_k_images(params, k):
    """Return, for every query image, the top-``k`` database matches per painting.

    The distances of every enabled descriptor (color, texture, text) are
    summed and the ``k`` nearest database images are kept.  NOTE(review):
    the ORB branch returns early with its own matches, skipping the text
    descriptor and the distance-fusion step entirely — confirm this is the
    intended precedence.
    """
    pool_processes = 4  # worker count; multiprocessing.dummy gives a thread pool
    paintings_predicted_list = []
    paintings_coords_angle_list = []  # NOTE(review): assigned but never used
    with mp.Pool(processes=pool_processes) as p:
        image_to_paintings_partial = partial(image_to_paintings, params=params)
        print('---Extracting paintings from images (optional: removing background or text)---')
        # zip(*...) transposes the per-image [paintings, angle, text_boxes]
        # results into three parallel tuples, one entry per query image.
        [paintings, paintings_coords_angle, text_boxes] = zip(*list(tqdm(p.imap(image_to_paintings_partial,
                                                                                [path for path in params['lists']['query']]),
                                                                         total=len(params['lists']['query']))))
        # NOTE(review): zip() always yields a tuple, so this check is always
        # true; the saved pickle may contain None entries for non-rotated images.
        if paintings_coords_angle is not None:
            utils.save_pickle(os.path.join(params['paths']['results'], 'frames.pkl'), list(paintings_coords_angle))
        all_distances = []
        if params['color'] is not None:
            compute_bbdd_histograms_partial = partial(histograms.compute_bbdd_histograms,
                                                      descriptor=params['color']['descriptor'])
            print('---Computing color bbdd_histograms---')
            bbdd_histograms = list(tqdm(p.imap(compute_bbdd_histograms_partial,
                                               [path for path in params['lists']['bbdd']]),
                                        total=len(params['lists']['bbdd'])))
            print('---Computing color query_histograms and distances---')
            color_distances = histograms.compute_distances(paintings, text_boxes, bbdd_histograms,
                                                           descriptor=params['color']['descriptor'],
                                                           metric=params['color']['metric'],
                                                           weight=params['color']['weight'])
            all_distances.append(color_distances)
        if params['texture'] is not None:
            # Several texture descriptors may be configured; each contributes
            # its own weighted distance matrix to the fusion below.
            for texture_id, texture_descriptor in enumerate(params['texture']['descriptor']):
                compute_bbdd_histograms_partial = partial(histograms.compute_bbdd_histograms,
                                                          descriptor=texture_descriptor)
                print('...Computing texture bbdd_histograms...')
                bbdd_histograms = list(tqdm(p.imap(compute_bbdd_histograms_partial,
                                                   [path for path in params['lists']['bbdd']]),
                                            total=len(params['lists']['bbdd'])))
                print('---Computing texture query_histograms and distances---')
                texture_distances = histograms.compute_distances(paintings, text_boxes, bbdd_histograms,
                                                                 descriptor=texture_descriptor,
                                                                 metric=params['texture']['metric'][texture_id],
                                                                 weight=params['texture']['weight'][texture_id])
                all_distances.append(texture_distances)
        if params['orb'] is not None:
            print('---Computing ORB bbdd_histograms---')
            bbdd_descriptors = list(tqdm(p.imap(feature_descriptors.compute_bbdd_orb_descriptors,
                                                [path for path in params['lists']['bbdd']]),
                                         total=len(params['lists']['bbdd'])))
            predicted_paintings_all = []
            print('---Computing ORB query_histograms and distances---')
            for image_id, paintings_image in tqdm(enumerate(paintings), total=len(paintings)):
                predicted_paintings_image = []
                text_boxes_image = text_boxes[image_id]
                for painting_id, painting in enumerate(paintings_image):
                    text_box = text_boxes_image[painting_id]
                    painting_kp, painting_des = feature_descriptors.orb_descriptor(painting, text_box)
                    if len(painting_kp) > 0:
                        match_descriptors_partial = partial(feature_descriptors.match_descriptors, query_des=painting_des,
                                                            params=params)
                        matches = p.map(match_descriptors_partial, [kp_des for kp_des in bbdd_descriptors])
                        predicted_paintings = feature_descriptors.get_top_matches(matches, params)
                        if predicted_paintings is not None:
                            predicted_paintings_image.append(predicted_paintings[:k])
                        else:
                            # No database match passed the threshold: "unknown".
                            predicted_paintings_image.append([-1])
                    else:
                        # No keypoints were found on this painting at all.
                        print('???????????????????????????????????????????????????????????????????????')
                        print(f'Image ID: {image_id}, Painting ID: {painting_id}')
                        predicted_paintings_image.append([-1])
                predicted_paintings_all.append(predicted_paintings_image)
            # Early return: ORB results are used as-is, no fusion.
            return predicted_paintings_all
        if params['text'] is not None:
            print('...Computing text histograms and distances...')
            bbdd_texts = text_detection.get_bbdd_texts(params['paths']['bbdd'])
            text_distances = text_detection.compute_distances(paintings, text_boxes, bbdd_texts,
                                                              metric=params['text']['metric'],
                                                              weight=params['text']['weight'])
            all_distances.append(text_distances)
        # Fuse the per-descriptor distance matrices by summation and keep the
        # k nearest database indices for every painting of every query image.
        for q in range(len(paintings)):
            qlist = []
            for sq in range(len(paintings[q])):
                dist = np.array(all_distances[0][q][sq])
                for f in range(1, len(all_distances)):
                    dist += all_distances[f][q][sq]
                nearest_indices = np.argsort(dist)[:k]
                result_list = [index for index in nearest_indices]
                qlist.append(result_list)
            paintings_predicted_list.append(qlist)
    return paintings_predicted_list
| 48.168675
| 125
| 0.557029
|
4a05535545114bf5d1525030a078f8cf91148c73
| 570
|
py
|
Python
|
packages/python/plotly/plotly/validators/scattergl/marker/_color.py
|
mastermind88/plotly.py
|
efa70710df1af22958e1be080e105130042f1839
|
[
"MIT"
] | null | null | null |
packages/python/plotly/plotly/validators/scattergl/marker/_color.py
|
mastermind88/plotly.py
|
efa70710df1af22958e1be080e105130042f1839
|
[
"MIT"
] | null | null | null |
packages/python/plotly/plotly/validators/scattergl/marker/_color.py
|
mastermind88/plotly.py
|
efa70710df1af22958e1be080e105130042f1839
|
[
"MIT"
] | null | null | null |
import _plotly_utils.basevalidators
class ColorValidator(_plotly_utils.basevalidators.ColorValidator):
    """Validator for the ``scattergl.marker.color`` property."""

    def __init__(self, plotly_name="color", parent_name="scattergl.marker", **kwargs):
        # Pull the overridable defaults out of kwargs before forwarding the rest.
        array_ok = kwargs.pop("array_ok", True)
        edit_type = kwargs.pop("edit_type", "calc")
        colorscale_path = kwargs.pop("colorscale_path", "scattergl.marker.colorscale")
        super().__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            array_ok=array_ok,
            edit_type=edit_type,
            colorscale_path=colorscale_path,
            **kwargs,
        )
| 35.625
| 86
| 0.633333
|
4a0553d9c9cae084e1c40ff6fd68da6393584397
| 4,240
|
py
|
Python
|
tests/parser_tests/test_parserTests.py
|
clokep/mwcomposerfromhell
|
02ba160ad55ee7fc1b69834cd5cb256e98a52648
|
[
"0BSD"
] | 3
|
2019-10-03T06:46:19.000Z
|
2021-09-25T13:39:32.000Z
|
tests/parser_tests/test_parserTests.py
|
clokep/mwcomposerfromhell
|
02ba160ad55ee7fc1b69834cd5cb256e98a52648
|
[
"0BSD"
] | 1
|
2020-03-18T07:24:28.000Z
|
2020-05-07T11:58:59.000Z
|
tests/parser_tests/test_parserTests.py
|
clokep/mwcomposerfromhell
|
02ba160ad55ee7fc1b69834cd5cb256e98a52648
|
[
"0BSD"
] | 1
|
2021-07-05T11:30:13.000Z
|
2021-07-05T11:30:13.000Z
|
from collections import defaultdict
from datetime import datetime
from pathlib import Path
import mwparserfromhell
import pytest
from mwcomposerfromhell import ArticleResolver, Namespace, WikicodeToHtmlComposer
from mwcomposerfromhell.parser_tests_parser import MediaWikiParserTestsParser
from tests import patch_datetime
# Only a subset of tests pass right now; whitelist.txt lists their names,
# one test ID per line.
with open(Path(__file__).parent / "whitelist.txt") as f:
    WHITELIST = {line.strip() for line in f}
# Whether to only run the tests above or to run them all and skip failing tests.
ONLY_RUN_WHITELIST = False
# Some tests have a standard HTML output, while others differ based on the
# parser. Prefer the standard output, then the PHP parser's.
PREFERRED_HTML = (
    "html",
    "html+tidy",
    "html/*",
    "html/php",
    "html/php+tidy",
    "html/parsoid",
)
# A known object, used to distinguish "key absent" from "value is None".
_SENTINEL = object()
# Set the current time to a particular date (1970-01-01 00:02:03 UTC
# presumably — confirm against patch_datetime's implementation).
patch_datetime_fixture = patch_datetime(datetime(1970, 1, 1, 0, 2, 3))
def pytest_generate_tests(metafunc):
    """Auto-generate test cases from parserTests.txt.

    Parses the MediaWiki parser-test corpus, registers its article fixtures
    with an ArticleResolver, and parametrizes ``test_parser_tests`` with one
    case per test that has a usable HTML section.
    """
    with open(Path(__file__).parent.joinpath("parserTests.txt")) as f:
        parser = MediaWikiParserTestsParser(f)
        parser.parse()
    # The arguments that will be passed into each test case.
    argnames = ("wikitext", "html", "resolver", "options", "expected_pass")
    # Namespace -> {Article name -> contents}
    namespaces = defaultdict(Namespace)
    for article_name, article_contents in parser.articles.items():
        namespace, _, article_name = article_name.partition(":")
        # If there's no name, it means that it is in the main namespace.
        if not article_name:
            article_name = namespace
            namespace = ""
        # The articles are inconsistent about the case of the MediaWiki
        # namespace. Hard-code it.
        if namespace.lower() == "mediawiki":
            namespace = "MediaWiki"
        namespaces[namespace][article_name] = mwparserfromhell.parse(article_contents)
    resolver = ArticleResolver("/wiki/", "/index.php")
    for namespace_name, namespace in namespaces.items():
        resolver.add_namespace(namespace_name, namespace)
    # Pull out the necessary info for the tests.
    test_ids = []
    argvalues = []
    for test_case in parser.test_cases:
        # Find the best matching output from this test (see PREFERRED_HTML).
        html = _SENTINEL
        for html_section in PREFERRED_HTML:
            html = test_case.get(html_section, _SENTINEL)
            if html is not _SENTINEL:
                break
        # Ignore tests without HTML.
        if html is _SENTINEL:
            continue
        # Use the test name as the ID.
        test_id = test_case["test"].strip()
        # Whether the test is expected to pass.
        expected_pass = test_id in WHITELIST
        # Sometimes it is useful to only run the whitelisted tests.
        if ONLY_RUN_WHITELIST and not expected_pass:
            continue
        test_ids.append(test_id)
        # Some tests don't have options.
        options = test_case.get("options", {})
        # Add the current test arguments to the list of values.
        argvalues.append(
            (test_case["wikitext"], html, resolver, options, expected_pass)
        )
    metafunc.parametrize(argnames, argvalues, ids=test_ids)
def test_parser_tests(wikitext, html, resolver, options, expected_pass):
    """Run one parserTests.txt case: compose *wikitext* and compare to *html*."""
    if "disable" in options:
        pytest.skip("Skipping test")
    # Parse the incoming article and render it with the shared resolver.
    parsed = mwparserfromhell.parse(wikitext)
    composer = WikicodeToHtmlComposer(
        resolver=resolver,
        red_links=True,
        expand_templates="pst" not in options,
    )
    rendered = composer.compose(parsed)
    # Dump both sides so a failing diff is easy to inspect in pytest output.
    print(repr(rendered))
    print(repr(html))
    try:
        # Trailing whitespace is not significant for the comparison.
        assert rendered.strip() == html.strip()
    except AssertionError:
        if not expected_pass:
            pytest.xfail("Expected fail")
        raise
| 32.615385
| 86
| 0.673821
|
4a0555a4d6410b38f36aea3d4227c1f6ae1ed3b3
| 391
|
py
|
Python
|
django/hypatia/wsgi.py
|
arunchaganty/odd-nails
|
d3667ea666c02b7a71af1c26c4b22b9f4ab4c7c0
|
[
"Apache-2.0"
] | null | null | null |
django/hypatia/wsgi.py
|
arunchaganty/odd-nails
|
d3667ea666c02b7a71af1c26c4b22b9f4ab4c7c0
|
[
"Apache-2.0"
] | 1
|
2016-08-03T12:44:05.000Z
|
2016-08-04T06:10:31.000Z
|
django/hypatia/wsgi.py
|
arunchaganty/hypatia
|
d3667ea666c02b7a71af1c26c4b22b9f4ab4c7c0
|
[
"Apache-2.0"
] | null | null | null |
"""
WSGI config for hypatia project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "hypatia.settings")
application = get_wsgi_application()
| 23
| 78
| 0.787724
|
4a0555abbe09670f29bdff1daf337673e1e3b634
| 332
|
py
|
Python
|
fadoilulmun-im/liniearSearch.py
|
mochhilmi/KumpulanAlgoritma
|
7d92b94ce9e5559b5c3882b7b31dbe672621dce4
|
[
"MIT"
] | 4
|
2020-10-10T12:24:10.000Z
|
2020-10-13T05:32:41.000Z
|
fadoilulmun-im/liniearSearch.py
|
mochhilmi/KumpulanAlgoritma
|
7d92b94ce9e5559b5c3882b7b31dbe672621dce4
|
[
"MIT"
] | 11
|
2020-10-06T11:29:17.000Z
|
2020-10-16T07:50:15.000Z
|
fadoilulmun-im/liniearSearch.py
|
mochhilmi/KumpulanAlgoritma
|
7d92b94ce9e5559b5c3882b7b31dbe672621dce4
|
[
"MIT"
] | 10
|
2020-10-06T11:26:04.000Z
|
2020-10-13T02:27:24.000Z
|
def linierSearch(arr, n, x):
    """Linear search: return the index of the first occurrence of x in
    arr[:n], or False when x is not present.

    Note: a found index may be 0, which is falsy — callers must test the
    result with ``is not False``, not truthiness.
    """
    for i in range(0, n):
        if arr[i] == x:
            return i
    return False


arr = [1, 3, 6, 7, 10, 90]
ygdicari = 90
hasil = linierSearch(arr, len(arr), ygdicari)
# Bug fix: `if hasil:` misreported a match at index 0 as "not found",
# because 0 is falsy. Compare against the False sentinel explicitly.
if hasil is not False:
    print("Elemen di temukan di index ke", hasil)
else:
    print("Elemen yang dicari tidak ada dalam array")
| 19.529412
| 53
| 0.593373
|
4a0555bf4843ab7da52cdbdd1f17e70e99346c16
| 35,410
|
py
|
Python
|
lib/sqlalchemy/sql/crud.py
|
brussee/sqlalchemy
|
5e3357c70e419c244156ac3885b2cf784b5b3fc0
|
[
"MIT"
] | null | null | null |
lib/sqlalchemy/sql/crud.py
|
brussee/sqlalchemy
|
5e3357c70e419c244156ac3885b2cf784b5b3fc0
|
[
"MIT"
] | null | null | null |
lib/sqlalchemy/sql/crud.py
|
brussee/sqlalchemy
|
5e3357c70e419c244156ac3885b2cf784b5b3fc0
|
[
"MIT"
] | null | null | null |
# sql/crud.py
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
"""Functions used by compiler.py to determine the parameters rendered
within INSERT and UPDATE statements.
"""
import functools
import operator
from . import coercions
from . import dml
from . import elements
from . import roles
from .. import exc
from .. import util
# Sentinel placed as a BindParameter's value when a statement is compiled
# without parameter values; execution must then supply the real value.
REQUIRED = util.symbol(
    "REQUIRED",
    """
Placeholder for the value within a :class:`.BindParameter`
which is required to be present when the statement is passed
to :meth:`_engine.Connection.execute`.
This symbol is typically used when a :func:`_expression.insert`
or :func:`_expression.update` statement is compiled without parameter
values present.
""",
)
def _get_crud_params(compiler, stmt, compile_state, **kw):
    """create a set of tuples representing column/string pairs for use
    in an INSERT or UPDATE statement.

    Also generates the Compiled object's postfetch, prefetch, and
    returning column collections, used for default handling and ultimately
    populating the CursorResult's prefetch_cols() and postfetch_cols()
    collections.

    Returns a list of (column, rendered column name, rendered value) tuples.
    """
    # Reset the per-compilation column collections on the compiler.
    compiler.postfetch = []
    compiler.insert_prefetch = []
    compiler.update_prefetch = []
    compiler.returning = []
    # getters - these are normally just column.key,
    # but in the case of mysql multi-table update, the rules for
    # .key must conditionally take tablename into account
    (
        _column_as_key,
        _getattr_col_key,
        _col_bind_name,
    ) = getters = _key_getters_for_crud_column(compiler, stmt, compile_state)
    compiler._key_getters_for_crud_column = getters
    # no parameters in the statement, no parameters in the
    # compiled params - return binds for all columns
    if compiler.column_keys is None and compile_state._no_parameters:
        return [
            (
                c,
                compiler.preparer.format_column(c),
                _create_bind_param(compiler, c, None, required=True),
            )
            for c in stmt.table.columns
        ]
    # Normalize the statement's parameters into ``spd`` (a dict) and
    # ``stmt_parameter_tuples`` (ordered (key, value) pairs), depending on
    # whether this is a multi-values insert, an ordered-values update, or a
    # plain dict of values.
    if compile_state._has_multi_parameters:
        spd = compile_state._multi_parameters[0]
        stmt_parameter_tuples = list(spd.items())
    elif compile_state._ordered_values:
        spd = compile_state._dict_parameters
        stmt_parameter_tuples = compile_state._ordered_values
    elif compile_state._dict_parameters:
        spd = compile_state._dict_parameters
        stmt_parameter_tuples = list(spd.items())
    else:
        stmt_parameter_tuples = spd = None
    # if we have statement parameters - set defaults in the
    # compiled params
    if compiler.column_keys is None:
        parameters = {}
    elif stmt_parameter_tuples:
        # Keys supplied at execution time but not in the statement itself
        # become REQUIRED placeholders.
        parameters = dict(
            (_column_as_key(key), REQUIRED)
            for key in compiler.column_keys
            if key not in spd
        )
    else:
        parameters = dict(
            (_column_as_key(key), REQUIRED) for key in compiler.column_keys
        )
    # create a list of column assignment clauses as tuples
    values = []
    if stmt_parameter_tuples is not None:
        _get_stmt_parameter_tuples_params(
            compiler,
            compile_state,
            parameters,
            stmt_parameter_tuples,
            _column_as_key,
            values,
            kw,
        )
    check_columns = {}
    # special logic that only occurs for multi-table UPDATE
    # statements
    if compile_state.isupdate and compile_state.is_multitable:
        _get_update_multitable_params(
            compiler,
            stmt,
            compile_state,
            stmt_parameter_tuples,
            check_columns,
            _col_bind_name,
            _getattr_col_key,
            values,
            kw,
        )
    if compile_state.isinsert and stmt._select_names:
        # INSERT ... FROM SELECT with explicitly named columns.
        _scan_insert_from_select_cols(
            compiler,
            stmt,
            compile_state,
            parameters,
            _getattr_col_key,
            _column_as_key,
            _col_bind_name,
            check_columns,
            values,
            kw,
        )
    else:
        _scan_cols(
            compiler,
            stmt,
            compile_state,
            parameters,
            _getattr_col_key,
            _column_as_key,
            _col_bind_name,
            check_columns,
            values,
            kw,
        )
    # Any statement-supplied keys that were never consumed by a column scan
    # indicate names that don't correspond to actual columns: raise.
    if parameters and stmt_parameter_tuples:
        check = (
            set(parameters)
            .intersection(_column_as_key(k) for k, v in stmt_parameter_tuples)
            .difference(check_columns)
        )
        if check:
            raise exc.CompileError(
                "Unconsumed column names: %s"
                % (", ".join("%s" % (c,) for c in check))
            )
    if compile_state._has_multi_parameters:
        values = _extend_values_for_multiparams(
            compiler,
            stmt,
            compile_state,
            values,
            _column_as_key,
            kw,
        )
    elif (
        not values
        and compiler.for_executemany
        and compiler.dialect.supports_default_metavalue
    ):
        # convert an "INSERT DEFAULT VALUES"
        # into INSERT (firstcol) VALUES (DEFAULT) which can be turned
        # into an in-place multi values. This supports
        # insert_executemany_returning mode :)
        values = [
            (
                stmt.table.columns[0],
                compiler.preparer.format_column(stmt.table.columns[0]),
                "DEFAULT",
            )
        ]
    return values
def _create_bind_param(
    compiler, col, value, process=True, required=False, name=None, **kw
):
    """Build a crud-flagged BindParameter for *col*.

    When *process* is true (the default), the parameter is compiled and its
    rendered string is returned; otherwise the BindParameter object itself.
    """
    param_name = col.key if name is None else name
    param = elements.BindParameter(
        param_name, value, type_=col.type, required=required
    )
    # Mark as originating from crud parameter generation.
    param._is_crud = True
    if not process:
        return param
    return param._compiler_dispatch(compiler, **kw)
def _handle_values_anonymous_param(compiler, col, value, name, **kw):
    """Render a pre-constructed anonymous BindParameter from a values()
    collection under the conventional crud-style *name*.
    """
    # the insert() and update() constructs as of 1.4 will now produce anonymous
    # bindparam() objects in the values() collections up front when given plain
    # literal values. This is so that cache key behaviors, which need to
    # produce bound parameters in deterministic order without invoking any
    # compilation here, can be applied to these constructs when they include
    # values() (but not yet multi-values, which are not included in caching
    # right now).
    #
    # in order to produce the desired "crud" style name for these parameters,
    # which will also be targetable in engine/default.py through the usual
    # conventions, apply our desired name to these unique parameters by
    # populating the compiler truncated names cache with the desired name,
    # rather than having
    # compiler.visit_bindparam()->compiler._truncated_identifier make up a
    # name. Saves on call counts also.
    # for INSERT/UPDATE that's a CTE, we don't need names to match to
    # external parameters and these would also conflict in the case where
    # multiple insert/update are combined together using CTEs
    is_cte = "visiting_cte" in kw
    if (
        not is_cte
        and value.unique
        and isinstance(value.key, elements._truncated_label)
    ):
        compiler.truncated_names[("bindparam", value.key)] = name
    if value.type._isnull:
        # either unique parameter, or other bound parameters that were
        # passed in directly
        # set type to that of the column unconditionally
        value = value._with_binary_element_type(col.type)
    return value._compiler_dispatch(compiler, **kw)
def _key_getters_for_crud_column(compiler, stmt, compile_state):
    """Return (``_column_as_key``, ``_getattr_col_key``, ``_col_bind_name``)
    callables used to derive dictionary keys and bind-parameter names for
    columns during crud parameter generation.
    """
    if compile_state.isupdate and compile_state._extra_froms:
        # when extra tables are present, refer to the columns
        # in those extra tables as table-qualified, including in
        # dictionaries and when rendering bind param names.
        # the "main" table of the statement remains unqualified,
        # allowing the most compatibility with a non-multi-table
        # statement.
        _et = set(compile_state._extra_froms)
        c_key_role = functools.partial(
            coercions.expect_as_key, roles.DMLColumnRole
        )

        def _column_as_key(key):
            # (table name, key) tuple for columns of the extra tables.
            str_key = c_key_role(key)
            if hasattr(key, "table") and key.table in _et:
                return (key.table.name, str_key)
            else:
                return str_key

        def _getattr_col_key(col):
            if col.table in _et:
                return (col.table.name, col.key)
            else:
                return col.key

        def _col_bind_name(col):
            # bind names for extra-table columns get a table-name prefix.
            if col.table in _et:
                return "%s_%s" % (col.table.name, col.key)
            else:
                return col.key

    else:
        _column_as_key = functools.partial(
            coercions.expect_as_key, roles.DMLColumnRole
        )
        _getattr_col_key = _col_bind_name = operator.attrgetter("key")
    return _column_as_key, _getattr_col_key, _col_bind_name
def _scan_insert_from_select_cols(
    compiler,
    stmt,
    compile_state,
    parameters,
    _getattr_col_key,
    _column_as_key,
    _col_bind_name,
    check_columns,
    values,
    kw,
):
    """Populate *values* for an INSERT ... FROM SELECT statement with
    explicitly named columns, optionally appending default-bearing columns
    to both the column list and the SELECT.
    """
    (
        need_pks,
        implicit_returning,
        implicit_return_defaults,
        postfetch_lastrowid,
    ) = _get_returning_modifiers(compiler, stmt, compile_state)
    cols = [stmt.table.c[_column_as_key(name)] for name in stmt._select_names]
    assert compiler.stack[-1]["selectable"] is stmt
    compiler.stack[-1]["insert_from_select"] = stmt.select
    add_select_cols = []
    if stmt.include_insert_from_select_defaults:
        # Also insert columns with defaults that were not named explicitly.
        col_set = set(cols)
        for col in stmt.table.columns:
            if col not in col_set and col.default:
                cols.append(col)
    for c in cols:
        col_key = _getattr_col_key(c)
        if col_key in parameters and col_key not in check_columns:
            # Column value comes from the SELECT itself (expr is None).
            parameters.pop(col_key)
            values.append((c, compiler.preparer.format_column(c), None))
        else:
            _append_param_insert_select_hasdefault(
                compiler, stmt, c, add_select_cols, kw
            )
    if add_select_cols:
        # Default-bearing columns require extending the SELECT's column list
        # on a fresh (generated) copy of the statement.
        values.extend(add_select_cols)
        ins_from_select = compiler.stack[-1]["insert_from_select"]
        ins_from_select = ins_from_select._generate()
        ins_from_select._raw_columns = tuple(
            ins_from_select._raw_columns
        ) + tuple(expr for col, col_expr, expr in add_select_cols)
        compiler.stack[-1]["insert_from_select"] = ins_from_select
def _scan_cols(
    compiler,
    stmt,
    compile_state,
    parameters,
    _getattr_col_key,
    _column_as_key,
    _col_bind_name,
    check_columns,
    values,
    kw,
):
    """Scan every column of the target table and append the appropriate
    (column, name, value) tuple to *values*, dispatching on whether a
    parameter was supplied and on insert vs. update semantics.
    """
    (
        need_pks,
        implicit_returning,
        implicit_return_defaults,
        postfetch_lastrowid,
    ) = _get_returning_modifiers(compiler, stmt, compile_state)
    if compile_state._parameter_ordering:
        # Honor update(...).ordered_values(): scan the explicitly ordered
        # columns first, then the remaining table columns.
        parameter_ordering = [
            _column_as_key(key) for key in compile_state._parameter_ordering
        ]
        ordered_keys = set(parameter_ordering)
        cols = [
            stmt.table.c[key]
            for key in parameter_ordering
            if isinstance(key, str) and key in stmt.table.c
        ] + [c for c in stmt.table.c if c.key not in ordered_keys]
    else:
        cols = stmt.table.columns
    for c in cols:
        # scan through every column in the target table
        col_key = _getattr_col_key(c)
        if col_key in parameters and col_key not in check_columns:
            # parameter is present for the column. use that.
            _append_param_parameter(
                compiler,
                stmt,
                compile_state,
                c,
                col_key,
                parameters,
                _col_bind_name,
                implicit_returning,
                implicit_return_defaults,
                values,
                kw,
            )
        elif compile_state.isinsert:
            # no parameter is present and it's an insert.
            if c.primary_key and need_pks:
                # it's a primary key column, it will need to be generated by a
                # default generator of some kind, and the statement expects
                # inserted_primary_key to be available.
                if implicit_returning:
                    # we can use RETURNING, find out how to invoke this
                    # column and get the value where RETURNING is an option.
                    # we can inline server-side functions in this case.
                    _append_param_insert_pk_returning(
                        compiler, stmt, c, values, kw
                    )
                else:
                    # otherwise, find out how to invoke this column
                    # and get its value where RETURNING is not an option.
                    # if we have to invoke a server-side function, we need
                    # to pre-execute it. or if this is a straight
                    # autoincrement column and the dialect supports it
                    # we can use cursor.lastrowid.
                    _append_param_insert_pk_no_returning(
                        compiler, stmt, c, values, kw
                    )
            elif c.default is not None:
                # column has a default, but it's not a pk column, or it is but
                # we don't need to get the pk back.
                _append_param_insert_hasdefault(
                    compiler, stmt, c, implicit_return_defaults, values, kw
                )
            elif c.server_default is not None:
                # column has a DDL-level default, and is either not a pk
                # column or we don't need the pk.
                if implicit_return_defaults and c in implicit_return_defaults:
                    compiler.returning.append(c)
                elif not c.primary_key:
                    compiler.postfetch.append(c)
            elif implicit_return_defaults and c in implicit_return_defaults:
                compiler.returning.append(c)
            elif (
                c.primary_key
                and c is not stmt.table._autoincrement_column
                and not c.nullable
            ):
                # Non-nullable PK with no way to generate a value: warn.
                _warn_pk_with_no_anticipated_value(c)
        elif compile_state.isupdate:
            # no parameter is present and it's an update.
            _append_param_update(
                compiler,
                compile_state,
                stmt,
                c,
                implicit_return_defaults,
                values,
                kw,
            )
def _append_param_parameter(
    compiler,
    stmt,
    compile_state,
    c,
    col_key,
    parameters,
    _col_bind_name,
    implicit_returning,
    implicit_return_defaults,
    values,
    kw,
):
    """Render the value supplied in *parameters* for column *c* and append
    the resulting (column, name, rendered value) tuple to *values*, updating
    the compiler's returning/postfetch collections as appropriate.
    """
    value = parameters.pop(col_key)
    col_value = compiler.preparer.format_column(
        c, use_table=compile_state.include_table_with_column_exprs
    )
    if coercions._is_literal(value):
        # Plain Python value: render as a bind parameter.  Multi-values
        # inserts suffix the first set's bind names with "_m0".
        value = _create_bind_param(
            compiler,
            c,
            value,
            required=value is REQUIRED,
            name=_col_bind_name(c)
            if not compile_state._has_multi_parameters
            else "%s_m0" % _col_bind_name(c),
            **kw,
        )
    elif value._is_bind_parameter:
        # Pre-constructed (anonymous) bindparam from values().
        value = _handle_values_anonymous_param(
            compiler,
            c,
            value,
            name=_col_bind_name(c)
            if not compile_state._has_multi_parameters
            else "%s_m0" % _col_bind_name(c),
            **kw,
        )
    else:
        # value is a SQL expression
        value = compiler.process(value.self_group(), **kw)
        if compile_state.isupdate:
            if implicit_return_defaults and c in implicit_return_defaults:
                compiler.returning.append(c)
            else:
                compiler.postfetch.append(c)
        else:
            if c.primary_key:
                if implicit_returning:
                    compiler.returning.append(c)
                elif compiler.dialect.postfetch_lastrowid:
                    compiler.postfetch_lastrowid = True
            elif implicit_return_defaults and c in implicit_return_defaults:
                compiler.returning.append(c)
            else:
                # postfetch specifically means, "we can SELECT the row we just
                # inserted by primary key to get back the server generated
                # defaults". so by definition this can't be used to get the
                # primary key value back, because we need to have it ahead of
                # time.
                compiler.postfetch.append(c)
    values.append((c, col_value, value))
def _append_param_insert_pk_returning(compiler, stmt, c, values, kw):
    """Create a primary key expression in the INSERT statement where
    we want to populate result.inserted_primary_key and RETURNING
    is available.
    """
    if c.default is not None:
        if c.default.is_sequence:
            # render "nextval(...)" inline only when the dialect supports
            # sequences and this one is non-optional
            if compiler.dialect.supports_sequences and (
                not c.default.optional
                or not compiler.dialect.sequences_optional
            ):
                values.append(
                    (
                        c,
                        compiler.preparer.format_column(c),
                        compiler.process(c.default, **kw),
                    )
                )
            # either way, the generated value comes back via RETURNING
            compiler.returning.append(c)
        elif c.default.is_clause_element:
            # server-evaluated SQL expression default; also RETURNING
            values.append(
                (
                    c,
                    compiler.preparer.format_column(c),
                    compiler.process(c.default.arg.self_group(), **kw),
                )
            )
            compiler.returning.append(c)
        else:
            # client side default. OK we can't use RETURNING, need to
            # do a "prefetch", which in fact fetches the default value
            # on the Python side
            values.append(
                (
                    c,
                    compiler.preparer.format_column(c),
                    _create_insert_prefetch_bind_param(compiler, c, **kw),
                )
            )
    elif c is stmt.table._autoincrement_column or c.server_default is not None:
        # value is generated server-side; retrieve it via RETURNING
        compiler.returning.append(c)
    elif not c.nullable:
        # no .default, no .server_default, not autoincrement, we have
        # no indication this primary key column will have any value
        _warn_pk_with_no_anticipated_value(c)
def _append_param_insert_pk_no_returning(compiler, stmt, c, values, kw):
    """Create a primary key expression in the INSERT statement where
    we want to populate result.inserted_primary_key and we cannot use
    RETURNING.

    Depending on the kind of default here we may create a bound parameter
    in the INSERT statement and pre-execute a default generation function,
    or we may use cursor.lastrowid if supported by the dialect.
    """

    if (
        # column has a Python-side default
        c.default is not None
        and (
            # and it either is not a sequence, or it is and we support
            # sequences and want to invoke it
            not c.default.is_sequence
            or (
                compiler.dialect.supports_sequences
                and (
                    not c.default.optional
                    or not compiler.dialect.sequences_optional
                )
            )
        )
    ) or (
        # column is the "autoincrement column"
        c is stmt.table._autoincrement_column
        and (
            # dialect can't use cursor.lastrowid
            not compiler.dialect.postfetch_lastrowid
            and (
                # column has a Sequence and we support those
                (
                    c.default is not None
                    and c.default.is_sequence
                    and compiler.dialect.supports_sequences
                )
                or
                # column has no default on it, but dialect can run the
                # "autoincrement" mechanism explicitly, e.g. PostgreSQL
                # SERIAL we know the sequence name
                (
                    c.default is None
                    and compiler.dialect.preexecute_autoincrement_sequences
                )
            )
        )
    ):
        # do a pre-execute of the default
        values.append(
            (
                c,
                compiler.preparer.format_column(c),
                _create_insert_prefetch_bind_param(compiler, c, **kw),
            )
        )
    elif (
        c.default is None
        and c.server_default is None
        and not c.nullable
        and c is not stmt.table._autoincrement_column
    ):
        # no .default, no .server_default, not autoincrement, we have
        # no indication this primary key column will have any value
        _warn_pk_with_no_anticipated_value(c)
    elif compiler.dialect.postfetch_lastrowid:
        # finally, where it seems like there will be a generated primary key
        # value and we haven't set up any other way to fetch it, and the
        # dialect supports cursor.lastrowid, switch on the lastrowid flag so
        # that the DefaultExecutionContext calls upon cursor.lastrowid
        compiler.postfetch_lastrowid = True
def _append_param_insert_hasdefault(
    compiler, stmt, c, implicit_return_defaults, values, kw
):
    # Render an INSERT value for a non-PK column ``c`` that carries a
    # Python-side or SQL-expression default, arranging for the generated
    # value to be fetched back via RETURNING or post-SELECT where needed.
    if c.default.is_sequence:
        if compiler.dialect.supports_sequences and (
            not c.default.optional or not compiler.dialect.sequences_optional
        ):
            values.append(
                (
                    c,
                    compiler.preparer.format_column(c),
                    compiler.process(c.default, **kw),
                )
            )
            if implicit_return_defaults and c in implicit_return_defaults:
                compiler.returning.append(c)
            elif not c.primary_key:
                compiler.postfetch.append(c)
    elif c.default.is_clause_element:
        # server-evaluated SQL expression default
        values.append(
            (
                c,
                compiler.preparer.format_column(c),
                compiler.process(c.default.arg.self_group(), **kw),
            )
        )

        if implicit_return_defaults and c in implicit_return_defaults:
            compiler.returning.append(c)
        elif not c.primary_key:
            # don't add primary key column to postfetch
            compiler.postfetch.append(c)
    else:
        # plain Python-side default: pre-execute it and bind the result
        values.append(
            (
                c,
                compiler.preparer.format_column(c),
                _create_insert_prefetch_bind_param(compiler, c, **kw),
            )
        )
def _append_param_insert_select_hasdefault(compiler, stmt, c, values, kw):
    # Variant of _append_param_insert_hasdefault used for INSERT..FROM SELECT:
    # expressions are appended unprocessed (process=False) since they will be
    # rendered as part of the SELECT column list.
    if c.default.is_sequence:
        if compiler.dialect.supports_sequences and (
            not c.default.optional or not compiler.dialect.sequences_optional
        ):
            values.append(
                (c, compiler.preparer.format_column(c), c.default.next_value())
            )
    elif c.default.is_clause_element:
        values.append(
            (c, compiler.preparer.format_column(c), c.default.arg.self_group())
        )
    else:
        # Python-side default: pre-execute and bind, without compiling the
        # bind parameter to a string here
        values.append(
            (
                c,
                compiler.preparer.format_column(c),
                _create_insert_prefetch_bind_param(
                    compiler, c, process=False, **kw
                ),
            )
        )
def _append_param_update(
    compiler, compile_state, stmt, c, implicit_return_defaults, values, kw
):
    # Render an UPDATE SET entry for column ``c`` when no explicit value was
    # supplied, driven by its onupdate / server_onupdate settings.
    include_table = compile_state.include_table_with_column_exprs
    if c.onupdate is not None and not c.onupdate.is_sequence:
        if c.onupdate.is_clause_element:
            # SQL-expression onupdate: render inline, then fetch result back
            values.append(
                (
                    c,
                    compiler.preparer.format_column(
                        c,
                        use_table=include_table,
                    ),
                    compiler.process(c.onupdate.arg.self_group(), **kw),
                )
            )
            if implicit_return_defaults and c in implicit_return_defaults:
                compiler.returning.append(c)
            else:
                compiler.postfetch.append(c)
        else:
            # Python-side onupdate: pre-execute and bind the value
            values.append(
                (
                    c,
                    compiler.preparer.format_column(
                        c,
                        use_table=include_table,
                    ),
                    _create_update_prefetch_bind_param(compiler, c, **kw),
                )
            )
    elif c.server_onupdate is not None:
        # value generated by the server on UPDATE; just arrange the fetch
        if implicit_return_defaults and c in implicit_return_defaults:
            compiler.returning.append(c)
        else:
            compiler.postfetch.append(c)
    elif (
        implicit_return_defaults
        and (stmt._return_defaults_columns or not stmt._return_defaults)
        and c in implicit_return_defaults
    ):
        compiler.returning.append(c)
def _create_insert_prefetch_bind_param(
    compiler, c, process=True, name=None, **kw
):
    """Render a bound parameter for column *c* and register the column
    for INSERT-time "prefetch" (Python-side evaluation of its default
    before the statement executes).
    """
    bound = _create_bind_param(
        compiler,
        c,
        None,
        process=process,
        name=name,
        **kw,
    )
    compiler.insert_prefetch.append(c)
    return bound
def _create_update_prefetch_bind_param(
    compiler, c, process=True, name=None, **kw
):
    """Render a bound parameter for column *c* and register the column
    for UPDATE-time "prefetch" (Python-side evaluation of its onupdate
    callable before the statement executes).
    """
    bound = _create_bind_param(
        compiler,
        c,
        None,
        process=process,
        name=name,
        **kw,
    )
    compiler.update_prefetch.append(c)
    return bound
class _multiparam_column(elements.ColumnElement):
    """Stand-in column used to generate distinct bind-parameter names for
    rows beyond the first in a multi-VALUES INSERT/UPDATE.
    """

    _is_multiparam_column = True

    def __init__(self, original, index):
        # index: zero-based row number within the multi-params list
        self.index = index
        # key carries an "_m<N>" suffix, N starting at 1 for the second row
        self.key = "%s_m%d" % (original.key, index + 1)
        self.original = original
        self.default = original.default
        self.type = original.type

    def compare(self, other, **kw):
        raise NotImplementedError()

    def _copy_internals(self, other, **kw):
        raise NotImplementedError()

    def __eq__(self, other):
        # equality is by suffixed key plus the wrapped original column
        # NOTE(review): __hash__ is not defined alongside __eq__ here, so
        # hashability follows ColumnElement's behavior -- confirm if these
        # objects are ever used as dict/set members.
        return (
            isinstance(other, _multiparam_column)
            and other.key == self.key
            and other.original == self.original
        )
def _process_multiparam_default_bind(compiler, stmt, c, index, kw):
    """Render the value for column ``c`` in row ``index`` of a multi-VALUES
    statement when that row did not supply a value, using the column's
    default.

    :raises exc.CompileError: if the column has no default at all.
    """
    if not c.default:
        # Bugfix: the two adjacent string literals previously concatenated
        # to "boundparameter"; a trailing space restores the intended text.
        raise exc.CompileError(
            "INSERT value for column %s is explicitly rendered as a bound "
            "parameter in the VALUES clause; "
            "a Python-side value or SQL expression is required" % c
        )
    elif c.default.is_clause_element:
        return compiler.process(c.default.arg.self_group(), **kw)
    elif c.default.is_sequence:
        # these conditions would have been established
        # by append_param_insert_(?:hasdefault|pk_returning|pk_no_returning)
        # in order for us to be here, so these don't need to be
        # checked
        # assert compiler.dialect.supports_sequences and (
        #     not c.default.optional
        #     or not compiler.dialect.sequences_optional
        # )
        return compiler.process(c.default, **kw)
    else:
        # Python-side default: wrap the column so each extra row gets a
        # uniquely named ("_m<N>") prefetch bind parameter
        col = _multiparam_column(c, index)
        if isinstance(stmt, dml.Insert):
            return _create_insert_prefetch_bind_param(compiler, col, **kw)
        else:
            return _create_update_prefetch_bind_param(compiler, col, **kw)
def _get_update_multitable_params(
    compiler,
    stmt,
    compile_state,
    stmt_parameter_tuples,
    check_columns,
    _col_bind_name,
    _getattr_col_key,
    values,
    kw,
):
    # Handle SET entries for columns belonging to "extra" tables in a
    # multi-table UPDATE, then apply onupdate/server_onupdate processing to
    # the remaining columns of each affected table.
    normalized_params = dict(
        (coercions.expect(roles.DMLColumnRole, c), param)
        for c, param in stmt_parameter_tuples
    )

    include_table = compile_state.include_table_with_column_exprs

    affected_tables = set()
    for t in compile_state._extra_froms:
        for c in t.c:
            if c in normalized_params:
                affected_tables.add(t)
                check_columns[_getattr_col_key(c)] = c
                value = normalized_params[c]

                col_value = compiler.process(c, include_table=include_table)
                if coercions._is_literal(value):
                    value = _create_bind_param(
                        compiler,
                        c,
                        value,
                        required=value is REQUIRED,
                        name=_col_bind_name(c),
                        **kw,  # TODO: no test coverage for literal binds here
                    )
                elif value._is_bind_parameter:
                    value = _handle_values_anonymous_param(
                        compiler, c, value, name=_col_bind_name(c), **kw
                    )
                else:
                    # SQL-expression value; fetch the result back afterwards
                    compiler.postfetch.append(c)
                    value = compiler.process(value.self_group(), **kw)
                values.append((c, col_value, value))
    # determine tables which are actually to be updated - process onupdate
    # and server_onupdate for these
    for t in affected_tables:
        for c in t.c:
            if c in normalized_params:
                continue
            elif c.onupdate is not None and not c.onupdate.is_sequence:
                if c.onupdate.is_clause_element:
                    values.append(
                        (
                            c,
                            compiler.process(c, include_table=include_table),
                            compiler.process(
                                c.onupdate.arg.self_group(), **kw
                            ),
                        )
                    )
                    compiler.postfetch.append(c)
                else:
                    # Python-side onupdate: pre-execute and bind
                    values.append(
                        (
                            c,
                            compiler.process(c, include_table=include_table),
                            _create_update_prefetch_bind_param(
                                compiler, c, name=_col_bind_name(c), **kw
                            ),
                        )
                    )
            elif c.server_onupdate is not None:
                compiler.postfetch.append(c)
def _extend_values_for_multiparams(
    compiler,
    stmt,
    compile_state,
    values,
    _column_as_key,
    kw,
):
    # Expand the rendered ``values`` list (built for the first parameter row)
    # into one list per row of a multi-VALUES statement.  Returns a list of
    # per-row lists of (column, column_text, value_text) triples.
    values_0 = values
    values = [values]

    # rows after the first get "_m<N>" suffixed bind names (N starting at 1)
    for i, row in enumerate(compile_state._multi_parameters[1:]):
        extension = []

        row = {_column_as_key(key): v for key, v in row.items()}

        for (col, col_expr, param) in values_0:
            if col.key in row:
                key = col.key

                if coercions._is_literal(row[key]):
                    new_param = _create_bind_param(
                        compiler,
                        col,
                        row[key],
                        name="%s_m%d" % (col.key, i + 1),
                        **kw,
                    )
                else:
                    # SQL expression supplied for this cell
                    new_param = compiler.process(row[key].self_group(), **kw)
            else:
                # row omits this column; fall back to the column default
                new_param = _process_multiparam_default_bind(
                    compiler, stmt, col, i, kw
                )

            extension.append((col, col_expr, new_param))

        values.append(extension)

    return values
def _get_stmt_parameter_tuples_params(
    compiler,
    compile_state,
    parameters,
    stmt_parameter_tuples,
    _column_as_key,
    values,
    kw,
):
    # Merge statement-level (key, value) tuples into the ``parameters`` dict
    # for plain columns, or render them directly into ``values`` when the
    # left side is an arbitrary SQL expression rather than a column.
    for k, v in stmt_parameter_tuples:
        colkey = _column_as_key(k)
        if colkey is not None:
            parameters.setdefault(colkey, v)
        else:
            # a non-Column expression on the left side;
            # add it to values() in an "as-is" state,
            # coercing right side to bound param

            # note one of the main use cases for this is array slice
            # updates on PostgreSQL, as the left side is also an expression.

            col_expr = compiler.process(
                k, include_table=compile_state.include_table_with_column_exprs
            )

            if coercions._is_literal(v):
                v = compiler.process(
                    elements.BindParameter(None, v, type_=k.type), **kw
                )
            else:
                if v._is_bind_parameter and v.type._isnull:
                    # either unique parameter, or other bound parameters that
                    # were passed in directly
                    # set type to that of the column unconditionally
                    v = v._with_binary_element_type(k.type)

                v = compiler.process(v.self_group(), **kw)

            values.append((k, col_expr, v))
def _get_returning_modifiers(compiler, stmt, compile_state):
    # Compute the strategy flags that control how server-generated values
    # (primary keys and defaults) are to be retrieved for this statement.

    # do we need primary key values back at all?  only for a plain INSERT
    # (no explicit RETURNING, not inline, not multi-VALUES), and for
    # executemany only when the dialect can combine it with RETURNING
    need_pks = (
        compile_state.isinsert
        and not stmt._inline
        and (
            not compiler.for_executemany
            or (
                compiler.dialect.insert_executemany_returning
                and stmt._return_defaults
            )
        )
        and not stmt._returning
        and not compile_state._has_multi_parameters
    )

    implicit_returning = (
        need_pks
        and compiler.dialect.implicit_returning
        and stmt.table.implicit_returning
    )

    if compile_state.isinsert:
        implicit_return_defaults = implicit_returning and stmt._return_defaults
    elif compile_state.isupdate:
        implicit_return_defaults = (
            compiler.dialect.implicit_returning
            and stmt.table.implicit_returning
            and stmt._return_defaults
        )
    else:
        # this line is unused, currently we are always
        # isinsert or isupdate
        implicit_return_defaults = False  # pragma: no cover

    if implicit_return_defaults:
        # resolve the flag into the concrete set of columns to RETURN
        if not stmt._return_defaults_columns:
            implicit_return_defaults = set(stmt.table.c)
        else:
            implicit_return_defaults = set(stmt._return_defaults_columns)

    postfetch_lastrowid = need_pks and compiler.dialect.postfetch_lastrowid

    return (
        need_pks,
        implicit_returning,
        implicit_return_defaults,
        postfetch_lastrowid,
    )
def _warn_pk_with_no_anticipated_value(c):
    """Warn that primary-key column *c* will receive no value at all:
    no default, no server default, no autoincrement, and no explicit
    value in the statement.  Composite primary keys get an extra note
    about the SQLAlchemy 1.1 autoincrement behavior change.
    """
    parts = [
        "Column '%s.%s' is marked as a member of the "
        "primary key for table '%s', "
        "but has no Python-side or server-side default generator indicated, "
        "nor does it indicate 'autoincrement=True' or 'nullable=True', "
        "and no explicit value is passed. "
        "Primary key columns typically may not store NULL."
        % (c.table.fullname, c.name, c.table.fullname)
    ]
    if len(c.table.primary_key) > 1:
        parts.append(
            " Note that as of SQLAlchemy 1.1, 'autoincrement=True' must be "
            "indicated explicitly for composite (e.g. multicolumn) primary "
            "keys if AUTO_INCREMENT/SERIAL/IDENTITY "
            "behavior is expected for one of the columns in the primary key. "
            "CREATE TABLE statements are impacted by this change as well on "
            "most backends."
        )
    util.warn("".join(parts))
| 32.756707
| 79
| 0.580457
|
4a0555d87c495a969ea79075a643bfa62887d159
| 8,966
|
py
|
Python
|
train_BootrapEnsemble_MNIST.py
|
Neronjust2017/Bayesian-neural-networks
|
9d7f781f5c2dfa8fadf26300b4b5b64366c939cd
|
[
"MIT"
] | 4
|
2020-07-07T12:29:03.000Z
|
2021-11-17T07:20:17.000Z
|
train_BootrapEnsemble_MNIST.py
|
Neronjust2017/Bayesian-neural-networks
|
9d7f781f5c2dfa8fadf26300b4b5b64366c939cd
|
[
"MIT"
] | null | null | null |
train_BootrapEnsemble_MNIST.py
|
Neronjust2017/Bayesian-neural-networks
|
9d7f781f5c2dfa8fadf26300b4b5b64366c939cd
|
[
"MIT"
] | 1
|
2020-05-29T08:07:48.000Z
|
2020-05-29T08:07:48.000Z
|
from __future__ import division, print_function
import time
import torch.utils.data
from torchvision import transforms, datasets
import argparse
import matplotlib
from src.Bootstrap_Ensemble.model import *
import copy
matplotlib.use('Agg')
import matplotlib.pyplot as plt
os.environ["CUDA_VISIBLE_DEVICES"] = "1"
parser = argparse.ArgumentParser(description='Train Ensemble of MAP nets using bootstrapping')
parser.add_argument('--weight_decay', type=float, nargs='?', action='store', default=0,
help='Specify the precision of an isotropic Gaussian prior. Default: 0.')
parser.add_argument('--subsample', type=float, nargs='?', action='store', default=0.8,
help='Rate at which to subsample the dataset to train each net in the ensemble. Default: 0.8.')
parser.add_argument('--n_nets', type=int, nargs='?', action='store', default=100,
help='Number of nets in ensemble. Default: 100.')
parser.add_argument('--epochs', type=int, nargs='?', action='store', default=10,
help='How many epochs to train each net. Default: 10.')
parser.add_argument('--lr', type=float, nargs='?', action='store', default=1e-3,
help='learning rate. Default: 1e-3.')
parser.add_argument('--models_dir', type=str, nargs='?', action='store', default='Ensemble_models',
help='Where to save learnt weights and train vectors. Default: \'Ensemble_models\'.')
parser.add_argument('--results_dir', type=str, nargs='?', action='store', default='Ensemble_results',
help='Where to save learnt training plots. Default: \'Ensemble_results\'.')
args = parser.parse_args()
# Where to save models weights
models_dir = args.models_dir
# Where to save plots and error, accuracy vectors
results_dir = args.results_dir
mkdir(models_dir)
mkdir(results_dir)
# ------------------------------------------------------------------------------------------------------
# train config
NTrainPointsMNIST = 60000
batch_size = 128
nb_epochs = args.epochs
log_interval = 1
# ------------------------------------------------------------------------------------------------------
# dataset
cprint('c', '\nData:')
# load data
# data augmentation
transform_train = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize(mean=(0.1307,), std=(0.3081,))
])
transform_test = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize(mean=(0.1307,), std=(0.3081,))
])
use_cuda = torch.cuda.is_available()
trainset = datasets.MNIST(root='../data', train=True, download=True, transform=transform_train)
valset = datasets.MNIST(root='../data', train=False, download=True, transform=transform_test)
## ---------------------------------------------------------------------------------------------------------------------
# net dims
cprint('c', '\nNetwork:')
lr = args.lr
weight_decay = args.weight_decay
########################################################################################
# This is The Bootstrapy part
Nruns = args.n_nets
weight_set_samples = []
p_subsample = args.subsample
############ Nruns:ensemble 数量
for iii in range(Nruns):
keep_idx = []
for idx in range(len(trainset)):
if np.random.binomial(1, p_subsample, size=1) == 1:
keep_idx.append(idx)
keep_idx = np.array(keep_idx)
from torch.utils.data.sampler import SubsetRandomSampler
sampler = SubsetRandomSampler(keep_idx)
if use_cuda:
trainloader = torch.utils.data.DataLoader(trainset, batch_size=batch_size, shuffle=False, pin_memory=True,
num_workers=3, sampler=sampler)
valloader = torch.utils.data.DataLoader(valset, batch_size=batch_size, shuffle=False, pin_memory=True,
num_workers=3)
else:
trainloader = torch.utils.data.DataLoader(trainset, batch_size=batch_size, shuffle=False, pin_memory=False,
num_workers=3, sampler=sampler)
valloader = torch.utils.data.DataLoader(valset, batch_size=batch_size, shuffle=False, pin_memory=False,
num_workers=3)
###############################################################
net = Bootstrap_Net(lr=lr, channels_in=1, side_in=28, cuda=use_cuda, classes=10, batch_size=batch_size,
weight_decay=weight_decay, n_hid=1200)
epoch = 0
## ---------------------------------------------------------------------------------------------------------------------
# train
cprint('c', '\nTrain:')
print(' init cost variables:')
pred_cost_train = np.zeros(nb_epochs)
err_train = np.zeros(nb_epochs)
cost_dev = np.zeros(nb_epochs)
err_dev = np.zeros(nb_epochs)
# best_cost = np.inf
best_err = np.inf
nb_its_dev = 1
tic0 = time.time()
for i in range(epoch, nb_epochs):
net.set_mode_train(True)
tic = time.time()
nb_samples = 0
for x, y in trainloader:
cost_pred, err = net.fit(x, y)
err_train[i] += err
pred_cost_train[i] += cost_pred
nb_samples += len(x)
pred_cost_train[i] /= nb_samples
err_train[i] /= nb_samples
toc = time.time()
net.epoch = i
# ---- print
print("it %d/%d, Jtr_pred = %f, err = %f, " % (i, nb_epochs, pred_cost_train[i], err_train[i]), end="")
cprint('r', ' time: %f seconds\n' % (toc - tic))
# ---- dev
if i % nb_its_dev == 0:
net.set_mode_train(False)
nb_samples = 0
for j, (x, y) in enumerate(valloader):
cost, err, probs = net.eval(x, y)
cost_dev[i] += cost
err_dev[i] += err
nb_samples += len(x)
cost_dev[i] /= nb_samples
err_dev[i] /= nb_samples
cprint('g', ' Jdev = %f, err = %f\n' % (cost_dev[i], err_dev[i]))
if err_dev[i] < best_err:
best_err = err_dev[i]
toc0 = time.time()
runtime_per_it = (toc0 - tic0) / float(nb_epochs)
cprint('r', ' average time: %f seconds\n' % runtime_per_it)
## ---------------------------------------------------------------------------------------------------------------------
# results
cprint('c', '\nRESULTS:')
nb_parameters = net.get_nb_parameters()
best_cost_dev = np.min(cost_dev)
best_cost_train = np.min(pred_cost_train)
err_dev_min = err_dev[::nb_its_dev].min()
print(' cost_dev: %f (cost_train %f)' % (best_cost_dev, best_cost_train))
print(' err_dev: %f' % (err_dev_min))
print(' nb_parameters: %d (%s)' % (nb_parameters, humansize(nb_parameters)))
print(' time_per_it: %fs\n' % (runtime_per_it))
########
weight_set_samples.append(copy.deepcopy(net.model.state_dict()))
## ---------------------------------------------------------------------------------------------------------------------
# fig cost vs its
textsize = 15
marker = 5
plt.figure(dpi=100)
fig, ax1 = plt.subplots()
ax1.plot(pred_cost_train, 'r--')
ax1.plot(range(0, nb_epochs, nb_its_dev), cost_dev[::nb_its_dev], 'b-')
ax1.set_ylabel('Cross Entropy')
plt.xlabel('epoch')
plt.grid(b=True, which='major', color='k', linestyle='-')
plt.grid(b=True, which='minor', color='k', linestyle='--')
lgd = plt.legend(['train error', 'test error'], markerscale=marker, prop={'size': textsize, 'weight': 'normal'})
ax = plt.gca()
plt.title('classification costs')
for item in ([ax.title, ax.xaxis.label, ax.yaxis.label] +
ax.get_xticklabels() + ax.get_yticklabels()):
item.set_fontsize(textsize)
item.set_weight('normal')
plt.savefig(results_dir + '/cost%d.png' % iii, bbox_extra_artists=(lgd,), bbox_inches='tight')
plt.figure(dpi=100)
fig2, ax2 = plt.subplots()
ax2.set_ylabel('% error')
ax2.semilogy(range(0, nb_epochs, nb_its_dev), 100 * err_dev[::nb_its_dev], 'b-')
ax2.semilogy(100 * err_train, 'r--')
plt.xlabel('epoch')
plt.grid(b=True, which='major', color='k', linestyle='-')
plt.grid(b=True, which='minor', color='k', linestyle='--')
ax2.get_yaxis().set_minor_formatter(matplotlib.ticker.ScalarFormatter())
ax2.get_yaxis().set_major_formatter(matplotlib.ticker.ScalarFormatter())
lgd = plt.legend(['test error', 'train error'], markerscale=marker, prop={'size': textsize, 'weight': 'normal'})
ax = plt.gca()
for item in ([ax.title, ax.xaxis.label, ax.yaxis.label] +
ax.get_xticklabels() + ax.get_yticklabels()):
item.set_fontsize(textsize)
item.set_weight('normal')
plt.savefig(results_dir + '/err%d.png' % iii, bbox_extra_artists=(lgd,), box_inches='tight')
save_object(weight_set_samples, models_dir+'/state_dicts.pkl')
| 37.049587
| 124
| 0.56915
|
4a0556325bbedd93834ead784e53f8dbd1e078d9
| 441
|
py
|
Python
|
quest/urls.py
|
syeddanish41/MedLense
|
c2b4dea3c00dce56eaaf309ccbbb62edbacf2ab6
|
[
"MIT"
] | 3
|
2018-09-24T09:47:36.000Z
|
2018-11-27T18:06:03.000Z
|
quest/urls.py
|
syeddanish41/MedLens
|
c2b4dea3c00dce56eaaf309ccbbb62edbacf2ab6
|
[
"MIT"
] | 1
|
2021-06-10T20:47:05.000Z
|
2021-06-10T20:47:05.000Z
|
quest/urls.py
|
syeddanish41/MedLens
|
c2b4dea3c00dce56eaaf309ccbbb62edbacf2ab6
|
[
"MIT"
] | null | null | null |
# URL routing for the `quest` app: maps the index page and the three
# "explore" endpoints to their view functions.  Pattern order matters for
# Django's first-match resolution.
from django.conf.urls import url
from . import views

urlpatterns = [
    # changed view.index to view.run_mob
    url(r'^$', views.run_query, name = 'index'),
    #url(r'^form/', views.get_name, name = 'get_name'),
    # currently not in use
    url(r'^explore/$', views.explore, name='explore'),
    url(r'^explore_article/$', views.explore_article, name='explore_article'),
    url(r'^explore_question/$', views.explore_question, name='explore_question'),
]
| 36.75
| 78
| 0.707483
|
4a0556aa920d497b2c494cabc5a10eb4fd4a8a8b
| 1,155
|
py
|
Python
|
github/create/issue/src/formula/formula.py
|
rogerio-ignacio-developer/formulas-github
|
12cf7401f31e4a6212289b839c02de1d612c8271
|
[
"Apache-2.0"
] | 32
|
2021-01-27T17:43:23.000Z
|
2022-03-23T18:00:41.000Z
|
github/create/issue/src/formula/formula.py
|
rogerio-ignacio-developer/formulas-github
|
12cf7401f31e4a6212289b839c02de1d612c8271
|
[
"Apache-2.0"
] | 12
|
2021-01-26T18:14:59.000Z
|
2021-10-04T12:24:41.000Z
|
github/create/issue/src/formula/formula.py
|
rogerio-ignacio-developer/formulas-github
|
12cf7401f31e4a6212289b839c02de1d612c8271
|
[
"Apache-2.0"
] | 11
|
2021-01-28T13:54:24.000Z
|
2022-03-16T12:16:27.000Z
|
#!/usr/bin/python3
import requests
import json
import re
def run(token, owner, repository, title, body, labels, assignees):
    """Create an issue on ``owner/repository`` via the GitHub REST v3 API.

    ``labels`` and ``assignees`` are optional comma-separated strings;
    empty or ``None`` values are omitted from the request payload.
    Prints a success or failure message to stdout.
    """
    url = f"https://api.github.com/repos/{owner}/{repository}/issues"

    payload = {"title": title, "body": body}
    if labels not in (None, ''):
        payload["labels"] = format(labels)
    if assignees not in (None, ''):
        payload["assignees"] = format(assignees)

    headers = {
        "Accept": "application/vnd.github.v3+json",
        "Authorization": f"token {token}",
    }

    response = requests.post(url=url, data=json.dumps(payload), headers=headers)

    if response.status_code == 201:
        print(f"✅ Issue successfully created on \033[36mhttps://github.com/{owner}/{repository}\033[0m!")
    else:
        print(f"❌ Couldn't create new issue on \033[36mhttps://github.com/{owner}/{repository}")
        print(response.status_code, response.reason, response.content)
def format(value):
    """Split a comma-separated string into a list, dropping all spaces
    and any leading/trailing whitespace first.

    NOTE: shadows the builtin ``format``; kept for caller compatibility.
    """
    return value.replace(" ", "").strip().split(",")
| 26.25
| 105
| 0.6
|
4a05573f9d2083a3ce8fe0fa19a5cfecb622d0d9
| 2,780
|
py
|
Python
|
sleepify-dash/apps/app1.py
|
Sleepify/Sleepify
|
ed45fd39410650e719f5171bc0542c87a48fc32e
|
[
"MIT"
] | null | null | null |
sleepify-dash/apps/app1.py
|
Sleepify/Sleepify
|
ed45fd39410650e719f5171bc0542c87a48fc32e
|
[
"MIT"
] | null | null | null |
sleepify-dash/apps/app1.py
|
Sleepify/Sleepify
|
ed45fd39410650e719f5171bc0542c87a48fc32e
|
[
"MIT"
] | null | null | null |
# Dash page: shows a bar chart of each activity's impact on sleep quality.
# Module import has side effects: reads the logo image and Data/data.csv.
import sys
import dash_core_components as dcc
import dash_html_components as html
import plotly.graph_objs as go
from dash.dependencies import Input, Output
import pandas as pd
from os.path import dirname, join as path_join
from app import app

# make the repository root importable so train_score can be found
sys.path.insert(0, path_join(dirname(__file__), "..", ".."))
import base64
from train_score import load_data

logo_filename = './assets/images/top.png'  # replace with your own image
# inline the logo as a base64 data URI for the <img> tag below
encoded_logo = base64.b64encode(open(logo_filename, 'rb').read())

# dropdown options; "all" aggregates every activity
categories = ["all", "training", "movies", "reading", "programming", "girlfriend time",
              "work", "relax", "friends", "sleep",
              "coffee", "good meal", "hangout with friends"]
days = 1

# per-hour activity/score records used by the callback below
data_path = path_join(dirname(__file__), "..", "..", "Data", "data.csv")
df = pd.read_csv(data_path)

layout = html.Div(
    [
        html.Img(src='data:image/png;base64,{}'.format(encoded_logo.decode()), width="400px"),
        html.P(),
        html.Div(
            [
                dcc.Dropdown(
                    id="categories",
                    options=[
                        {'label': "{}".format(category), 'value': category}
                        for category in categories ], value="coffee"
                ),
                dcc.Graph(id="category"),
            ]
        ),
        html.Button('Predictions', id='predictions'),
    ], id='main', style={'position': 'absolute', 'left': '35%', 'width': '450px'}
)
@app.callback(
    Output("category", "figure"),
    [Input("categories", "value")]
)
def update_graph(value):
    """Rebuild the activity-impact bar chart for the selected category.

    Positive scores are plotted as a blue trace and negative scores as a
    red trace; hours whose activity does not match contribute zero bars.
    """
    if value == "all":
        selected = ["training", "movies", "reading", "programming", "girlfriend time",
                    "work", "relax", "friends", "sleep",
                    "coffee", "good meal", "hangout with friends"]
    else:
        selected = [value]

    positive, negative, hours = [], [], []
    for record in df[["activity", "hour", "score"]].itertuples():
        pos = 0
        neg = 0
        if record.activity in selected:
            if record.score >= 0:
                pos = record.score
            else:
                neg = record.score
        positive.append(pos)
        negative.append(neg)
        hours.append(record.hour)

    pos_trace = go.Bar(
        x=hours,
        y=positive,
        marker=dict(
            color='rgb(49,130,189)',
        ),
    )
    neg_trace = go.Bar(
        x=hours,
        y=negative,
        marker=dict(
            color='rgba(219, 64, 82, 1.0)'
        ),
    )
    fig_layout = go.Layout(
        title="Activity Impact on sleep quality",
        showlegend=False
    )
    return go.Figure(data=[pos_trace, neg_trace], layout=fig_layout)
| 28.659794
| 94
| 0.534173
|
4a05577559422973c30b2fbb37c3f5c6453fcd7a
| 4,083
|
py
|
Python
|
configs/solov2/solov2_r50_fpn_8gpu_1x.py
|
damsalevente/SOLO
|
d045e0f5daf07e5ea582da6af2caf4075861e46b
|
[
"BSD-2-Clause"
] | null | null | null |
configs/solov2/solov2_r50_fpn_8gpu_1x.py
|
damsalevente/SOLO
|
d045e0f5daf07e5ea582da6af2caf4075861e46b
|
[
"BSD-2-Clause"
] | null | null | null |
configs/solov2/solov2_r50_fpn_8gpu_1x.py
|
damsalevente/SOLO
|
d045e0f5daf07e5ea582da6af2caf4075861e46b
|
[
"BSD-2-Clause"
] | null | null | null |
# mmdetection-style config: SOLOv2 with ResNet-50 + FPN, trained on
# Cityscapes instance segmentation (1x schedule, 8-GPU layout).

# model settings
model = dict(
    type='SOLOv2',
    pretrained='torchvision://resnet50',
    backbone=dict(
        type='ResNet',
        depth=50,
        num_stages=4,
        out_indices=(0, 1, 2, 3),  # C2, C3, C4, C5
        frozen_stages=1,
        style='pytorch'),
    neck=dict(
        type='FPN',
        in_channels=[256, 512, 1024, 2048],
        out_channels=256,
        start_level=0,
        num_outs=5),
    bbox_head=dict(
        type='SOLOv2Head',
        num_classes=81,
        in_channels=256,
        stacked_convs=4,
        seg_feat_channels=512,
        strides=[8, 8, 16, 32, 32],
        scale_ranges=((1, 96), (48, 192), (96, 384), (192, 768), (384, 2048)),
        sigma=0.2,
        num_grids=[40, 36, 24, 16, 12],
        ins_out_channels=256,
        loss_ins=dict(
            type='DiceLoss',
            use_sigmoid=True,
            loss_weight=3.0),
        loss_cate=dict(
            type='FocalLoss',
            use_sigmoid=True,
            gamma=2.0,
            alpha=0.25,
            loss_weight=1.0)),
    mask_feat_head=dict(
        type='MaskFeatHead',
        in_channels=256,
        out_channels=128,
        start_level=0,
        end_level=3,
        num_classes=256,
        norm_cfg=dict(type='GN', num_groups=32, requires_grad=True)),
    )
# training and testing settings
train_cfg = dict()
test_cfg = dict(
    nms_pre=500,
    score_thr=0.1,
    mask_thr=0.5,
    update_thr=0.05,
    kernel='gaussian',  # gaussian/linear
    sigma=2.0,
    max_per_img=100)
# dataset settings
dataset_type = 'CityscapesDataset'
# NOTE(review): hard-coded local path; adjust per machine
data_root = '/media/nap/rootMX18.1/home/levente/Dev/data/cityscapes/gtFine_trainvaltest/gtFine/'
img_norm_cfg = dict(
    mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
train_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(type='LoadAnnotations', with_bbox=True, with_mask=True),
    dict(type='Resize', img_scale=(1333, 800), keep_ratio=True),
    dict(type='RandomFlip', flip_ratio=0.5),
    dict(type='Normalize', **img_norm_cfg),
    dict(type='Pad', size_divisor=32),
    dict(type='DefaultFormatBundle'),
    dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']),
]
test_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(
        type='MultiScaleFlipAug',
        img_scale=(1333, 800),
        flip=False,
        transforms=[
            dict(type='Resize', keep_ratio=True),
            dict(type='RandomFlip'),
            dict(type='Normalize', **img_norm_cfg),
            dict(type='Pad', size_divisor=32),
            dict(type='ImageToTensor', keys=['img']),
            dict(type='Collect', keys=['img']),
        ])
]
data = dict(
    imgs_per_gpu=2,
    workers_per_gpu=2,
    train=dict(
        type=dataset_type,
        ann_file=data_root + 'annotations/instancesonly_filtered_gtFine_train.json',
        img_prefix=data_root + 'train/',
        pipeline=train_pipeline),
    val=dict(
        type=dataset_type,
        ann_file=data_root + 'annotations/instancesonly_filtered_gtFine_val.json',
        img_prefix=data_root + 'val/',
        pipeline=test_pipeline),
    test=dict(
        type=dataset_type,
        ann_file=data_root + 'annotations/instancesonly_filtered_gtFine_val.json',
        img_prefix=data_root + 'val/',
        pipeline=test_pipeline))
# optimizer
optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001)
optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))
# learning policy
lr_config = dict(
    policy='step',
    warmup='linear',
    warmup_iters=500,
    warmup_ratio=0.01,
    step=[9, 11])
checkpoint_config = dict(interval=1)
# yapf:disable
log_config = dict(
    interval=50,
    hooks=[
        dict(type='TextLoggerHook'),
        # dict(type='TensorboardLoggerHook')
    ])
# yapf:enable
# runtime settings
total_epochs = 12
device_ids = range(8)
dist_params = dict(backend='nccl')
log_level = 'INFO'
work_dir = './work_dirs/solov2_release_r50_fpn_8gpu_1x'
load_from = None
resume_from = None
workflow = [('train', 1)]
| 30.470149
| 96
| 0.609846
|
4a055782537fef5b7387a07f831876a8992daac9
| 53,552
|
py
|
Python
|
cardinal_pythonlib/psychiatry/drugs.py
|
bopopescu/pythonlib
|
9c2187d6092ba133342ca3374eb7c86f9d296c30
|
[
"Apache-2.0"
] | null | null | null |
cardinal_pythonlib/psychiatry/drugs.py
|
bopopescu/pythonlib
|
9c2187d6092ba133342ca3374eb7c86f9d296c30
|
[
"Apache-2.0"
] | null | null | null |
cardinal_pythonlib/psychiatry/drugs.py
|
bopopescu/pythonlib
|
9c2187d6092ba133342ca3374eb7c86f9d296c30
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# cardinal_pythonlib/psychiatry/drugs.py
"""
===============================================================================
Original code copyright (C) 2009-2020 Rudolf Cardinal (rudolf@pobox.com).
This file is part of cardinal_pythonlib.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
===============================================================================
**Drug information, with an emphasis on psychotropic drugs, including
translating specific to generic names.**
**Examples**
Test within Python:
.. code-block:: python
from cardinal_pythonlib.psychiatry.drugs import *
drug_name_to_generic("UNKNOWN")
drug_name_to_generic("UNKNOWN", unknown_to_default=True)
drug_names_to_generic([
"citalopram", "Citalopram", "Cipramil", "Celexa",
"olanzepine", # typo
"dextroamphetamine",
"amitryptyline",
])
**Antidepressants**
As of 2018-07-01, this is a functional superset of the SLAM
antidepressant-finding SQL (see ``dep_med_v1``), though mainly a superset in
non-antidepressant respects; the only antidepressants it adds are:
- bupropion, maprotiline
The SLAM antidepressant finder finds:
- tricyclic (category)
- amitriptyline, clomipramine, dosulepin, doxepin, imipramine, lofepramine,
nortriptyline, trimipramine
- mianserin, trazodone, phenelzine, isocarboxazid, tranylcypromine, moclobemide
- citalopram, escitalopram, fluoxetine, fluvoxamine, paroxetine, sertraline
- mirtazapine, reboxetine, venlafaxine, agomelatine, duloxetine
- flupentixol, tryptophan
Sorted, that is:
.. code-block:: none
agomelatine
amitriptyline
citalopram
clomipramine
dosulepin
doxepin
duloxetine
escitalopram
fluoxetine
flupentixol
fluvoxamine
imipramine
isocarboxazid
lofepramine
mianserin
mirtazapine
moclobemide
nortriptyline
paroxetine
phenelzine
reboxetine
sertraline
tranylcypromine
trazodone
tricyclic
trimipramine
tryptophan
venlafaxine
Compare that against the output of:
.. code-block:: python
[x.generic_name for x in all_drugs_where(slam_antidepressant_finder=True,
include_categories=True)]
**Using this code from R via reticulate**
Test within R:
.. code-block:: r
# -------------------------------------------------------------------------
# Load libraries
# -------------------------------------------------------------------------
RUN_ONCE_ONLY <- '
library(devtools)
devtools::install_github("rstudio/reticulate") # get latest version
'
library(data.table)
library(reticulate)
# -------------------------------------------------------------------------
# Set up reticulate
# -------------------------------------------------------------------------
VENV <- "~/dev/venvs/cardinal_pythonlib" # or your preferred virtualenv
PYTHON_EXECUTABLE <- ifelse(
.Platform$OS.type == "windows",
file.path(VENV, "Scripts", "python.exe"), # Windows
file.path(VENV, "bin", "python") # Linux
)
reticulate::use_python(PYTHON_EXECUTABLE, required=TRUE)
# ... it is CRITICAL to use required=TRUE, or it might fail silently
# Unnecessary now reticulate::use_python() works:
#
# PYTHON_VERSION <- "python3.5"
# CARDINAL_PYTHONLIB_BASEDIR <- ifelse(
# .Platform$OS.type == "windows",
# file.path(VENV, "lib", "site-packages/cardinal_pythonlib"),
# file.path(VENV, "lib", PYTHON_VERSION, "site-packages/cardinal_pythonlib")
# )
# reticulate::use_virtualenv(VENV, required=TRUE)
#
# cpl_fileops <- reticulate::import_from_path("fileops", CARDINAL_PYTHONLIB_BASEDIR)
# cpl_drugs <- reticulate::import_from_path("drugs", file.path(CARDINAL_PYTHONLIB_BASEDIR, "psychiatry"))
#
# ... this is NOT WORKING properly; dotted imports via reticulate::import() fail; also, imports from
# within the Python code fail even if you use reticulate::import_from_path(); this suggests the virtualenv is not set up
# properly; use reticulate::use_python() instead.
# -------------------------------------------------------------------------
# Import Python modules
# -------------------------------------------------------------------------
cardinal_pythonlib <- reticulate::import("cardinal_pythonlib")
cpl_fileops <- reticulate::import("cardinal_pythonlib.fileops")
cpl_drugs <- reticulate::import("cardinal_pythonlib.psychiatry.drugs")
# -------------------------------------------------------------------------
# Do something useful
# -------------------------------------------------------------------------
testnames <- c("citalopram", "Cipramil", "Prozac", "fluoxetine")
# Works for simple variables:
cpl_drugs$drug_names_to_generic(testnames)
# Also works for data table replacements:
dt <- data.table(
subject = c("Alice", "Bob", "Charles", "Dawn", "Egbert", "Flora"),
drug = c("citalopram", "Cipramil", "Prozac", "fluoxetine", "Priadel", "Haldol")
)
dt[, drug_generic := cpl_drugs$drug_names_to_generic(drug)]
dt[, is_antidepressant := cpl_drugs$drug_names_match_criteria(
drug_generic,
names_are_generic=TRUE,
antidepressant=TRUE)]
dt[, is_antidepressant_not_ssri := cpl_drugs$drug_names_match_criteria(
drug_generic,
names_are_generic=TRUE,
antidepressant=TRUE,
ssri=FALSE)]
dt[, is_conventional_antidepressant := cpl_drugs$drug_names_match_criteria(
drug_generic,
names_are_generic=TRUE,
conventional_antidepressant=TRUE)]
dt[, slam_antidepressant_finder := cpl_drugs$drug_names_match_criteria(
drug_generic,
names_are_generic=TRUE,
slam_antidepressant_finder=TRUE,
include_categories=TRUE)]
**Use for SQL finding**
.. code-block:: python
from typing import List
from cardinal_pythonlib.psychiatry.drugs import *
colname = "somecol"
antidepressants = all_drugs_where(conventional_antidepressant=True) # type: List[Drug]
antidep_sql_parts = [drug.sql_column_like_drug(colname) for drug in antidepressants]
antidep_sql = " OR ".join(antidep_sql_parts)
antipsychotics = all_drugs_where(antipsychotic=True) # type: List[Drug]
antipsy_sql_parts = [drug.sql_column_like_drug(colname) for drug in antipsychotics]
antipsy_sql = " OR ".join(antipsy_sql_parts)
alldrugs = all_drugs_where()
alldrug_sql_parts = [drug.sql_column_like_drug(colname) for drug in alldrugs]
alldrug_sql = " OR ".join(alldrug_sql_parts)
lithium = get_drug("lithium")
lithium_sql = lithium.sql_column_like_drug(colname)
# HOWEVER, NOTE THAT LITHIUM IS CURRENTLY OVER-INCLUSIVE and will include
# lithium chloride for LiDCO measurement.
""" # noqa
import re
from typing import Dict, List, Optional, Pattern, Union
from cardinal_pythonlib.sql.literals import sql_string_literal
# =============================================================================
# Regex constants
# =============================================================================
WILDCARD = ".*"  # if re.DOTALL is set, this also matches newlines
WB = WORD_BOUNDARY = r"\b"  # regex word boundary, prepended to drug-name patterns
# =============================================================================
# Class to capture drug information
# =============================================================================
class Drug(object):
    """
    Class to describe a specific drug, or a drug category.

    Also embodies knowledge about brand names and common misspellings.
    See the :const:`DRUGS` list for example of use.
    """
    def __init__(
            self,
            # Names
            generic: Union[str, List[str]],
            alternatives: List[str] = None,
            category_not_drug: bool = False,
            add_preceding_wildcards: bool = True,
            add_preceding_word_boundary: bool = True,
            add_following_wildcards: bool = True,

            # Psychiatry
            psychotropic: bool = None,  # special; can be used as override if False  # noqa
            antidepressant: bool = False,
            conventional_antidepressant: bool = False,
            ssri: bool = False,
            non_ssri_modern_antidepressant: bool = False,
            tricyclic_antidepressant: bool = False,
            tetracyclic_and_related_antidepressant: bool = False,
            monoamine_oxidase_inhibitor: bool = False,
            antipsychotic: bool = False,
            first_generation_antipsychotic: bool = False,
            second_generation_antipsychotic: bool = False,
            stimulant: bool = False,
            anticholinergic: bool = False,
            benzodiazepine: bool = False,
            z_drug: bool = False,
            non_benzodiazepine_anxiolytic: bool = False,
            gaba_a_functional_agonist: bool = False,
            gaba_b_functional_agonist: bool = False,
            mood_stabilizer: bool = False,

            # Endocrinology
            antidiabetic: bool = False,
            sulfonylurea: bool = False,
            biguanide: bool = False,
            glifozin: bool = False,
            glp1_agonist: bool = False,
            dpp4_inhibitor: bool = False,
            meglitinide: bool = False,
            thiazolidinedione: bool = False,

            # Cardiovascular
            cardiovascular: bool = False,
            beta_blocker: bool = False,
            ace_inhibitor: bool = False,
            statin: bool = False,

            # Respiratory
            respiratory: bool = False,
            beta_agonist: bool = False,

            # Gastrointestinal
            gastrointestinal: bool = False,
            proton_pump_inhibitor: bool = False,
            nonsteroidal_anti_inflammatory: bool = False,

            # Nutritional
            vitamin: bool = False,

            # Special flags:
            slam_antidepressant_finder: bool = False) -> None:
        # noinspection PyUnresolvedReferences
        """
        Initialize and determine/store category knowledge.

        ``alternatives`` can include regexes (as text).

        We add front/back wildcards by default; this handles all situations
        like "depot X", etc. We also add a preceding word boundary (after the
        wildcard); thus the usual transformation is ``XXX`` -> ``.*\\bXXX.*``.

        Args:
            generic: generic name, or list of names
            alternatives: can include regexes (as text)
            category_not_drug: is this a drug category, not a specific drug?
            add_preceding_wildcards: when making a regex (etc.), add a wildcard
                to the start of all possibilities (generic + alternative names)
                that don't already have one?
            add_preceding_word_boundary: when making a regex (etc.), add word
                boundaries to the start of all possibilities (generic +
                alternative names) that don't already have one?
            add_following_wildcards: when making a regex (etc.), add a wildcard
                to the end of all possibilities (generic + alternative names)
                that don't already have one?
            psychotropic: a psychotropic drug?
            antidepressant: an antidepressant?
            conventional_antidepressant: a traditional antidepressant?
            ssri: a selective serotonin reuptake inhibitor (SSRI)?
            non_ssri_modern_antidepressant: a non-SSRI "modern" antidepressant?
            tricyclic_antidepressant: a tricyclic?
            tetracyclic_and_related_antidepressant: a tetracyclic or related?
            monoamine_oxidase_inhibitor: a MAO-I?
            antipsychotic: an antipsychotic?
            first_generation_antipsychotic: an FGA?
            second_generation_antipsychotic: an SGA?
            stimulant: a psychostimulant?
            anticholinergic: an anticholinergic?
            benzodiazepine: a benzodiazepine?
            z_drug: a "Z" drug (e.g. zopiclone, zolpidem, ...)
            non_benzodiazepine_anxiolytic: a non-BZ anxiolytic?
            gaba_a_functional_agonist: a GABA-A functional agonist?
            gaba_b_functional_agonist: a GABA-B functional agonist?
            mood_stabilizer: a "mood stabilizer"?
            antidiabetic: treats diabetes?
            sulfonylurea: a sulfonylurea (sulphonylurea), for diabetes?
            biguanide: a biguanide, for diabetes?
            glifozin: a glifozin, for diabetes?
            glp1_agonist: a GLP-1 agonist, for diabetes?
            dpp4_inhibitor: a DPP4 inhibitor, for diabetes?
            meglitinide: a meglitinide, for diabetes?
            thiazolidinedione: a thiazolidinedione, for diabetes?
            cardiovascular: a cardiovascular drug?
            beta_blocker: a beta adrenoceptor antagonist?
            ace_inhibitor: an ACE inhibitor?
            statin: a statin?
            respiratory: a respiratory drug?
            beta_agonist: a beta adrenoceptor agonist?
            gastrointestinal: a gastrointestinal drug?
            proton_pump_inhibitor: a PPI?
            nonsteroidal_anti_inflammatory: an NSAID?
            vitamin: a vitamin?
            slam_antidepressant_finder: a drug found by the SLAM
                antidepressant-finding code? (A bit specialized, this one!)

        Attributes:
            mixture (bool): is this a mixture of more than one drug?
                Will be set if more than one generic name is given.
            all_generics (List[str]): list of all generic names in lower case
            generic_name: generic name (or combination name like ``a_with_b``
                for mixtures of ``a`` and ``b``)
            regex: compiled case-insensitive regular expression to match
                possible names
        """
        self.add_preceding_word_boundary = add_preceding_word_boundary
        self.add_preceding_wildcards = add_preceding_wildcards
        self.add_following_wildcards = add_following_wildcards

        # ---------------------------------------------------------------------
        # Name handling
        # ---------------------------------------------------------------------
        if isinstance(generic, list):
            self.mixture = True
            self.all_generics = [x.lower().strip() for x in generic]
            self.generic_name = "_with_".join(self.all_generics)
        elif isinstance(generic, str):
            self.mixture = False
            self.generic_name = generic.lower().strip()
            self.all_generics = [self.generic_name]
        else:
            raise ValueError(f"Bad generic_name: {generic!r}")
        self.alternatives = alternatives or []  # type: List[str]
        # Lazily-built caches (see the properties below):
        self._regex_text = None  # type: Optional[str]
        self._regex = None  # type: Optional[Pattern]
        self._sql_like_fragments = None  # type: Optional[List[str]]

        # ---------------------------------------------------------------------
        # Things we know about psychotropics
        # ---------------------------------------------------------------------
        # Specific antidepressant classes imply the broader categories:
        if (ssri or non_ssri_modern_antidepressant or
                tricyclic_antidepressant or
                tetracyclic_and_related_antidepressant or
                monoamine_oxidase_inhibitor):
            conventional_antidepressant = True
        if conventional_antidepressant:
            antidepressant = True
        if first_generation_antipsychotic or second_generation_antipsychotic:
            antipsychotic = True
        if benzodiazepine or z_drug:
            gaba_a_functional_agonist = True
        # Any psychotropic class implies "psychotropic", unless explicitly
        # overridden by passing psychotropic=False:
        if ((antidepressant or antipsychotic or stimulant or anticholinergic or
                gaba_a_functional_agonist or gaba_b_functional_agonist or
                mood_stabilizer) and
                (psychotropic is not False)):
            psychotropic = True
        if psychotropic is None:
            psychotropic = False

        # ---------------------------------------------------------------------
        # Things we know about other drugs
        # ---------------------------------------------------------------------
        if (sulfonylurea or biguanide or glifozin or glp1_agonist or
                dpp4_inhibitor or meglitinide or thiazolidinedione):
            antidiabetic = True
        if beta_blocker or ace_inhibitor:
            cardiovascular = True

        # ---------------------------------------------------------------------
        # Store category knowledge
        # ---------------------------------------------------------------------
        self.category_not_drug = category_not_drug
        self.psychotropic = psychotropic
        self.antidepressant = antidepressant
        self.conventional_antidepressant = conventional_antidepressant
        self.ssri = ssri
        self.non_ssri_modern_antidepressant = non_ssri_modern_antidepressant
        self.tricyclic = tricyclic_antidepressant
        self.tetracyclic_and_related_antidepressant = tetracyclic_and_related_antidepressant  # noqa
        self.monoamine_oxidase_inhibitor = monoamine_oxidase_inhibitor
        self.antipsychotic = antipsychotic
        self.first_generation_antipsychotic = first_generation_antipsychotic
        self.second_generation_antipsychotic = second_generation_antipsychotic
        self.stimulant = stimulant
        self.anticholinergic = anticholinergic
        self.benzodiazepine = benzodiazepine
        self.z_drug = z_drug
        self.gaba_a_functional_agonist = gaba_a_functional_agonist
        self.gaba_b_functional_agonist = gaba_b_functional_agonist
        self.non_benzodiazepine_anxiolytic = non_benzodiazepine_anxiolytic
        self.mood_stabilizer = mood_stabilizer
        self.antidiabetic = antidiabetic
        self.sulfonylurea = sulfonylurea
        self.biguanide = biguanide
        # BUGFIX: these five antidiabetic subcategories were previously
        # accepted as parameters (and used to derive "antidiabetic") but never
        # stored, so attribute-based filtering could not see them:
        self.glifozin = glifozin
        self.glp1_agonist = glp1_agonist
        self.dpp4_inhibitor = dpp4_inhibitor
        self.meglitinide = meglitinide
        self.thiazolidinedione = thiazolidinedione
        self.cardiovascular = cardiovascular
        self.beta_blocker = beta_blocker
        self.ace_inhibitor = ace_inhibitor
        self.statin = statin
        self.respiratory = respiratory
        self.beta_agonist = beta_agonist
        self.gastrointestinal = gastrointestinal
        self.proton_pump_inhibitor = proton_pump_inhibitor
        self.nonsteroidal_anti_inflammatory = nonsteroidal_anti_inflammatory
        self.vitamin = vitamin

        # ---------------------------------------------------------------------
        # Store other flags
        # ---------------------------------------------------------------------
        self.slam_antidepressant_finder = slam_antidepressant_finder

    @property
    def regex_text(self) -> str:
        """
        Return regex text (yet to be compiled) for this drug.
        """
        if self._regex_text is None:
            possibilities = []  # type: List[str]
            for p in list(set(self.all_generics + self.alternatives)):
                if self.add_preceding_word_boundary and not p.startswith(WB):
                    p = WB + p
                if self.add_preceding_wildcards and not p.startswith(WILDCARD):
                    p = WILDCARD + p
                if self.add_following_wildcards and not p.endswith(WILDCARD):
                    p = p + WILDCARD
                possibilities.append(p)
            self._regex_text = "|".join("(?:" + x + ")" for x in possibilities)
        return self._regex_text

    @property
    def regex(self) -> Pattern:
        """
        Returns a compiled regex for this drug.
        """
        if self._regex is None:
            self._regex = re.compile(self.regex_text,
                                     re.IGNORECASE | re.DOTALL)
        return self._regex

    @staticmethod
    def regex_to_sql_like(regex_text: str,
                          single_wildcard: str = "_",
                          zero_or_more_wildcard: str = "%") -> List[str]:
        """
        Converts regular expression text to a reasonably close fragment
        for the SQL ``LIKE`` operator.

        NOT PERFECT, but works for current built-in regular expressions.
        Known limitation: alternatives inside a bracketed group are treated
        as a set of single characters, so multi-character alternatives such
        as ``[ph|f]`` expand to ``p``, ``h`` and ``f`` separately, not to
        ``ph`` and ``f``.

        Args:
            regex_text: regular expression text to work with
            single_wildcard: SQL single wildcard, typically an underscore
            zero_or_more_wildcard: SQL "zero/one/many" wildcard, probably always
                a percent symbol

        Returns:
            string for an SQL string literal

        Raises:
            :exc:`ValueError` for some regex text that it doesn't understand
            properly
        """
        def append_to_all(new_content: str) -> None:
            nonlocal results
            results = [r + new_content for r in results]

        def split_and_append(new_options: List[str]) -> None:
            nonlocal results
            newresults = []  # type: List[str]
            for option in new_options:
                newresults.extend([r + option for r in results])
            results = newresults

        def deduplicate_wildcards(text: str) -> str:
            while zero_or_more_wildcard + zero_or_more_wildcard in text:
                text = text.replace(
                    zero_or_more_wildcard + zero_or_more_wildcard,
                    zero_or_more_wildcard)
            return text

        # Basic processing
        working = regex_text  # strings are immutable
        results = [zero_or_more_wildcard]  # start with a wildcard
        while working:
            if working.startswith(".*"):
                # e.g. ".*ozapi"
                append_to_all(zero_or_more_wildcard)
                working = working[2:]
            elif working.startswith("["):
                # e.g. "[io]peridol"
                close_bracket = working.index("]")  # may raise
                bracketed = working[1:close_bracket]
                option_groups = bracketed.split("|")
                options = [c for group in option_groups for c in group]
                split_and_append(options)
                working = working[close_bracket + 1:]
            elif len(working) > 1 and working[1] == "?":
                # e.g. "r?azole"
                split_and_append(["", working[0]])
                # ... regex "optional character"
                # ... SQL: some results with a single wildcard, some without
                working = working[2:]
            elif working.startswith("."):
                # single character wildcard
                append_to_all(single_wildcard)
                working = working[1:]
            else:
                append_to_all(working[0])
                working = working[1:]
        append_to_all(zero_or_more_wildcard)  # end with a wildcard

        # Remove any duplicate (consecutive) % wildcards:
        results = [deduplicate_wildcards(r) for r in results]

        # Done
        return results

    @property
    def sql_like_fragments(self) -> List[str]:
        """
        Returns all the string literals to which a database column should be
        compared using the SQL ``LIKE`` operator, to match this drug.

        This isn't as accurate as the regex, but ``LIKE`` can do less.

        ``LIKE`` uses the wildcards ``_`` and ``%``.
        """
        if self._sql_like_fragments is None:
            self._sql_like_fragments = []
            for p in list(set(self.all_generics + self.alternatives)):
                self._sql_like_fragments.extend(self.regex_to_sql_like(p))
        return self._sql_like_fragments

    def name_matches(self, name: str) -> bool:
        """
        Detects whether the name that's passed matches our knowledge of any of
        things that this drug might be called: generic name, brand name(s),
        common misspellings.

        The parameter should be pre-stripped of edge whitespace.
        """
        return bool(self.regex.match(name))

    def sql_column_like_drug(self, column_name: str) -> str:
        """
        Returns SQL like

        .. code-block:: sql

            (column_name LIKE '%drugname1%' OR
             column_name LIKE '%drugname2%')

        for the drug names that this Drug object knows about.

        Args:
            column_name: column name, pre-escaped if necessary

        Returns:
            SQL fragment as above
        """
        clauses = [
            f"{column_name} LIKE {sql_string_literal(f)}"
            for f in self.sql_like_fragments
        ]
        return f"({' OR '.join(clauses)})"
# Source data.
DRUGS = [
# In comments below: (*) misspelling, capitalized for brand name, (~)
# hybrid generic/brand name, (+) old name.
# -------------------------------------------------------------------------
# SSRIs
# -------------------------------------------------------------------------
Drug(
"citalopram",
["Cipramil", "Celexa"],
ssri=True,
slam_antidepressant_finder=True
),
Drug(
"escitalopram",
["Cipralex", "Lexapro"],
ssri=True,
slam_antidepressant_finder=True
),
Drug(
"fluoxetine",
["Prozac", "Bellzac", "Oxactin", "Prozep", "Sarafem", "fluox.*"],
# CPFT 2013: "fluoxetine Dec"
ssri=True,
slam_antidepressant_finder=True
),
Drug(
"fluvoxamine",
["Luvox", "Faverin", "fluvoxamine.*"], # e.g. "fluvoxamine maleate"
ssri=True,
slam_antidepressant_finder=True
),
Drug(
"paroxetine",
["Seroxat", "Paxil"], # there are other brands elsewhere...
ssri=True,
slam_antidepressant_finder=True
),
Drug(
"sertraline",
["Lustral", "Zoloft", "Bellsert"],
# NOT Seretra (cf. SLAM code, see email to self 2016-12-02); Seretra =
# seratrodast = for asthma
ssri=True,
slam_antidepressant_finder=True
),
# -------------------------------------------------------------------------
# FIRST-GENERATION ANTIPSYCHOTICS
# -------------------------------------------------------------------------
Drug("benperidol", ["Anquil"], first_generation_antipsychotic=True),
Drug("chlorpromazine", ["Largactil"], first_generation_antipsychotic=True),
Drug(
"flupentixol",
["Depixol", "Fluanxol", "flupent.*", "Depixol.*"],
# e.g. flupenthixol, flupenthixol decanoate, flupentixol decanoate
first_generation_antipsychotic=True,
antidepressant=True,
slam_antidepressant_finder=True
),
Drug(
"fluphenazine",
["Modecate", "fluphen.*", "Modecate.*"],
first_generation_antipsychotic=True
),
Drug(
"haloperidol",
[
"Haldol", "Serenase",
"hal[io]p.*", "Dozi.*", "Hald.*", "Serena.*",
# NB Serenase, Serenace.
# CPFT 2013: haloperidol, haloperidol decanoate, Haldol, Haldol
# decanoate, Serenase.
],
first_generation_antipsychotic=True
),
Drug("levomepromazine", ["Nozinan"], first_generation_antipsychotic=True),
Drug("pericyazine", first_generation_antipsychotic=True),
Drug("perphenazine", ["Fentazin"], first_generation_antipsychotic=True),
Drug(
["amitriptyline", "perphenazine"],
["Triptafen"], # special
tricyclic_antidepressant=True,
slam_antidepressant_finder=True
),
Drug("pimozide", ["Orap"], first_generation_antipsychotic=True),
Drug(
"pipotiazine",
["pipot.*", "Piport.*"],
# ... actually (CPFT 2013): pipotiazine, Piportil
first_generation_antipsychotic=True
),
Drug(
"prochlorperazine",
["Stemetil"],
first_generation_antipsychotic=True
),
Drug("promazine", first_generation_antipsychotic=True),
Drug(
"sulpiride",
["Dolmatil", "Sulpor"],
first_generation_antipsychotic=True
),
Drug(
"trifluoperazine",
["Stelazine"],
first_generation_antipsychotic=True
),
Drug(
"zuclopenthixol",
["zuclop.*", "Clopix.*", "Acc?uphase"],
# ... actually (CPFT 2013): zuclopenthixol, zuclopenthixol acetate,
# zuclopenthixol decanoate, Clopixol, Clopixol Decanoate, Acuphase
first_generation_antipsychotic=True
),
# -------------------------------------------------------------------------
# SECOND-GENERATION ANTIPSYCHOTICS
# -------------------------------------------------------------------------
Drug(
"amisulpride",
["amisulp.*", "Solian"],
# ... actually (CPFT 2013): amisulpiride(*), amisulpride, Solian
second_generation_antipsychotic=True
),
Drug(
"aripiprazole",
["Abilify", "ari?pr?ipr?azol.*"],
second_generation_antipsychotic=True
),
Drug(
"asenapine",
["Saphris", "Sycrest"],
second_generation_antipsychotic=True
),
Drug(
"clozapine",
["cloz.*", "Denz.*", "Zapon.*"],
# ... actually (CPFT 2013): clozapine, Clozaril, clozepine(*)
second_generation_antipsychotic=True
),
Drug(
"iloperidone",
["Fanapt", "Fanapta", "Zomaril"],
second_generation_antipsychotic=True
),
Drug("lurasidone", ["Latuda"], second_generation_antipsychotic=True),
Drug(
"olanzapine",
["olanz.*", "Zalast.*", "Zyprex.*", "Zypad.*"],
# ... actually (CPFT 2013): olanzapine, olanzapine embonate,
# olanz(*), olanzepine(*), olanzapin(*), Zyprexa
second_generation_antipsychotic=True
),
Drug(
"paliperidone",
["Invega", "Xeplion"],
second_generation_antipsychotic=True
),
Drug(
"quetiapine",
["quet.*", "Seroquel"],
# ... actually (CPFT 2013): quetiapine, quetiepine(*), Seroquel
second_generation_antipsychotic=True
),
Drug(
"risperidone",
["risp.*", "Consta"],
# ... actually (CPFT 2013): risperidone, risperadone(*), Risperidone
# Consta (~), Risperdal, Risperdal Consta
second_generation_antipsychotic=True
),
Drug(
"sertindole",
["Serdolect", "Serlect"],
second_generation_antipsychotic=True
),
Drug("ziprasidone", second_generation_antipsychotic=True),
Drug(
"zotepine", # not in UK
["Nipolept", "Losizopilon", "Lodopin", "Setous"],
second_generation_antipsychotic=True
),
# -------------------------------------------------------------------------
# STIMULANTS
# -------------------------------------------------------------------------
Drug(
"amfetamine",
[".*am[ph|f]etamine.*", "Adderall"],
# ... actually (CPFT 2013): dextroamphetamine(+), dexamfetamine
stimulant=True
),
Drug(
"methylphenidate",
["Ritalin", "Concerta.*", "Equasym.*", "Medikinet.*"],
# ... actually (CPFT 2013): methylphenidate, Ritalin, Concerta
stimulant=True
),
Drug("modafinil", ["Provigil"], stimulant=True),
# -------------------------------------------------------------------------
# ANTICHOLINERGICS
# -------------------------------------------------------------------------
Drug("benztropine", ["benzatropine"], anticholinergic=True),
Drug("orphenadrine", ["Biorphen", "Disipal"], anticholinergic=True),
Drug("procyclidine", ["Arpicolin", "Kemadrin"], anticholinergic=True),
Drug("trihexyphenidyl", ["Broflex"], anticholinergic=True),
# -------------------------------------------------------------------------
# OTHER MODERN ANTIDEPRESSANTS
# -------------------------------------------------------------------------
Drug(
"agomelatine",
["Valdoxan"],
non_ssri_modern_antidepressant=True,
slam_antidepressant_finder=True
),
Drug(
"bupropion",
["Zyban"],
non_ssri_modern_antidepressant=True
# antidepressant license in US, smoking cessation in UK
),
Drug(
"duloxetine",
["Cymbalta", "Yentreve", "duloxat.*"],
non_ssri_modern_antidepressant=True,
slam_antidepressant_finder=True
),
Drug(
"mirtazapine",
["mirtaz.*", "mirtazepine", "Zispin", "Mirza"],
# ... actually (CPFT 2013): mirtazapine, mirtazepine(*), "mirtazapine
# Dec" (?)
non_ssri_modern_antidepressant=True,
slam_antidepressant_finder=True
),
Drug(
"reboxetine",
["Edronax", "reboxat.*"],
non_ssri_modern_antidepressant=True,
slam_antidepressant_finder=True
),
Drug(
"tryptophan",
["Optimax"],
non_ssri_modern_antidepressant=True,
slam_antidepressant_finder=True
),
Drug(
"venlafaxine",
["venla.*", "Eff?exor.*"],
# ... actually (CPFT 2013): venlafaxine, venlafaxine XL,
non_ssri_modern_antidepressant=True, # though obviously an SSRI too...
slam_antidepressant_finder=True
),
# -------------------------------------------------------------------------
# TRICYCLIC AND RELATED ANTIDEPRESSANTS
# -------------------------------------------------------------------------
Drug(
"tricyclic_antidepressant",
["tricyclic.*", "tca" + WB],
tricyclic_antidepressant=True,
slam_antidepressant_finder=True,
category_not_drug=True,
),
Drug(
"amitriptyline",
["amitr[i|y]pt[i|y]l[i|y]n.*", "Vanatrip", "Elavil", "Endep"],
# ... actually (CPFT 2013): amitriptyline, amitriptiline(*),
# amitryptyline(*)
# Triptafen = amitriptyline + perphenazine; see above.
tricyclic_antidepressant=True,
slam_antidepressant_finder=True
),
Drug(
"clomipramine",
["Anafranil.*"],
tricyclic_antidepressant=True,
slam_antidepressant_finder=True
),
Drug(
"dosulepin",
["dothiepin", "Prothiaden"],
# ... actually (CPFT 2013): dosulepin, dothiepin(+)
tricyclic_antidepressant=True,
slam_antidepressant_finder=True
),
Drug(
"doxepin",
["Sinepin", "Sinequan", "Sinepin", "Xepin"],
# Xepin is cream only
tricyclic_antidepressant=True,
slam_antidepressant_finder=True
),
Drug(
"imipramine",
["Tofranil"],
tricyclic_antidepressant=True,
slam_antidepressant_finder=True
),
Drug(
"lofepramine",
["Lomont"],
tricyclic_antidepressant=True,
slam_antidepressant_finder=True
),
Drug(
"nortriptyline",
["nortr.*", "Allegron", "Pamelor", "Aventyl"],
# ... actually (CPFT 2013): nortriptyline, nortryptiline(*)
tricyclic_antidepressant=True,
slam_antidepressant_finder=True
),
Drug(
"trimipramine",
["Surmontil"],
tricyclic_antidepressant=True,
slam_antidepressant_finder=True
),
# -------------------------------------------------------------------------
# TETRACYCLIC-RELATED ANTIDEPRESSANTS
# -------------------------------------------------------------------------
Drug(
"mianserin",
tetracyclic_and_related_antidepressant=True,
slam_antidepressant_finder=True
),
Drug(
"trazodone",
["Molipaxin"],
tetracyclic_and_related_antidepressant=True,
slam_antidepressant_finder=True
),
Drug(
"nefazodone",
# discontinued for hepatotoxicity? But apparently still used in 2014
# in the UK: http://www.bbc.co.uk/news/uk-25745824
["Dutonin", "Nefadar", "Serzone"],
tetracyclic_and_related_antidepressant=True
# brand names from https://en.wikipedia.org/wiki/Nefazodone
# ... yup, still a trickle, mostly from Islington:
# https://openprescribing.net/chemical/0403040T0/
),
Drug(
"maprotiline",
["Ludiomil"],
tetracyclic_and_related_antidepressant=True
),
# -------------------------------------------------------------------------
# MAOIs
# -------------------------------------------------------------------------
Drug(
"phenelzine",
["phenylethylhydrazine", "Alazin", "Nardil"],
monoamine_oxidase_inhibitor=True,
slam_antidepressant_finder=True
# - SLAM code (see e-mail to self 2016-12-02) also has %Alazin%; not sure # noqa
# that's right; see also
# http://www.druglib.com/activeingredient/phenelzine/
# - oh, yes, it is right:
# https://www.pharmacompass.com/active-pharmaceutical-ingredients/alazin # noqa
# - phenylethylhydrazine is a synonym; see
# http://www.minclinic.ru/drugs/drugs_eng/B/Beta-phenylethylhydrazine.html # noqa
),
# not included: pheniprazine
Drug(
"isocarboxazid",
monoamine_oxidase_inhibitor=True,
slam_antidepressant_finder=True
),
Drug(
"moclobemide",
["Manerix"],
monoamine_oxidase_inhibitor=True,
slam_antidepressant_finder=True
),
Drug(
"tranylcypromine",
["Parnate"],
monoamine_oxidase_inhibitor=True,
slam_antidepressant_finder=True
),
# -------------------------------------------------------------------------
# BENZODIAZEPINES
# -------------------------------------------------------------------------
Drug(
"benzodiazepine",
["benzodiazepine.*"],
benzodiazepine=True,
category_not_drug=True
),
Drug("alprazolam", benzodiazepine=True),
Drug("chlordiazepoxide", benzodiazepine=True),
Drug("clobazam", benzodiazepine=True),
Drug("clonazepam", ["Rivotril"], benzodiazepine=True),
Drug(
"diazepam",
["diaz.*", "Valium"],
# ... actually (CPFT 2013): diazepam, diazapam(*), diazapem(*), Valium
benzodiazepine=True
),
Drug("flurazepam", ["Dalmane"], benzodiazepine=True),
Drug("loprazolam", benzodiazepine=True),
Drug("lorazepam", ["Ativan"], benzodiazepine=True),
Drug("lormetazepam", benzodiazepine=True),
Drug("midazolam", ["Hypnovel"], benzodiazepine=True),
Drug("nitrazepam", benzodiazepine=True),
Drug("oxazepam", benzodiazepine=True),
Drug("temazepam", benzodiazepine=True),
# -------------------------------------------------------------------------
# Z-DRUGS
# -------------------------------------------------------------------------
Drug("zaleplon", ["Sonata"], z_drug=True),
Drug(
"zolpidem",
["zolpidem.*", "Stilnoct"],
# ... actually (CPFT 2013): zolpidem, zolpidem tartrate
z_drug=True
),
Drug("zopiclone", ["Zimovane"], z_drug=True),
# -------------------------------------------------------------------------
# OTHER GABA MODULATORS
# -------------------------------------------------------------------------
Drug(
"baclofen",
[
"Lioresal", "Lyflex", "Bacfen", "Baclof", "Bacmax", "Chinofen",
"Parafon", "Riclofen", "Spinofen", "Spinospas", "Tefsole",
"Gablofen", "Kemstro"
],
gaba_b_functional_agonist=True
),
# -------------------------------------------------------------------------
# OTHER ANXIOLYTICS
# -------------------------------------------------------------------------
Drug("buspirone", ["Buspar"], non_benzodiazepine_anxiolytic=True),
# -------------------------------------------------------------------------
# OTHER ANTIMANIC
# -------------------------------------------------------------------------
Drug(
"carbamazepine",
["Carbagen.*", "Tegretol.*"],
# also Tegretol Prolonged Release (formerly Tegretol Retard)
# ... actually (CPFT 2013): carbamazepine, Tegretol
mood_stabilizer=True
),
Drug(
"valproate",
[".*valp.*", "Epilim.*", "Episenta", "Epival", "Convulex", "Depakote"],
# ... also semisodium valproate
# ... actually (CPFT 2013): sodium valproate [chrono], valproic acid,
# valproate, sodium valproate, sodium valporate(*), sodium valporate(*)
# chrono, Depakote
mood_stabilizer=True
),
Drug(
"lithium",
["lithium.*", "Camcolit", "Liskonum", "Priadel", "Li-Liquid"],
# ... actually (CPFT 2013): lithium, lithium carbonate, lithium citrate
# (curious: Priadel must be being changed to lithium...)
antidepressant=True,
mood_stabilizer=True
),
# -------------------------------------------------------------------------
# OTHER FOR BIPOLAR/UNIPOLAR DEPRESSION
# -------------------------------------------------------------------------
Drug(
"lamotrigine",
["lamotrigine.*", "Lamictal"],
mood_stabilizer=True,
antidepressant=True,
),
Drug(
"triiodothyronine",
["tri-iodothyronine", "liothyronine", "Cytomel"],
antidepressant=True,
),
# -------------------------------------------------------------------------
# GENERAL MEDICINE: DIABETES
# -------------------------------------------------------------------------
Drug("glibenclamide", sulfonylurea=True),
Drug(
"gliclazide",
["Zicron", "Diamicron.*", "Dacadis.*", "Vitile.*"],
sulfonylurea=True
),
Drug("glimepiride", ["Amaryl"], sulfonylurea=True),
Drug("glipizide", ["Minodiab"], sulfonylurea=True),
Drug("tolbutamide", sulfonylurea=True),
Drug("metformin", ["metformin.*", "Glucophage.*"], biguanide=True),
Drug("acarbose", ["Glucobay"], antidiabetic=True),
Drug("dapagliflozin", ["Forxiga"], glifozin=True),
Drug("exenatide", ["Byetta", "Bydureon"], glp1_agonist=True),
Drug("linagliptin", ["Trajenta"], dpp4_inhibitor=True),
Drug(["linagliptin", "metformin"], ["Jentadueto"],
biguanide=True, dpp4_inhibitor=True),
Drug("liraglutide", ["Victoza"], glp1_agonist=True),
Drug("lixisenatide", ["Lyxumia"], glp1_agonist=True),
Drug("nateglinide", ["Starlix"], meglitinide=True),
Drug("pioglitazone", ["Actos"], thiazolidinedione=True),
Drug(["pioglitazone", "metformin"], ["Competact"],
thiazolidinedione=True, biguanide=True),
Drug("repaglinide", ["Prandin"], meglitinide=True),
Drug("saxagliptin", ["Onglyza"], dpp4_inhibitor=True),
Drug(["saxagliptin", "metformin"], ["Komboglyze"],
dpp4_inhibitor=True, biguanide=True),
Drug("sitagliptin", ["Januvia"], dpp4_inhibitor=True),
Drug(["sitagliptin", "metformin"], ["Janumet"],
dpp4_inhibitor=True, biguanide=True),
Drug("vildagliptin", ["Galvus"], dpp4_inhibitor=True),
Drug(["vildagliptin", "metformin"], ["Eucreas"],
dpp4_inhibitor=True, biguanide=True),
Drug(
"insulin",
# Insulin. Covering the BNF categories:
# INSULIN
# INSULIN ASPART
# INSULIN GLULISINE
# INSULIN LISPRO
# INSULIN DEGLUDEC
# INSULIN DETEMIR
# INSULIN GLARGINE
# INSULIN ZINC SUSPENSION
# ISOPHANE INSULIN
# PROTAMINE ZINC INSULIN
# BIPHASIC INSULIN ASPART
# BIPHASIC INSULIN LISPRO
# BIPHASIC ISOPHANE INSULIN
[
".*insulin.*", ".*aspart.*", ".*glulisine.*", ".*lispro.*",
".*degludec.*", ".*detemir.*", ".*glargine.*", ".*Hypurin.*",
".*Actrapid.*", ".*Humulin.*", ".*Insuman.*", ".*Novorapid.*",
".*Apidra.*", ".*Humalog.*", ".*Tresiba.*", ".*Levemir.*",
".*Lantus.*", ".*Insulatard.*", ".*NovoMix.*",
],
antidiabetic=True
),
# -------------------------------------------------------------------------
# GENERAL MEDICINE: CARDIOVASCULAR
# -------------------------------------------------------------------------
Drug("aspirin", cardiovascular=True),
Drug("atenolol", beta_blocker=True),
# ACE inhibitors (selected)
Drug("lisinopril", ace_inhibitor=True),
Drug("ramipril", ace_inhibitor=True),
# Statins
Drug("atorvastatin", ["Lipitor"], statin=True),
Drug("fluvastatin", ["Lescol.*"], statin=True),
Drug("pravastatin", ["Lipostat"], statin=True),
Drug("rosuvastatin", ["Crestor"], statin=True),
Drug("simvastatin", ["Zocor"], statin=True),
Drug(["simvastatin", "ezetimibe"], ["Inegy"], statin=True),
# -------------------------------------------------------------------------
# GENERAL MEDICINE: RESPIRATORY
# -------------------------------------------------------------------------
Drug(
"salbutamol",
["salbut.*", "vent.*"],
# ... actually (CPFT 2013): salbutamol
respiratory=True, beta_agonist=True
),
# -------------------------------------------------------------------------
# GENERAL MEDICINE: GASTROINTESTINAL
# -------------------------------------------------------------------------
Drug(
"lactulose",
["lactul.*", "Duphal.*", "Lactug.*", "laevol.*"],
# ... actually (CPFT 2013): lactulose
gastrointestinal=True
),
Drug("lansoprazole", proton_pump_inhibitor=True),
Drug("omeprazole", proton_pump_inhibitor=True),
Drug("senna", gastrointestinal=True),
# -------------------------------------------------------------------------
# GENERAL MEDICINE: OTHER
# -------------------------------------------------------------------------
Drug("ibuprofen", nonsteroidal_anti_inflammatory=True),
Drug("levothyroxine"),
Drug("paracetamol"),
Drug("thiamine", vitamin=True),
# -------------------------------------------------------------------------
# MAYBE ADD:
# - OPIOIDS
# - clonidine
# - cloral betaine
# - ?domperidone
# - donepezil
# - gabapentin
# - hyoscine
# - Keppra = levetiracetam
# - linezolid (as it's an MAOI)
# - memantine
# - methyldopa
# - ?metoclopramide
# - nicotine
# - pregabalin
# - promethazine
# - ropinirole
# - rotigotine
# - selegiline
# - topiramate
# -------------------------------------------------------------------------
] # type: List[Drug]
# =============================================================================
# High-speed lookup versions of the original constants
# =============================================================================
# Map from (lowercase) generic name to its Drug, for O(1) lookup in get_drug().
DRUGS_BY_GENERIC_NAME = {d.generic_name: d for d in DRUGS}
# =============================================================================
# Get drug object by name
# =============================================================================
def get_drug(drug_name: str,
             name_is_generic: bool = False,
             include_categories: bool = False) -> Optional[Drug]:
    """
    Converts a drug name to a :class:`.Drug` class, or ``None`` if no match.

    If you already have the generic name, you can get the Drug more
    efficiently by setting ``name_is_generic=True`` (dictionary lookup rather
    than a linear scan of name/alias patterns).

    Set ``include_categories=True`` to include drug categories (such as
    tricyclics) as well as individual drugs. With the default of ``False``,
    category pseudo-drugs are never returned, whichever lookup path is used.
    """
    drug_name = drug_name.strip().lower()
    if name_is_generic:
        drug = DRUGS_BY_GENERIC_NAME.get(drug_name)  # type: Optional[Drug]
        if drug is not None and drug.category_not_drug and not include_categories:  # noqa
            return None
        return drug
    for d in DRUGS:
        # BUG FIX: this path previously ignored include_categories, so a name
        # matching a category pseudo-drug was returned even when
        # include_categories=False -- inconsistent with the generic-name
        # branch above and with the documented contract.
        if d.category_not_drug and not include_categories:
            continue
        if d.name_matches(drug_name):
            return d
    return None
# =============================================================================
# Convert drug names to generic equivalents
# =============================================================================
def drug_name_to_generic(drug_name: str,
                         unknown_to_default: bool = False,
                         default: str = None,
                         include_categories: bool = False) -> str:
    """
    Translates a drug name (brand name, misspelling pattern, etc.) into the
    name of its generic equivalent.

    If no drug is recognized, returns ``default`` when
    ``unknown_to_default`` is true, otherwise echoes the input name back.
    """
    matched = get_drug(drug_name, include_categories=include_categories)
    if matched is None:
        return default if unknown_to_default else drug_name
    return matched.generic_name
def drug_names_to_generic(drugs: List[str],
                          unknown_to_default: bool = False,
                          default: str = None,
                          include_categories: bool = False) -> List[str]:
    """
    Converts a list of drug names to their generic equivalents, preserving
    order. Arguments are as for :func:`drug_name_to_generic`, applied to each
    name in turn.

    Note in passing the following conversion of blank-type representations
    from R via ``reticulate``, when using e.g. the ``default`` parameter and
    storing results in a ``data.table()`` character column:

    .. code-block:: none

        ------------------------------  ----------------
        To Python                       Back from Python
        ------------------------------  ----------------
        [not passed, so Python None]    "NULL"
        NULL                            "NULL"
        NA_character_                   "NA"
        NA                              TRUE (logical)
        ------------------------------  ----------------
    """
    # Build the shared keyword arguments once, then map over the names.
    shared_kwargs = dict(
        unknown_to_default=unknown_to_default,
        default=default,
        include_categories=include_categories,
    )
    return [drug_name_to_generic(name, **shared_kwargs) for name in drugs]
# =============================================================================
# Check drugs against criteria
# =============================================================================
def drug_matches_criteria(drug: Drug, **criteria: Dict[str, bool]) -> bool:
    """
    Does this :class:`.Drug` instance satisfy every criterion?

    Args:
        drug: a :class:`.Drug` instance
        criteria: ``name=value`` pairs compared against the attributes of
            the :class:`Drug` class, e.g. ``antidepressant=True``. All pairs
            must match for the result to be true.
    """
    return all(
        getattr(drug, attr) == wanted
        for attr, wanted in criteria.items()
    )
def all_drugs_where(sort=True,
                    include_categories: bool = False,
                    **criteria: Dict[str, bool]) -> List[Drug]:
    """
    Find all drugs matching the specified criteria (see
    :func:`drug_matches_criteria`). If ``include_categories`` is true, drug
    categories (like "tricyclics") are considered as well as individual
    drugs. When ``sort`` is true, results are ordered by generic name.

    Pass keyword arguments such as

    .. code-block:: python

        from cardinal_pythonlib.psychiatry.drugs import *
        non_ssri_antidep = all_drugs_where(antidepressant=True, ssri=False)
        print([d.generic_name for d in non_ssri_antidep])
    """
    # Filter out category pseudo-drugs first (unless requested), then match.
    eligible = (
        d for d in DRUGS
        if include_categories or not d.category_not_drug
    )
    matching_drugs = [
        d for d in eligible if drug_matches_criteria(d, **criteria)
    ]  # type: List[Drug]
    if sort:
        matching_drugs.sort(key=lambda d: d.generic_name)
    return matching_drugs
def drug_name_matches_criteria(drug_name: str,
                               name_is_generic: bool = False,
                               include_categories: bool = False,
                               **criteria: Dict[str, bool]) -> bool:
    """
    Does the drug named ``drug_name`` match the criteria? (False if the name
    is unrecognized, or if it names a category and ``include_categories`` is
    false.) See :func:`drug_matches_criteria`.
    """
    drug = get_drug(drug_name, name_is_generic)
    return (
        drug is not None
        and (include_categories or not drug.category_not_drug)
        and drug_matches_criteria(drug, **criteria)
    )
def drug_names_match_criteria(drug_names: List[str],
                              names_are_generic: bool = False,
                              include_categories: bool = False,
                              **criteria: Dict[str, bool]) -> List[bool]:
    """
    Check each of several drug names against the criteria, returning one
    boolean per input name, in order. See :func:`drug_matches_criteria`.
    """
    results = []  # type: List[bool]
    for name in drug_names:
        results.append(drug_name_matches_criteria(
            name,
            name_is_generic=names_are_generic,
            include_categories=include_categories,
            **criteria))
    return results
| 36.85616
| 124
| 0.546291
|
4a0557dfcae7790a91fe64268f925c93437f8782
| 2,606
|
py
|
Python
|
pypeln/task/api/ordered.py
|
Davidnet/pypeln
|
6e1295c2ac7914dadfa546a937537aa2c2a5978d
|
[
"MIT"
] | null | null | null |
pypeln/task/api/ordered.py
|
Davidnet/pypeln
|
6e1295c2ac7914dadfa546a937537aa2c2a5978d
|
[
"MIT"
] | null | null | null |
pypeln/task/api/ordered.py
|
Davidnet/pypeln
|
6e1295c2ac7914dadfa546a937537aa2c2a5978d
|
[
"MIT"
] | null | null | null |
import typing as tp
from pypeln import utils as pypeln_utils
from pypeln.utils import A, B, T
from ..stage import Stage
from ..worker import ProcessFn, Worker
from .to_stage import to_stage
class Ordered(tp.NamedTuple):
    """Worker body that buffers all inputs, then emits them by .index order."""

    async def __call__(self, worker: Worker, **kwargs):
        # Keep `buffered` sorted by .index via right-to-left insertion:
        # new elements typically arrive roughly in order, so the scan is short.
        buffered = []
        async for elem in worker.stage_params.input_queue:
            pos = len(buffered)
            while pos > 0 and elem.index < buffered[pos - 1].index:
                pos -= 1
            buffered.insert(pos, elem)
        # Input exhausted: flush everything downstream in sorted order.
        for item in buffered:
            await worker.stage_params.output_queues.put(item)
@tp.overload
def ordered(
    stage: tp.Union[Stage[A], tp.Iterable[A], tp.AsyncIterable[A]],
) -> Stage[A]:
    ...


@tp.overload
def ordered() -> pypeln_utils.Partial[Stage[A]]:
    ...


def ordered(
    stage: tp.Union[
        Stage[A], tp.Iterable[A], tp.AsyncIterable[A], pypeln_utils.Undefined
    ] = pypeln_utils.UNDEFINED,
) -> tp.Union[Stage[A], pypeln_utils.Partial[Stage[A]]]:
    """
    Creates a stage that re-emits its elements sorted by their order of
    creation on the source iterable(s) of the pipeline.

    ```python
    import pypeln as pl
    import random
    import time

    def slow_squared(x):
        time.sleep(random.random())
        return x ** 2

    stage = range(5)
    stage = pl.process.map(slow_squared, stage, workers = 2)
    stage = pl.process.ordered(stage)

    print(list(stage)) # [0, 1, 4, 9, 16]
    ```

    !!! note
        `ordered` will work even if the previous stages are from different `pypeln` modules, but it may not work if you introduce an intermediate external iterable stage.

    !!! warning
        This stage will not yield until it has accumulated all of the elements from the previous stage; use it only if all elements fit in memory.

    Arguments:
        stage: A Stage, Iterable, or AsyncIterable.

    Returns:
        A `Stage` if the `stage` argument was given, otherwise a `Partial`.
    """
    if isinstance(stage, pypeln_utils.Undefined):
        # Curried form: ordered()(stage).
        return pypeln_utils.Partial(ordered)
    return Stage(
        process_fn=Ordered(),
        workers=1,
        maxsize=0,
        timeout=0,
        total_sources=1,
        dependencies=[to_stage(stage)],
        on_start=None,
        on_done=None,
        f_args=[],
    )
| 26.323232
| 169
| 0.605909
|
4a0557f7a960b0dcee83cd7ce052bafa0500ed95
| 2,183
|
py
|
Python
|
src/sentry/tasks/store.py
|
vperron/sentry
|
4ea0c8cb120a3165f0e0b185c64213b69ab621ea
|
[
"BSD-3-Clause"
] | null | null | null |
src/sentry/tasks/store.py
|
vperron/sentry
|
4ea0c8cb120a3165f0e0b185c64213b69ab621ea
|
[
"BSD-3-Clause"
] | null | null | null |
src/sentry/tasks/store.py
|
vperron/sentry
|
4ea0c8cb120a3165f0e0b185c64213b69ab621ea
|
[
"BSD-3-Clause"
] | null | null | null |
"""
sentry.tasks.store
~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from celery.utils.log import get_task_logger
from time import time
from sentry.cache import default_cache
from sentry.tasks.base import instrumented_task
from sentry.utils import metrics
from sentry.utils.safe import safe_execute
logger = get_task_logger(__name__)
@instrumented_task(
    name='sentry.tasks.store.preprocess_event',
    queue='events')
def preprocess_event(cache_key=None, data=None, start_time=None, **kwargs):
    """
    Run every registered version-2 plugin preprocessor over an event payload,
    then hand the (possibly rewritten) payload to ``save_event``.

    :param cache_key: key under which the event payload sits in the default
        cache; when given, ``data`` is loaded from (and re-saved to) there.
    :param data: event payload dict (used only when ``cache_key`` is falsy).
    :param start_time: ingestion timestamp, forwarded for latency metrics.
    """
    from sentry.plugins import plugins
    if cache_key:
        data = default_cache.get(cache_key)
    if data is None:
        logger.error('Data not available in preprocess_event (cache_key=%s)', cache_key)
        return
    project = data['project']
    # TODO(dcramer): ideally we would know if data changed by default
    has_changed = False
    for plugin in plugins.all(version=2):
        # Each plugin may expose several preprocessors; a truthy result
        # replaces the event payload wholesale.
        for processor in (safe_execute(plugin.get_event_preprocessors) or ()):
            result = safe_execute(processor, data)
            if result:
                data = result
                has_changed = True
    assert data['project'] == project, 'Project cannot be mutated by preprocessor'
    # Persist the mutated payload (1-hour TTL) so save_event picks it up.
    if has_changed and cache_key:
        default_cache.set(cache_key, data, 3600)
    if cache_key:
        # Payload travels via the cache, not the task broker, when possible.
        data = None
    save_event.delay(cache_key=cache_key, data=data, start_time=start_time)
@instrumented_task(
    name='sentry.tasks.store.save_event',
    queue='events')
def save_event(cache_key=None, data=None, start_time=None, **kwargs):
    """
    Saves an event to the database.

    :param cache_key: if given, the payload is fetched from the default
        cache (silently dropping the task if it has expired).
    :param data: event payload dict (used only when ``cache_key`` is falsy).
    :param start_time: ingestion timestamp used for the
        ``events.time-to-process`` latency metric.
    """
    from sentry.event_manager import EventManager
    if cache_key:
        data = default_cache.get(cache_key)
    if data is None:
        return
    project = data.pop('project')
    try:
        manager = EventManager(data)
        manager.save(project)
    finally:
        # Always drop the cached payload and record ingestion latency,
        # even if saving raised.
        if cache_key:
            default_cache.delete(cache_key)
        if start_time:
            metrics.timing('events.time-to-process', time() - start_time)
| 26.950617
| 88
| 0.67934
|
4a0558ad3382ccb1a278142cb5237ca0dd9e06ca
| 1,329
|
py
|
Python
|
old/realsense.py
|
ostapstephan/SeniorProject
|
f0d3f8067dec98474641c6ec3696dbd86f066a51
|
[
"MIT"
] | 1
|
2018-10-03T00:30:34.000Z
|
2018-10-03T00:30:34.000Z
|
old/realsense.py
|
ostapstephan/SeniorProject
|
f0d3f8067dec98474641c6ec3696dbd86f066a51
|
[
"MIT"
] | null | null | null |
old/realsense.py
|
ostapstephan/SeniorProject
|
f0d3f8067dec98474641c6ec3696dbd86f066a51
|
[
"MIT"
] | null | null | null |
import pyrealsense2 as rs
import numpy as np
import cv2
# Configure the RealSense pipeline: color stream only (depth is disabled).
pipeline = rs.pipeline()
config = rs.config()
# To add depth later, re-enable:
# config.enable_stream(rs.stream.depth, 640, 480, rs.format.z16, 30)
config.enable_stream(rs.stream.color, 640, 480, rs.format.bgr8, 30)

# Start streaming
pipeline.start(config)

try:
    while True:
        # Block until the next coherent frameset arrives.
        frames = pipeline.wait_for_frames()
        color_frame = frames.get_color_frame()
        if not color_frame:
            continue

        # Convert the frame to a numpy array for OpenCV.
        color_image = np.asanyarray(color_frame.get_data())

        # BUG FIX: the original called np.hstack((color_image)) -- the parens
        # are NOT a tuple, so hstack received the bare array and concatenated
        # its rows, mangling the displayed image. With a single stream there
        # is nothing to stack; show the color image directly. (The bogus
        # `depth_frame = 1` placeholder guard is gone too: it was always
        # truthy, so only color_frame was ever really checked.)
        images = color_image

        # Show images
        cv2.namedWindow('RealSense', cv2.WINDOW_AUTOSIZE)
        cv2.imshow('RealSense', images)
        cv2.waitKey(1)
finally:
    # Stop streaming
    pipeline.stop()
| 29.533333
| 108
| 0.680963
|
4a05590389623c4020d742051e5fdc73246d818f
| 1,672
|
py
|
Python
|
src/config.py
|
rohit04saluja/pinetmonitor
|
286eb763ba4142438b63cffec8afafe4983e502c
|
[
"MIT"
] | null | null | null |
src/config.py
|
rohit04saluja/pinetmonitor
|
286eb763ba4142438b63cffec8afafe4983e502c
|
[
"MIT"
] | null | null | null |
src/config.py
|
rohit04saluja/pinetmonitor
|
286eb763ba4142438b63cffec8afafe4983e502c
|
[
"MIT"
] | null | null | null |
import os, json, sys, logging
class Telegram :
    """Settings holder for the 'telegram' section of the config file."""
    def __init__ (self, config) :
        # All three keys are mandatory; a missing one raises KeyError,
        # which the caller (Config) treats as "no telegram section".
        self.access_token, self.chat_id, self.messages = (
            config['access_token'],
            config['chat_id'],
            config['messages'],
        )
class Config :
def __init__ (self, configPath=os.path.expanduser("~")+"/.pinetmonitorconfig.json") :
try :
config = json.loads(open(configPath).read())
try : self.telegram = Telegram(config["telegram"])
except KeyError :
pass # TODO: Add else case to raise an exception for missing access_token
try : self.dest = config["dest"]
except KeyError : self.dest = '8.8.8.8'
logging.info("Destination is {}".format(self.dest))
try : self.interval = config["interval"]
except KeyError : self.interval = 5
logging.info("Interval is {}".format(self.interval))
try : self.export = config["export"].replace("~", os.path.expanduser("~"))
except KeyError : self.export = None
if self.export is not None : logging.info("Export is enabled at {}".format(self.export))
try : self.succ_retry = config["succ_retry"]
except KeyError : self.succ_retry = 5
logging.info("Success retry count is {}".format(self.succ_retry))
try : self.fail_retry = config["fail_retry"]
except KeyError : self.fail_retry = 5
logging.info("Failure retry count is {}".format(self.fail_retry))
except FileNotFoundError :
print("Please create config file at {} from config_template.json".format(configPath))
sys.exit(1)
| 44
| 100
| 0.600478
|
4a055955938b67d7ef0cb905e02d88f67948dae8
| 100
|
py
|
Python
|
tests/conftest.py
|
erobic/MetaNN
|
f229cde5754c3826045ceabba04d4d4e55d918b5
|
[
"MIT"
] | 22
|
2019-06-13T08:21:54.000Z
|
2022-01-18T12:43:43.000Z
|
tests/conftest.py
|
erobic/MetaNN
|
f229cde5754c3826045ceabba04d4d4e55d918b5
|
[
"MIT"
] | 6
|
2019-08-14T23:13:56.000Z
|
2021-06-18T02:12:31.000Z
|
tests/conftest.py
|
erobic/MetaNN
|
f229cde5754c3826045ceabba04d4d4e55d918b5
|
[
"MIT"
] | 6
|
2019-06-13T08:27:57.000Z
|
2021-12-20T12:38:09.000Z
|
import os
import sys

# Make the project root (the parent of this tests/ directory) importable.
_PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
sys.path.append(_PROJECT_ROOT)
| 20
| 82
| 0.68
|
4a055acaf4366c021268f052cf314f579e9bd0b2
| 53
|
py
|
Python
|
utils/__init__.py
|
yz-cnsdqz/MOJO-release
|
476b40c8111861c6ab6b193a68e634d9aeb4e407
|
[
"MIT"
] | 58
|
2021-06-18T17:00:06.000Z
|
2022-03-20T12:21:12.000Z
|
utils/__init__.py
|
wei-mao-2019/gsps
|
7f8de905f49bc739747174ade343a431ec8fe74e
|
[
"MIT"
] | 5
|
2021-09-10T07:04:38.000Z
|
2022-01-18T17:35:00.000Z
|
utils/__init__.py
|
wei-mao-2019/gsps
|
7f8de905f49bc739747174ade343a431ec8fe74e
|
[
"MIT"
] | 3
|
2021-06-24T04:04:07.000Z
|
2021-06-30T14:22:54.000Z
|
from utils.torch import *
from utils.logger import *
| 17.666667
| 26
| 0.773585
|
4a055ad8589444634474bb8879b24b7c7c1f02da
| 7,444
|
py
|
Python
|
oscarapi/serializers/product.py
|
remintz/django-oscar-api
|
3a378f29bab8e450230cd23b77f2dff35b5d445a
|
[
"BSD-3-Clause"
] | null | null | null |
oscarapi/serializers/product.py
|
remintz/django-oscar-api
|
3a378f29bab8e450230cd23b77f2dff35b5d445a
|
[
"BSD-3-Clause"
] | null | null | null |
oscarapi/serializers/product.py
|
remintz/django-oscar-api
|
3a378f29bab8e450230cd23b77f2dff35b5d445a
|
[
"BSD-3-Clause"
] | null | null | null |
from rest_framework import serializers
from django.utils.translation import ugettext as _
from oscarapi.utils import (
OscarModelSerializer,
overridable,
OscarHyperlinkedModelSerializer
)
from oscar.core.loading import get_model
# Resolve Oscar's (potentially project-overridden) model classes at import
# time via the dynamic class loader.
Product = get_model('catalogue', 'Product')
ProductClass = get_model('catalogue', 'ProductClass')
ProductCategory = get_model('catalogue', 'ProductCategory')
ProductAttribute = get_model('catalogue', 'ProductAttribute')
ProductAttributeValue = get_model('catalogue', 'ProductAttributeValue')
AttributeOption = get_model('catalogue', 'AttributeOption')
ProductImage = get_model('catalogue', 'ProductImage')
Option = get_model('catalogue', 'Option')
Partner = get_model('partner', 'Partner')
StockRecord = get_model('partner', 'StockRecord')
ProductRecord = get_model('analytics', 'ProductRecord')
class PartnerSerializer(OscarModelSerializer):
    """Serializes every field of the fulfilment Partner model."""
    class Meta:
        model = Partner
        fields = '__all__'
class OptionSerializer(OscarHyperlinkedModelSerializer):
    """Serializes a product Option; field list is settings-overridable."""
    class Meta:
        model = Option
        fields = overridable('OSCARAPI_OPTION_FIELDS', default=(
            'url', 'id', 'name', 'code', 'type'
        ))
class ProductAttributeValueSerializer(OscarModelSerializer):
    """Flattens a ProductAttributeValue into ``{name, code, value}``."""
    name = serializers.CharField(source="attribute.name")
    code = serializers.CharField(source="attribute.code")
    value = serializers.SerializerMethodField()
    def get_value(self, obj):
        """Render the attribute's value in a JSON-friendly form, per type."""
        obj_type = obj.attribute.type
        if obj_type == ProductAttribute.OPTION:
            return obj.value.option
        elif obj_type == ProductAttribute.MULTI_OPTION:
            # Multi-option values: emit the list of option labels.
            return obj.value.values_list('option', flat=True)
        elif obj_type == ProductAttribute.FILE:
            return obj.value.url
        elif obj_type == ProductAttribute.IMAGE:
            return obj.value.url
        elif obj_type == ProductAttribute.ENTITY:
            # Entities must define their own json() serialization hook.
            if hasattr(obj.value, 'json'):
                return obj.value.json()
            else:
                return _(
                    "%(entity)s has no json method, can not convert to json" % {
                        'entity': repr(obj.value)
                    }
                )
        # return the value as stored on ProductAttributeValue in the correct type
        return obj.value
    class Meta:
        model = ProductAttributeValue
        fields = overridable(
            'OSCARAPI_PRODUCT_ATTRIBUTE_VALUE_FIELDS',
            default=('name', 'value', 'code'))
class ProductAttributeSerializer(OscarModelSerializer):
    """Serializes a ProductAttribute together with all its values."""
    productattributevalue_set = ProductAttributeValueSerializer(many=True)
    class Meta:
        model = ProductAttribute
        fields = overridable(
            'OSCARAPI_PRODUCT_ATTRIBUTE_FIELDS',
            default=('name', 'productattributevalue_set'))
class ProductImageSerializer(OscarModelSerializer):
    """Serializes every field of a ProductImage."""
    class Meta:
        model = ProductImage
        fields = '__all__'
class ProductStockRecordsSerializer(OscarModelSerializer):
    """Serializes every field of a partner StockRecord."""
    class Meta:
        model = StockRecord
        fields = '__all__'
class ProductRecordsSerializer(OscarModelSerializer):
    """Serializes every field of an analytics ProductRecord."""
    class Meta:
        model = ProductRecord
        fields = '__all__'
class AvailabilitySerializer(serializers.Serializer):
    """Read-only availability info (not backed by a model)."""
    is_available_to_buy = serializers.BooleanField()
    num_available = serializers.IntegerField(required=False)
    message = serializers.CharField()
class RecommmendedProductSerializer(OscarModelSerializer):
    """
    Minimal (URL-only by default) serializer for recommended products.

    NOTE(review): the class name carries a triple-m typo ("Recommmended");
    kept as-is because external code may import it by this name.
    """
    url = serializers.HyperlinkedIdentityField(view_name='product-detail')
    class Meta:
        model = Product
        fields = overridable(
            'OSCARAPI_RECOMMENDED_PRODUCT_FIELDS', default=('url',))
class BaseProductSerializer(OscarModelSerializer):
    """
    Shared field declarations for the product serializers below; concrete
    subclasses pick their field subset via settings-overridable Meta.fields.
    """
    url = serializers.HyperlinkedIdentityField(view_name='product-detail')
    #stockrecords = serializers.HyperlinkedIdentityField(
    #    view_name='product-stockrecord-list')
    stockrecords = ProductStockRecordsSerializer(many=True, required=False)
    stats = ProductRecordsSerializer(many=False, required=False)
    attributes = ProductAttributeValueSerializer(
        many=True, required=False, source="attribute_values")
    categories = serializers.StringRelatedField(many=True, required=False)
    product_class = serializers.StringRelatedField(required=False)
    price = serializers.HyperlinkedIdentityField(view_name='product-price')
    availability = serializers.HyperlinkedIdentityField(
        view_name='product-availability')
    options = OptionSerializer(many=True, required=False)
    recommended_products = RecommmendedProductSerializer(
        many=True, required=False)
    def get_field_names(self, declared_fields, info):
        """
        Override get_field_names to make sure that we are not getting errors
        for not including declared fields.
        """
        # Passing {} drops the declared-fields check; Meta.fields alone rules.
        return super(BaseProductSerializer, self).get_field_names({}, info)
    class Meta:
        model = Product
class ChildProductserializer(BaseProductSerializer):
    """
    Serializer for child (variant) products; can surface selected data from
    the parent product (images, description) when enabled via settings.

    NOTE(review): lowercase "s" in the class name is a typo; kept because
    external code may import it by this name.
    """
    parent = serializers.HyperlinkedRelatedField(
        view_name='product-detail', queryset=Product.objects)
    # the below fields can be filled from the parent product if enabled.
    images = ProductImageSerializer(many=True, required=False, source='parent.images')
    description = serializers.CharField(source='parent.description')
    class Meta(BaseProductSerializer.Meta):
        fields = overridable(
            'OSCARAPI_CHILDPRODUCTDETAIL_FIELDS',
            default=(
                'url', 'upc', 'id', 'title', 'structure',
                # 'parent', 'description', 'images', are not included by default, but
                # easily enabled by overriding OSCARAPI_CHILDPRODUCTDETAIL_FIELDS
                # in your settings file
                'date_created', 'date_updated', 'recommended_products',
                'attributes', 'categories', 'product_class',
                'stockrecords', 'price', 'availability', 'options', 'stats'))
class ProductSerializer(BaseProductSerializer):
    """Full product detail serializer, including images and child variants."""
    images = ProductImageSerializer(many=True, required=False)
    children = ChildProductserializer(many=True, required=False)
    class Meta(BaseProductSerializer.Meta):
        fields = overridable(
            'OSCARAPI_PRODUCTDETAIL_FIELDS',
            default=(
                'url', 'upc', 'id', 'title', 'description', 'structure',
                'date_created', 'date_updated', 'recommended_products',
                'attributes', 'categories', 'product_class',
                'stockrecords', 'images', 'price', 'availability', 'options',
                'children', 'stats'))
class ProductLinkSerializer(ProductSerializer):
    """Compact product serializer for list views (url/id/upc/title)."""
    class Meta(BaseProductSerializer.Meta):
        fields = overridable(
            'OSCARAPI_PRODUCT_FIELDS', default=(
                'url', 'id', 'upc', 'title'
            ))
class OptionValueSerializer(serializers.Serializer):
    """A chosen product option (hyperlink) plus the value selected for it."""
    option = serializers.HyperlinkedRelatedField(
        view_name='option-detail', queryset=Option.objects)
    value = serializers.CharField()
class AddProductSerializer(serializers.Serializer):
    """
    Serializes and validates an add to basket request.
    """
    quantity = serializers.IntegerField(required=True)
    url = serializers.HyperlinkedRelatedField(
        view_name='product-detail', queryset=Product.objects, required=True)
    options = OptionValueSerializer(many=True, required=False)
    class Meta:
        # NOTE(review): Meta.model is unusual on a plain Serializer (not a
        # ModelSerializer); it appears unused by DRF itself.
        model = Product
| 36.851485
| 86
| 0.686459
|
4a055b07662a9999827aff44cb388db06daf970b
| 14,013
|
py
|
Python
|
wrkzcoin/daemon.py
|
wrkzcoin/wrkzcoin-rpc-python
|
270607adfbab65b13f5560dd46269859d95e563f
|
[
"MIT"
] | 1
|
2021-12-08T10:47:21.000Z
|
2021-12-08T10:47:21.000Z
|
wrkzcoin/daemon.py
|
wrkzcoin/wrkzcoin-rpc-python
|
270607adfbab65b13f5560dd46269859d95e563f
|
[
"MIT"
] | null | null | null |
wrkzcoin/daemon.py
|
wrkzcoin/wrkzcoin-rpc-python
|
270607adfbab65b13f5560dd46269859d95e563f
|
[
"MIT"
] | null | null | null |
import logging
import aiohttp
import asyncio
import json
from typing import Dict
class Daemon:
"""
Integrates with JSON-RPC interface of `Wrkzd`.
"""
def __init__(self, host='127.0.0.1', port=17856):
self.url = f'http://{host}:{port}'
self.headers = {'content-type': 'application/json'}
self.post_url = self.url + '/json_rpc'
async def _make_post(self, method, payload: Dict = None):
payload = {
'jsonrpc': '2.0',
'method': method,
'params': payload or {}
}
print(payload)
logging.debug(json.dumps(payload, indent=4))
async with aiohttp.ClientSession(headers=self.headers) as session:
async with session.post(self.post_url, ssl=False, json=payload) as response:
print(response)
if response.status == 200:
response_data = await response.json()
await session.close()
if 'error' in response_data:
raise ValueError(response_data['error'])
return response_data
async def _make_get_request(self, method):
get_url = self.url + '/' + method
async with aiohttp.ClientSession(headers=self.headers) as session:
async with session.get(get_url, ssl=False) as response:
print(response)
if response.status == 200:
response_data = await response.json()
await session.close()
return response_data
async def get_height(self):
"""
Returns current chain height
Returns:
dict::
{
'height': 613945,
'network_height': 613945,
'status': 'OK'
}
"""
return self._make_get_request('getheight')
async def get_info(self):
"""
Returns information of network and connection
Returns:
dict::
{
'alt_blocks_count': 7,
'difficulty': 162204943,
'grey_peerlist_size': 736,
'hashrate': 5406831,
'height': 613945,
'incoming_connections_count': 0,
'last_known_block_index': 613942,
'major_version': 4,
'minor_version': 0,
'network_height': 613945,
'outgoing_connections_count': 8,
'start_time': 1531374018,
'status': 'OK',
'supported_height': 620000,
'synced': True,
'testnet': False,
'tx_count': 719763,
'tx_pool_size': 0,
'upgrade_heights': [
187000,
350000,
440000,
620000,
...
],
'version': '0.6.4',
'white_peerlist_size': 52
}
"""
return self._make_get_request('getinfo')
async def get_transactions(self):
"""
Returns array of missed transactions
Returns:
dict::
{
'missed_tx': [],
'status': 'OK',
'txs_as_hex': []
}
"""
return self._make_get_request('gettransactions')
async def get_peers(self):
"""
Returns array of peers connected to the daemon
Returns:
dict::
{
'peers': [
142.44.212.51:11897,
45.55.33.219:11897.
...
],
'status': 'OK
}
"""
return self._make_get_request('getpeers')
async def get_fee_info(self):
"""
Returns information on fee set by remote node
Returns:
dict::
{
'address': '',
'amount': 0,
'status': "Node's fee address is not set"
}
"""
return self._make_get_request('feeinfo')
    async def get_block_count(self):
        """
        Returns current chain height (JSON-RPC method 'getblockcount').

        Returns:
            dict::

            {
                "jsonrpc":"2.0",
                "result":{
                    "count":560915,
                    "status":"OK"
                }
            }
        """
        return await self._make_post('getblockcount')
    async def get_block_hash(self, block_hash):
        """
        Returns block hash for a given height off by one

        Args:
            block_hash: despite the name, this is forwarded verbatim as the
                JSON-RPC params of 'on_getblockhash', which takes a height.
                NOTE(review): the API conventionally expects the height
                wrapped in a list (e.g. [123456]) -- confirm against the
                daemon; the caller must supply the correct shape.

        Returns:
            dict:: result

            {
                "jsonrpc": "2.0",
                "result": "4bd7dd9649a006660e113efe49691e0739d9838d044774f18732111b145347c8"
            }
        """
        return await self._make_post('on_getblockhash', block_hash)
    async def get_block_template(self, reserve_size, wallet_address):
        """
        Returns blocktemplate with an empty "hole" for nonce.

        Args:
            reserve_size (int): 123
            wallet_address (str): a valid wallet address

        Returns:
            dict: the block template::

            {
                "blocktemplate_blob": "0300f29a5cddd1a88f9b95...",
                "difficulty": 273666101,
                "height": 286393,
                "reserved_offset": 412,
                "status": "OK"
            }
        """
        params = {'reserve_size': reserve_size,
                  'wallet_address': wallet_address}
        return await self._make_post('getblocktemplate', params)
    async def submit_block(self, block_blob):
        """
        Submits a mined block to the network (JSON-RPC 'submitblock').

        Args:
            block_blob (str) : a valid block blob ...
                NOTE(review): the API conventionally expects the blob wrapped
                in a list -- confirm against the daemon; the argument is
                forwarded verbatim as the JSON-RPC params.

        Returns:
            dict::

            {
                "jsonrpc": "2.0"
                "result": {
                    "status": "OK"
                }
            }
        """
        return await self._make_post('submitblock', block_blob)
    async def get_last_block_header(self):
        """
        Returns last block header.

        Returns:
            dict: information about the last block header::

            {
                'block_header': {
                    'depth': 0,
                    'difficulty': 226559499,
                    'hash': '34aa8777302f4856e360fef49a0a7b6c78cc8eff999c0c716bad234837917986',
                    'height': 286397,
                    'major_version': 3,
                    'minor_version': 0,
                    'nonce': 18205,
                    'orphan_status': False,
                    'prev_hash': '522f53dae525f0a66064377c41bc1f78c6eb4eea2b3e7630efccd395bb17f43f',
                    'reward': 2954906,
                    'timestamp': 1521732086
                },
                'status': 'OK'
            }
        """
        return await self._make_post('getlastblockheader')
async def get_block_header_by_hash(self, hash):
    """
    Fetch the header of the block with the given hash.

    Args:
        hash (str): a valid block hash

    Returns:
        dict: same shape as :meth:`get_last_block_header`.
    """
    return await self._make_post('getblockheaderbyhash', {'hash': hash})
async def get_block_header_by_height(self, height):
    """
    Returns the block header at the given height.

    Args:
        height (int): a valid block height

    Returns:
        dict: See getlastblockheader
    """
    params = {'height': height}
    return await self._make_post('getblockheaderbyheight', params)
async def get_currency_id(self):
    """
    Fetch the unique currency identifier of the network.

    Returns:
        dict: ``{'currency_id_blob': '<hex blob>'}``
    """
    return await self._make_post('getcurrencyid')
async def get_blocks(self, height):
    """
    Fetch summaries of the last 30 blocks before *height* (inclusive).

    Args:
        height (int): chain height to start from

    Returns:
        dict: JSON-RPC response whose ``result.blocks`` is a list of
        per-block summaries (``cumul_size``, ``difficulty``, ``hash``,
        ``height``, ``timestamp``, ``tx_count``) plus ``result.status``.
    """
    return await self._make_post('f_blocks_list_json', {'height': height})
async def get_block(self, block_hash):
    """
    Fetch detailed information on a single block.

    Args:
        block_hash (str): hash of the block to retrieve

    Returns:
        dict: a ``block`` mapping with chain statistics
        (``alreadyGeneratedCoins``, ``baseReward``, ``difficulty``,
        ``height``, ``reward``, ``timestamp``, ...), its ``transactions``
        list, and a top-level ``status`` field.
    """
    query = {'hash': block_hash}
    return await self._make_post('f_block_json', query)
async def get_transaction(self, transaction_hash):
    """
    Fetch information on a single transaction.

    Args:
        transaction_hash (str): hash of the transaction

    Returns:
        dict: the containing ``block`` summary, the raw ``tx`` body
        (``extra``, ``unlock_time``, ``version``, ``vin``, ``vout``),
        a ``txDetails`` mapping (``amount_out``, ``fee``, ``hash``,
        ``mixin``, ``paymentId``, ``size``) and a ``status`` field.
    """
    query = {'hash': transaction_hash}
    return await self._make_post('f_transaction_json', query)
async def get_transaction_pool(self):
    """
    Fetch the list of transactions currently in the mempool.

    Returns:
        dict: JSON-RPC response whose ``transactions`` list holds
        per-transaction summaries (``amount_out``, ``fee``, ``hash``,
        ``size``).
    """
    pool = await self._make_post('f_on_transactions_pool_json')
    return pool
| 30.200431
| 118
| 0.426033
|
4a055b162109346f6f817c6542fd74c3a211ea24
| 404
|
py
|
Python
|
ChessDRF/server/migrations/0003_game_has_transformation.py
|
Pythongor/Chess-DRF
|
d8dedd9fa6db85c00e6e3ee4c63729448a9bfba5
|
[
"MIT"
] | null | null | null |
ChessDRF/server/migrations/0003_game_has_transformation.py
|
Pythongor/Chess-DRF
|
d8dedd9fa6db85c00e6e3ee4c63729448a9bfba5
|
[
"MIT"
] | 3
|
2021-09-08T03:38:52.000Z
|
2022-01-13T03:57:08.000Z
|
ChessDRF/server/migrations/0003_game_has_transformation.py
|
Pythongor/Chess-DRF
|
d8dedd9fa6db85c00e6e3ee4c63729448a9bfba5
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.0.3 on 2020-10-22 09:08
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a ``has_transformation`` boolean field to the ``game`` model.

    Auto-generated by Django 3.0.3; depends on
    ``0002_remove_game_transformation``. Do not hand-edit the operations.
    """

    dependencies = [
        ('server', '0002_remove_game_transformation'),
    ]

    operations = [
        migrations.AddField(
            model_name='game',
            name='has_transformation',
            # New flag defaults to False for all existing rows.
            field=models.BooleanField(default=False),
        ),
    ]
| 21.263158
| 54
| 0.616337
|
4a055c4727ef2151734bc3bdd3ae09d3053e0dbc
| 6,408
|
py
|
Python
|
testscripts/RDKB/component/CosaCM/TS_COSACM_GetMDDIPOverride.py
|
cablelabs/tools-tdkb
|
1fd5af0f6b23ce6614a4cfcbbaec4dde430fad69
|
[
"Apache-2.0"
] | null | null | null |
testscripts/RDKB/component/CosaCM/TS_COSACM_GetMDDIPOverride.py
|
cablelabs/tools-tdkb
|
1fd5af0f6b23ce6614a4cfcbbaec4dde430fad69
|
[
"Apache-2.0"
] | null | null | null |
testscripts/RDKB/component/CosaCM/TS_COSACM_GetMDDIPOverride.py
|
cablelabs/tools-tdkb
|
1fd5af0f6b23ce6614a4cfcbbaec4dde430fad69
|
[
"Apache-2.0"
] | null | null | null |
##########################################################################
# If not stated otherwise in this file or this component's Licenses.txt
# file the following copyright and licenses apply:
#
# Copyright 2016 RDK Management
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##########################################################################
'''
<?xml version="1.0" encoding="UTF-8"?><xml>
<id/>
<version>1</version>
<name>TS_COSACM_GetMDDIPOverride</name>
<primitive_test_id/>
<primitive_test_name>COSACM_GetMDDIPOverride</primitive_test_name>
<primitive_test_version>1</primitive_test_version>
<status>FREE</status>
<synopsis/>
<groups_id/>
<execution_time>1</execution_time>
<long_duration>false</long_duration>
<remarks/>
<skip>false</skip>
<box_types>
<box_type>Broadband</box_type>
</box_types>
<rdk_versions>
<rdk_version>RDKB</rdk_version>
</rdk_versions>
<test_cases>
<test_case_id>TC_COSACM_34</test_case_id>
<test_objective>To Validate Cable Modem
"CosaDmlCMGetMDDIPOverride" API</test_objective>
<test_type>Positive</test_type>
<test_setup>Emulator,
XB3</test_setup>
<pre_requisite>1.Ccsp Components in DUT should be in a running state that includes component under test Cable Modem
2.TDK Agent should be in running state or invoke it through StartTdk.sh script
</pre_requisite>
<api_or_interface_used>None</api_or_interface_used>
<input_parameters>Json Interface:
API Name
COSACM_GetMDDIPOverride
Input
N/A
</input_parameters>
<automation_approch>1.Configure the Function info in Test Manager GUI which needs to be tested
(COSACM_GetMDDIPOverride - func name - "If not exists already" ( This is considered as default Primitive test case)
cosacm - module name
Necessary I/P args if needed as Mentioned in Input)
2.Create a Python Script in Test Manager with default primitive test case through add new rdkb script option (TS_COSACM_GetMDDIPOverride.py)
3.Customize the generated script template to handle load/unload and pass/fail scenarios
3.Execute the generated Script(TS_COSACM_GetMDDIPOverride.py) using execution page of Test Manager GUI
4.cosacmstub which is a part of TDK Agent process, will be in listening mode to execute TDK Component function named COSACM_GetMDDIPOverride through registered TDK cosacmstub function along with necessary Entry Values as arguments
5.COSACM_GetMDDIPOverride function will call ssp_cosacm_get_mddipoverride,that inturn will call relevant cm hal Function to get/fetch CM data model value. In prior ssp_cosacm_create and ssp_coscm_initialize functions are called in sequence to allocate memory for CM datamodel and initialize with default values
6.Responses(printf) from TDK Component,Ccsp Library function and cosacmstub would be logged in Agent Console log based on the debug info redirected to agent console
7.cosacmstub function COSACM_GetMDDIPOverride will validate the available result (return value from ssp_cosacm_get_mddipoverride as success(0)) with expected result (success(0)) and the output argument value ( Override mode value as string ) is updated in agent console log and json output variable along with return value
8.TestManager will publish the result in GUI as PASS/FAILURE based on the response from COSACM_GetMDDIPOverride function</automation_approch>
<except_output>CheckPoint 1:
Cosa CM Get MDD Override success log from DUT should be available in Agent Console Log
CheckPoint 2:
TDK agent Test Function will log the test case result as PASS based on API response which will be available in Test Manager Result ( XLS)
CheckPoint 3:
TestManager GUI will publish the result as PASS in Execution/Console page of Test Manager</except_output>
<priority>High</priority>
<test_stub_interface>None</test_stub_interface>
<test_script>TS_COSACM_GetMDDIPOverride</test_script>
<skipped>No</skipped>
<release_version/>
<remarks>None</remarks>
</test_cases>
</xml>
'''
import tdklib;
import time;
# ---------------------------------------------------------------------------
# Flat TDK test script (Python 2 syntax: `print` statements, no parentheses).
# NOTE: <ipaddress> and <port> below are template placeholders substituted by
# the TDK Test Manager before execution — this file is not directly runnable.
# ---------------------------------------------------------------------------
#Test component to be tested
obj = tdklib.TDKScriptingLibrary("cosacm","RDKB");
#IP and Port of box, No need to change,
#This will be replaced with correspoing Box Ip and port while executing script
ip = <ipaddress>
port = <port>
obj.configureTestCase(ip,port,'TS_COSACM_GetMDDIPOverride');
#Get the result of connection with test component and STB
loadmodulestatus =obj.getLoadModuleResult();
print "[LIB LOAD STATUS] : %s" %loadmodulestatus ;
if "SUCCESS" in loadmodulestatus.upper():
    obj.setLoadModuleStatus("SUCCESS");
    #Script to load the configuration file of the component
    tdkTestObj = obj.createTestStep("COSACM_GetMDDIPOverride");
    expectedresult="SUCCESS";
    # Execute the test step on the DUT and compare actual vs expected result.
    tdkTestObj.executeTestCase(expectedresult);
    actualresult = tdkTestObj.getResult();
    if expectedresult in actualresult:
        #Set the result status of execution
        tdkTestObj.setResultStatus("SUCCESS");
        details = tdkTestObj.getResultDetails();
        print "TEST STEP 1: Retrieve the MDD IP Override Status";
        print "EXPECTED RESULT 1: Should retrieve the MDD IP Override Status successfully";
        print "ACTUAL RESULT 1: %s" %details;
        #Get the result of execution
        print "[TEST EXECUTION RESULT] : %s" %actualresult ;
    else:
        tdkTestObj.setResultStatus("FAILURE");
        details = tdkTestObj.getResultDetails();
        print "TEST STEP 1: Retrieve the MDD IP Override Status";
        print "EXPECTED RESULT 1: Should retrieve the MDD IP Override Status successfully";
        print "ACTUAL RESULT 1: %s" %details;
        print "[TEST EXECUTION RESULT] : %s" %actualresult ;
    # Always unload the module after the test step, pass or fail.
    obj.unloadModule("cosacm");
else:
    print "Failed to load the module";
    obj.setLoadModuleStatus("FAILURE");
    print "Module loading failed";
| 46.434783
| 322
| 0.732834
|
4a055c9e5399f24a8e67ca71fc5952460f3f627f
| 5,234
|
py
|
Python
|
hagelslag/util/merge_forecast_data.py
|
djgagne/hagelslag
|
57d1051fa3816ae3a5162702cc7f992ffe2c8408
|
[
"MIT"
] | 50
|
2016-01-22T23:59:41.000Z
|
2022-03-29T12:15:48.000Z
|
hagelslag/util/merge_forecast_data.py
|
djgagne/hagelslag
|
57d1051fa3816ae3a5162702cc7f992ffe2c8408
|
[
"MIT"
] | 20
|
2016-10-25T20:31:26.000Z
|
2022-02-28T18:12:01.000Z
|
hagelslag/util/merge_forecast_data.py
|
djgagne/hagelslag
|
57d1051fa3816ae3a5162702cc7f992ffe2c8408
|
[
"MIT"
] | 25
|
2016-07-13T19:06:56.000Z
|
2021-12-26T15:26:56.000Z
|
import argparse
import json
import traceback
from datetime import datetime
from glob import glob
from multiprocessing import Pool
from os.path import exists
import pandas as pd
def main():
    """CLI entry point: merge hail-track input CSVs with forecast JSON.

    Parses command-line options, then fans per-run-date merge work out to a
    multiprocessing pool; each worker's merged DataFrame is appended to a
    per-ensemble-member combined CSV via the ``output_combined_files``
    callback (which runs in the parent process).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("-d", "--csv", help="CSV data file directory")
    parser.add_argument("-j", "--json", help="JSON forecast file directory")
    parser.add_argument("-o", "--out", help="Output path")
    parser.add_argument("-s", "--start", help="Start run date in YYYYMMDD format")
    parser.add_argument("-e", "--end", help="End run date in YYYYMMDD format")
    parser.add_argument("-c", "--cond", help="Condition model list (comma separated)")
    parser.add_argument("-m", "--dist", help="Size distribution model list (comma separated)")
    parser.add_argument("-n", "--ens", default="SSEF", help="Ensemble system name.")
    parser.add_argument("-p", "--proc", type=int, help="Number of processors")
    args = parser.parse_args()
    start_date = datetime.strptime(args.start, "%Y%m%d")
    end_date = datetime.strptime(args.end, "%Y%m%d")
    condition_models = args.cond.split(",")
    dist_models = args.dist.split(",")
    # Echo the parsed model lists for operator visibility.
    for condition_model in condition_models:
        print(condition_model)
    for dist_model in dist_models:
        print(dist_model)
    pool = Pool(args.proc)

    def output_combined_files(output):
        # ``output`` is the worker's (DataFrame, ens_name, ens_member) tuple.
        out_file = args.out + "track_forecast_data_{0}_{1}_combined.csv".format(*output[1:])
        if exists(out_file):
            # Append without repeating the header once the file exists.
            output[0].to_csv(out_file, mode="a", header=False)
        else:
            output[0].to_csv(out_file, mode="w", index_label="Step_ID")
        return

    csv_files = sorted(glob(args.csv + "track_step_{0}_*.csv".format(args.ens)))
    print(csv_files)
    for csv_file in csv_files:
        # The run date is the trailing YYYYMMDD token of the file name.
        run_date = datetime.strptime(csv_file[:-4].split("_")[-1], "%Y%m%d")
        if start_date <= run_date <= end_date:
            pool.apply_async(merge_input_csv_forecast_json,
                             (csv_file, args.json, condition_models, dist_models),
                             callback=output_combined_files)
    pool.close()
    pool.join()
    return
def merge_input_csv_forecast_json(input_csv_file, forecast_json_path, condition_models, dist_models):
    """
    Reads forecasts from json files and merges them with the input data from the step csv files.

    Args:
        input_csv_file: Name of the input data csv file being processed
        forecast_json_path: Path to the forecast json files toplevel directory
        condition_models: List of models used to forecast hail or no hail
        dist_models: List of models used to forecast the hail size distribution

    Returns:
        tuple: (merged DataFrame, ensemble name, ensemble member)

    Raises:
        Re-raises any exception after printing the traceback (so failures
        surface through the multiprocessing pool).
    """
    try:
        # File name convention: track_step_<ens>_<member...>_<YYYYMMDD>.csv
        run_date = input_csv_file[:-4].split("_")[-1]
        print(run_date)
        ens_member = "_".join(input_csv_file.split("/")[-1][:-4].split("_")[3:-1])
        ens_name = input_csv_file.split("/")[-1].split("_")[2]
        input_data = pd.read_csv(input_csv_file, index_col="Step_ID")
        full_json_path = forecast_json_path + "{0}/{1}/".format(run_date, ens_member)
        track_ids = sorted(input_data["Track_ID"].unique())
        # Build the flat column list: one "<model>_Condition" per condition
        # model, plus gamma Shape/Location/Scale per distribution model.
        # Model names have spaces replaced by hyphens for column safety.
        model_pred_cols = []
        condition_models_ns = []
        dist_models_ns = []
        gamma_params = ["Shape", "Location", "Scale"]
        for condition_model in condition_models:
            model_pred_cols.append(condition_model.replace(" ", "-") + "_Condition")
            condition_models_ns.append(condition_model.replace(" ", "-"))
        for dist_model in dist_models:
            dist_models_ns.append(dist_model.replace(" ", "-"))
            for param in gamma_params:
                model_pred_cols.append(dist_model.replace(" ", "-") + "_" + param)
        pred_data = pd.DataFrame(index=input_data.index, columns=model_pred_cols,
                                 dtype=float)
        for track_id in track_ids:
            track_id_num = track_id.split("_")[-1]
            json_filename = full_json_path + "{0}_{1}_{2}_model_track_{3}.json".format(ens_name,
                                                                                       run_date,
                                                                                       ens_member,
                                                                                       track_id_num)
            json_file = open(json_filename)
            json_data = json.load(json_file)
            json_file.close()
            # Each GeoJSON feature corresponds to one step of the track;
            # step IDs are "<track_id>_<step index, zero padded>".
            for s, step in enumerate(json_data["features"]):
                step_id = track_id + "_{0:02d}".format(s)
                for cond_model in condition_models_ns:
                    pred_data.loc[step_id, cond_model + "_Condition"] = step["properties"]["condition_" + cond_model]
                for dist_model in dist_models_ns:
                    pred_data.loc[step_id, [dist_model + "_" + p
                                            for p in gamma_params]] = step["properties"]["dist_" + dist_model]
        out_data = input_data.merge(pred_data, left_index=True, right_index=True)
        return out_data, ens_name, ens_member
    except Exception as e:
        print(traceback.format_exc())
        raise e
# Script entry point: only run the merge pipeline when executed directly.
if __name__ == "__main__":
    main()
| 45.912281
| 117
| 0.603554
|
4a055ddfa8c05e7f3f0bd7cf8835984585120f0c
| 802
|
py
|
Python
|
rocket_connect/envelope/migrations/0002_auto_20210404_1915.py
|
crashbr/rocket.connect
|
e24914403d0717748501bc3a66a358429372adad
|
[
"MIT"
] | 23
|
2021-04-15T23:19:51.000Z
|
2022-02-21T19:58:31.000Z
|
rocket_connect/envelope/migrations/0002_auto_20210404_1915.py
|
crashbr/rocket.connect
|
e24914403d0717748501bc3a66a358429372adad
|
[
"MIT"
] | 30
|
2021-04-14T22:04:20.000Z
|
2022-03-28T11:22:08.000Z
|
rocket_connect/envelope/migrations/0002_auto_20210404_1915.py
|
crashbr/rocket.connect
|
e24914403d0717748501bc3a66a358429372adad
|
[
"MIT"
] | 5
|
2021-04-16T14:50:32.000Z
|
2022-03-11T23:50:59.000Z
|
# Generated by Django 3.1.7 on 2021-04-04 19:15
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a message direction ``type`` field and relax ``raw_message``.

    Auto-generated by Django 3.1.7. Adds a ``type`` CharField with
    incoming/outgoing choices (default ``incoming``) and makes
    ``raw_message`` nullable/blank. Do not hand-edit the operations.
    """

    dependencies = [
        ('envelope', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='message',
            name='type',
            field=models.CharField(choices=[['incoming', 'Income Message, Raw Message is from Client, Payload to Rocketchat'], ['outgoing', 'Outgoing Message, Raw Message is from Rocket Connect, payload is to Client']], default='incoming', max_length=50),
        ),
        migrations.AlterField(
            model_name='message',
            name='raw_message',
            field=models.JSONField(blank=True, help_text='the message that first came to be connected', null=True),
        ),
    ]
| 33.416667
| 255
| 0.629676
|
4a055e3cfac3c4a538e023790f936f4f127777fa
| 2,133
|
py
|
Python
|
frames/hashing.py
|
Hellowlol/frames
|
32ec0fbb997fd13ffda2aeef0c2b3c319a96b389
|
[
"MIT"
] | 2
|
2019-03-17T00:50:43.000Z
|
2019-06-11T14:04:37.000Z
|
frames/hashing.py
|
Hellowlol/frames
|
32ec0fbb997fd13ffda2aeef0c2b3c319a96b389
|
[
"MIT"
] | 1
|
2019-03-24T18:27:27.000Z
|
2019-03-24T18:27:27.000Z
|
frames/hashing.py
|
Hellowlol/frames
|
32ec0fbb997fd13ffda2aeef0c2b3c319a96b389
|
[
"MIT"
] | null | null | null |
import binascii
import io
import cv2
import numpy as np
from frames.video import video_frame_by_frame
image_type = ('.png', '.jpeg', '.jpg')
def _binary_array_to_hex(arr):
return binascii.hexlify(arr.flatten()).decode('ascii')
class ImageHash(object):
    """
    Hash encapsulation. Can be used for dictionary keys and comparisons.
    """

    def __init__(self, binary_array):
        # Keep the hash as a flat 1-D array so comparisons are elementwise.
        self.hash = binary_array.flatten()
        # Positions (frame offsets) at which this hash was observed.
        self.pos = []

    def add_pos(self, pos):
        """Record a frame position where this hash occurred."""
        self.pos.append(pos)

    def __str__(self):
        return _binary_array_to_hex(self.hash)

    def __repr__(self):
        return repr(self.hash)

    def __sub__(self, other):
        """Return the Hamming distance between the two hashes."""
        if other is None:
            raise TypeError('Other hash must not be None.')
        if self.hash.size != other.hash.size:
            raise TypeError('ImageHashes must be of the same shape.', self.hash.shape, other.hash.shape)
        return np.count_nonzero(self.hash != other.hash)

    def __eq__(self, other):
        if other is None:
            return False
        return np.array_equal(self.hash, other.hash)

    def __ne__(self, other):
        # BUG FIX: a hash is *not equal* to None, so __ne__ must return True
        # here. The original returned False, making both `h == None` and
        # `h != None` False at the same time.
        if other is None:
            return True
        return not np.array_equal(self.hash, other.hash)

    def __hash__(self):
        # Interpret the flat binary array as a bit vector (bit i set when
        # element i is truthy), consistent with __eq__ on equal arrays.
        return sum([2 ** i for i, v in enumerate(self.hash) if v])

    def __iter__(self):
        # Yield self so a single hash can be used where an iterable of
        # hashes is expected.
        yield self

    @property
    def size(self):
        # Number of recorded positions, not the length of the hash itself.
        return len(self.pos)

    def reshape(self, *args):
        # for lazy compat
        return self.hash.reshape(*args)
def create_imghash(img):
    """Create a phash of a image/frame"""
    # A string argument is treated as a path and loaded as grayscale.
    source = cv2.imread(img, 0) if isinstance(img, str) else img
    return cv2.img_hash.pHash(source)
def hash_file(path, step=1, frame_range=False, end=None):
    """Hash a file or image."""
    # Single still images short-circuit: one hash, at position zero.
    if isinstance(path, str) and path.endswith(image_type):
        yield ImageHash(create_imghash(path)), cv2.imread(path, 0), 0
        return

    frames = video_frame_by_frame(path, frame_range=frame_range,
                                  step=step, end=end)
    for frame, pos in frames:
        frame_hash = create_imghash(frame)
        yield ImageHash(frame_hash), frame, pos
| 25.094118
| 104
| 0.631036
|
4a055ec19857882757d433ebca446ab201036be1
| 15,860
|
py
|
Python
|
geoalchemy2/types.py
|
andriyor/geoalchemy2
|
6f165b414ccdd6a19349d018db3f0c58a10a1daf
|
[
"MIT"
] | 1
|
2022-02-01T23:52:06.000Z
|
2022-02-01T23:52:06.000Z
|
geoalchemy2/types.py
|
krisHans3n/geoalchemy2
|
604c1fcc824d243698ca46227fd05a8cbbb9db2c
|
[
"MIT"
] | null | null | null |
geoalchemy2/types.py
|
krisHans3n/geoalchemy2
|
604c1fcc824d243698ca46227fd05a8cbbb9db2c
|
[
"MIT"
] | null | null | null |
""" This module defines the :class:`geoalchemy2.types.Geometry`,
:class:`geoalchemy2.types.Geography`, and :class:`geoalchemy2.types.Raster`
classes, that are used when defining geometry, geography and raster
columns/properties in models.
Reference
---------
"""
import warnings
from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql.base import ischema_names as postgresql_ischema_names
from sqlalchemy.dialects.sqlite.base import ischema_names as sqlite_ischema_names
from sqlalchemy.sql import func
from sqlalchemy.types import Float
from sqlalchemy.types import Integer
from sqlalchemy.types import UserDefinedType
try:
from .shape import to_shape
SHAPELY = True
except ImportError:
SHAPELY = False
from .comparator import BaseComparator
from .comparator import Comparator
from .elements import CompositeElement
from .elements import RasterElement
from .elements import WKBElement
from .elements import WKTElement
from .exc import ArgumentError
class _GISType(UserDefinedType):
    """
    The base class for :class:`geoalchemy2.types.Geometry` and
    :class:`geoalchemy2.types.Geography`.

    This class defines ``bind_expression`` and ``column_expression`` methods
    that wrap column expressions in ``ST_GeomFromEWKT``, ``ST_GeogFromText``,
    or ``ST_AsEWKB`` calls.

    This class also defines ``result_processor`` and ``bind_processor``
    methods. The function returned by ``result_processor`` converts WKB values
    received from the database to :class:`geoalchemy2.elements.WKBElement`
    objects. The function returned by ``bind_processor`` converts
    :class:`geoalchemy2.elements.WKTElement` objects to EWKT strings.

    Constructor arguments:

    ``geometry_type``

        The geometry type.

        Possible values are:

        * ``"GEOMETRY"``,
        * ``"POINT"``,
        * ``"LINESTRING"``,
        * ``"POLYGON"``,
        * ``"MULTIPOINT"``,
        * ``"MULTILINESTRING"``,
        * ``"MULTIPOLYGON"``,
        * ``"GEOMETRYCOLLECTION"``,
        * ``"CURVE"``,
        * ``None``.

        The latter is actually not supported with
        :class:`geoalchemy2.types.Geography`.

        When set to ``None`` then no "geometry type" constraints will be
        attached to the geometry type declaration. Using ``None`` here
        is not compatible with setting ``management`` to ``True``.

        Default is ``"GEOMETRY"``.

    ``srid``

        The SRID for this column. E.g. 4326. Default is ``-1``.

    ``dimension``

        The dimension of the geometry. Default is ``2``.

        With ``management`` set to ``True``, that is when ``AddGeometryColumn`` is used
        to add the geometry column, there are two constraints:

        * The ``geometry_type`` must not end with ``"ZM"``. This is due to PostGIS'
          ``AddGeometryColumn`` failing with ZM geometry types. Instead the "simple"
          geometry type (e.g. POINT rather POINTZM) should be used with ``dimension``
          set to ``4``.
        * When the ``geometry_type`` ends with ``"Z"`` or ``"M"`` then ``dimension``
          must be set to ``3``.

        With ``management`` set to ``False`` (the default) ``dimension`` is not
        taken into account, and the actual dimension is fully defined with the
        ``geometry_type``.

    ``spatial_index``

        Indicate if a spatial index should be created. Default is ``True``.

    ``use_N_D_index``

        Use the N-D index instead of the standard 2-D index.

    ``management``

        Indicate if the ``AddGeometryColumn`` and ``DropGeometryColumn``
        managements functions should be called when adding and dropping the
        geometry column. Should be set to ``True`` for PostGIS 1.x and SQLite. Default is
        ``False``. Note that this option has no effect for
        :class:`geoalchemy2.types.Geography`.

    ``use_typmod``

        By default PostgreSQL type modifiers are used to create the geometry
        column. To use check constraints instead set ``use_typmod`` to
        ``False``. By default this option is not included in the call to
        ``AddGeometryColumn``. Note that this option is only taken
        into account if ``management`` is set to ``True`` and is only available
        for PostGIS 2.x.
    """

    name = None
    """ Name used for defining the main geo type (geometry or geography)
    in CREATE TABLE statements. Set in subclasses. """

    from_text = None
    """ The name of "from text" function for this type.
    Set in subclasses. """

    as_binary = None
    """ The name of the "as binary" function for this type.
    Set in subclasses. """

    comparator_factory = Comparator
    """ This is the way by which spatial operators are defined for
    geometry/geography columns. """

    cache_ok = False
    """ Disable cache for this type. """

    def __init__(self, geometry_type='GEOMETRY', srid=-1, dimension=2,
                 spatial_index=True, use_N_D_index=False, management=False, use_typmod=None,
                 from_text=None, name=None, nullable=True):
        # Validate/normalize arguments first; raises ArgumentError on bad input.
        geometry_type, srid = self.check_ctor_args(
            geometry_type, srid, dimension, management, use_typmod, nullable)
        self.geometry_type = geometry_type
        self.srid = srid
        # Only override the class-level defaults when explicitly provided.
        if name is not None:
            self.name = name
        if from_text is not None:
            self.from_text = from_text
        self.dimension = dimension
        self.spatial_index = spatial_index
        self.use_N_D_index = use_N_D_index
        self.management = management
        self.use_typmod = use_typmod
        # "Extended" (EWKB) mode is implied by the subclass' as_binary choice.
        self.extended = self.as_binary == 'ST_AsEWKB'
        self.nullable = nullable

    def get_col_spec(self):
        # Without a geometry type, emit the bare type name (e.g. "geometry");
        # otherwise the parametrized form "name(TYPE,srid)".
        if not self.geometry_type:
            return self.name
        return '%s(%s,%d)' % (self.name, self.geometry_type, self.srid)

    def column_expression(self, col):
        """Specific column_expression that automatically adds a conversion function"""
        return getattr(func, self.as_binary)(col, type_=self)

    def result_processor(self, dialect, coltype):
        """Specific result_processor that automatically process spatial elements"""
        def process(value):
            # NULL database values pass through as None (implicit return).
            if value is not None:
                kwargs = {}
                # Only force the SRID when the type declares a positive one;
                # otherwise the element reads it from the data itself.
                if self.srid > 0:
                    kwargs['srid'] = self.srid
                if self.extended is not None:
                    kwargs['extended'] = self.extended
                return self.ElementType(value, **kwargs)
        return process

    def bind_expression(self, bindvalue):
        """Specific bind_expression that automatically adds a conversion function"""
        return getattr(func, self.from_text)(bindvalue, type_=self)

    def bind_processor(self, dialect):
        """Specific bind_processor that automatically process spatial elements"""
        def process(bindvalue):
            if isinstance(bindvalue, WKTElement):
                if bindvalue.extended:
                    # EWKT already embeds the SRID.
                    return '%s' % (bindvalue.data)
                else:
                    return 'SRID=%d;%s' % (bindvalue.srid, bindvalue.data)
            elif isinstance(bindvalue, WKBElement):
                if dialect.name == 'sqlite' or not bindvalue.extended:
                    # With SpatiaLite or when the WKBElement includes a WKB value rather
                    # than a EWKB value we use Shapely to convert the WKBElement to an
                    # EWKT string
                    if not SHAPELY:
                        raise ArgumentError('Shapely is required for handling WKBElement bind '
                                            'values when using SpatiaLite or when the bind value '
                                            'is a WKB rather than an EWKB')
                    shape = to_shape(bindvalue)
                    return 'SRID=%d;%s' % (bindvalue.srid, shape.wkt)
                else:
                    # PostGIS ST_GeomFromEWKT works with EWKT strings as well
                    # as EWKB hex strings
                    return bindvalue.desc
            elif isinstance(bindvalue, RasterElement):
                return '%s' % (bindvalue.data)
            else:
                # Anything else (plain strings, None, ...) is passed through.
                return bindvalue
        return process

    @staticmethod
    def check_ctor_args(geometry_type, srid, dimension, management, use_typmod, nullable):
        # Validate and normalize constructor arguments; returns the
        # upper-cased geometry_type and the integer srid.
        try:
            srid = int(srid)
        except ValueError:
            raise ArgumentError('srid must be convertible to an integer')
        if geometry_type:
            geometry_type = geometry_type.upper()
            if management:
                if geometry_type.endswith('ZM'):
                    # PostGIS' AddGeometryColumn does not work with ZM geometry types. Instead
                    # the simple geometry type (e.g. POINT rather POINTZM) should be used with
                    # dimension set to 4
                    raise ArgumentError(
                        'with management=True use geometry_type={!r} and '
                        'dimension=4 for {!r} geometries'.format(geometry_type[:-2], geometry_type))
                elif geometry_type[-1] in ('Z', 'M') and dimension != 3:
                    # If a Z or M geometry type is used then dimension must be set to 3
                    raise ArgumentError(
                        'with management=True dimension must be 3 for '
                        '{!r} geometries'.format(geometry_type))
        else:
            if management:
                raise ArgumentError('geometry_type set to None not compatible '
                                    'with management')
            if srid > 0:
                warnings.warn('srid not enforced when geometry_type is None')
        if use_typmod and not management:
            warnings.warn('use_typmod ignored when management is False')
        if use_typmod is not None and not nullable:
            raise ArgumentError(
                'The "nullable" and "use_typmod" arguments can not be used together'
            )
        return geometry_type, srid
class Geometry(_GISType):
    """
    The Geometry type.

    Creating a geometry column is done like this::

        Column(Geometry(geometry_type='POINT', srid=4326))

    See :class:`geoalchemy2.types._GISType` for the list of arguments that can
    be passed to the constructor.

    If ``srid`` is set then the ``WKBElement`` objects resulting from queries will
    have that SRID, and, when constructing the ``WKBElement`` objects, the SRID
    won't be read from the data returned by the database. If ``srid`` is not set
    (meaning it's ``-1``) then the SRID set in ``WKBElement`` objects will be read
    from the data returned by the database.
    """

    name = 'geometry'
    """ Type name used for defining geometry columns in ``CREATE TABLE``. """

    from_text = 'ST_GeomFromEWKT'
    """ The "from text" geometry constructor. Used by the parent class'
    ``bind_expression`` method. """

    # Choosing ST_AsEWKB here makes _GISType.__init__ set ``extended`` to
    # True, so result elements carry EWKB data.
    as_binary = 'ST_AsEWKB'
    """ The "as binary" function to use. Used by the parent class'
    ``column_expression`` method. """

    ElementType = WKBElement
    """ The element class to use. Used by the parent class'
    ``result_processor`` method. """

    cache_ok = False
    """ Disable cache for this type. """
class Geography(_GISType):
    """
    The Geography type.

    Creating a geography column is done like this::

        Column(Geography(geometry_type='POINT', srid=4326))

    See :class:`geoalchemy2.types._GISType` for the list of arguments that can
    be passed to the constructor.
    """

    name = 'geography'
    """ Type name used for defining geography columns in ``CREATE TABLE``. """

    from_text = 'ST_GeogFromText'
    """ The ``FromText`` geography constructor. Used by the parent class'
    ``bind_expression`` method. """

    # Plain WKB (not EWKB): _GISType.__init__ therefore sets ``extended``
    # to False for this type.
    as_binary = 'ST_AsBinary'
    """ The "as binary" function to use. Used by the parent class'
    ``column_expression`` method. """

    ElementType = WKBElement
    """ The element class to use. Used by the parent class'
    ``result_processor`` method. """

    cache_ok = False
    """ Disable cache for this type. """
class Raster(_GISType):
    """
    The Raster column type.

    Creating a raster column is done like this::

        Column(Raster)

    This class defines the ``result_processor`` method, so that raster values
    received from the database are converted to
    :class:`geoalchemy2.elements.RasterElement` objects.

    Constructor arguments:

    ``spatial_index``

        Indicate if a spatial index should be created. Default is ``True``.
    """

    comparator_factory = BaseComparator
    """
    This is the way by which spatial operators and functions are
    defined for raster columns.
    """

    name = 'raster'
    """ Type name used for defining raster columns in ``CREATE TABLE``. """

    from_text = 'raster'
    """ The "from text" raster constructor. Used by the parent class'
    ``bind_expression`` method. """

    as_binary = 'raster'
    """ The "as binary" function to use. Used by the parent class'
    ``column_expression`` method. """

    ElementType = RasterElement
    """ The element class to use. Used by the parent class'
    ``result_processor`` method. """

    cache_ok = False
    """ Disable cache for this type. """

    def __init__(self, spatial_index=True, from_text=None, name=None, nullable=True):
        # Enforce default values
        # Rasters have no geometry type, SRID, or management mode; only the
        # spatial-index flag (and optional overrides) is configurable.
        super(Raster, self).__init__(
            geometry_type=None,
            srid=-1,
            dimension=2,
            spatial_index=spatial_index,
            use_N_D_index=False,
            management=False,
            use_typmod=False,
            from_text=from_text,
            name=name,
            nullable=nullable,
        )
        # Neither WKB nor EWKB applies to raster data.
        self.extended = None
class _DummyGeometry(Geometry):
    """A dummy type only used with SQLite."""

    def get_col_spec(self):
        # SQLite has no parametrized geometry type; emit the bare keyword.
        return 'GEOMETRY'
class CompositeType(UserDefinedType):
    """
    A wrapper for :class:`geoalchemy2.elements.CompositeElement`, that can be
    used as the return type in PostgreSQL functions that return composite
    values.

    This is used as the base class of :class:`geoalchemy2.types.GeometryDump`.
    """

    typemap = {}
    """ Dictionary used for defining the content types and their
    corresponding keys. Set in subclasses. """

    class comparator_factory(UserDefinedType.Comparator):
        def __getattr__(self, key):
            # Attribute access on a composite column expression resolves the
            # field's type from ``typemap`` and wraps it in a CompositeElement.
            try:
                type_ = self.type.typemap[key]
            except KeyError:
                raise AttributeError("Type '%s' doesn't have an attribute: '%s'"
                                     % (self.type, key))

            return CompositeElement(self.expr, key, type_)
class GeometryDump(CompositeType):
    """
    Return type for functions such as ``ST_Dump``: a composite of a ``path``
    array and a ``geom`` field. You should normally never use this class
    directly.
    """

    typemap = {'path': postgresql.ARRAY(Integer), 'geom': Geometry}
    """ Dictionary defining the contents of a ``geometry_dump``. """

    cache_ok = True
    """ Enable cache for this type. """
# Register Geometry, Geography and Raster to SQLAlchemy's reflection subsystems.
# PostgreSQL's information schema reports lower-case type names:
postgresql_ischema_names['geometry'] = Geometry
postgresql_ischema_names['geography'] = Geography
postgresql_ischema_names['raster'] = Raster
# SQLite reports upper-case type names:
sqlite_ischema_names['GEOMETRY'] = Geometry
sqlite_ischema_names['RASTER'] = Raster
class SummaryStats(CompositeType):
    """Composite type returned by the ``ST_SummaryStatsAgg`` function."""

    # Every statistic except the integer count is a float.
    typemap = dict(
        count=Integer,
        sum=Float,
        mean=Float,
        stddev=Float,
        min=Float,
        max=Float,
    )

    cache_ok = True
    """ Enable cache for this type. """
| 35.166297
| 100
| 0.625032
|
4a055f2e362901b68886d484f55f60d8c8b6ef62
| 433
|
py
|
Python
|
tests/managers.py
|
unchris/django-model-utils
|
c9c04da1b2b19c44aa2349697b3d07b38c7b34a5
|
[
"BSD-3-Clause"
] | 1
|
2017-03-05T01:43:57.000Z
|
2017-03-05T01:43:57.000Z
|
tests/managers.py
|
unchris/django-model-utils
|
c9c04da1b2b19c44aa2349697b3d07b38c7b34a5
|
[
"BSD-3-Clause"
] | 1
|
2019-08-07T14:32:26.000Z
|
2019-08-07T14:32:26.000Z
|
tests/managers.py
|
unchris/django-model-utils
|
c9c04da1b2b19c44aa2349697b3d07b38c7b34a5
|
[
"BSD-3-Clause"
] | 1
|
2019-11-04T22:51:26.000Z
|
2019-11-04T22:51:26.000Z
|
from __future__ import unicode_literals, absolute_import
from model_utils.managers import SoftDeletableQuerySet, SoftDeletableManager
class CustomSoftDeleteQuerySet(SoftDeletableQuerySet):
    """Soft-deletable queryset extended with a read-status filter."""

    def only_read(self):
        """Return only objects whose ``is_read`` flag is set."""
        return self.filter(is_read=True)
class CustomSoftDeleteManager(SoftDeletableManager):
    """Soft-delete manager that proxies the queryset's ``only_read`` filter."""

    _queryset_class = CustomSoftDeleteQuerySet

    def only_read(self):
        """Return only objects whose ``is_read`` flag is set."""
        queryset = self.get_queryset()
        return queryset.only_read()
| 27.0625
| 76
| 0.803695
|
4a055f56d1bfdd18b991668b0488eb1bfd1e4cc5
| 14,498
|
py
|
Python
|
indico/modules/rb/models/reservations_test.py
|
bkmgit/indico
|
d77ee121e35880a416b9b05e6098ea912d870b5c
|
[
"MIT"
] | 1
|
2021-06-11T20:02:10.000Z
|
2021-06-11T20:02:10.000Z
|
indico/modules/rb/models/reservations_test.py
|
bkmgit/indico
|
d77ee121e35880a416b9b05e6098ea912d870b5c
|
[
"MIT"
] | null | null | null |
indico/modules/rb/models/reservations_test.py
|
bkmgit/indico
|
d77ee121e35880a416b9b05e6098ea912d870b5c
|
[
"MIT"
] | null | null | null |
# This file is part of Indico.
# Copyright (C) 2002 - 2022 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from datetime import date, datetime, time
import pytest
from dateutil.relativedelta import relativedelta
from indico.modules.rb.models.reservation_edit_logs import ReservationEditLog
from indico.modules.rb.models.reservation_occurrences import ReservationOccurrence, ReservationOccurrenceState
from indico.modules.rb.models.reservations import RepeatFrequency, RepeatMapping, Reservation, ReservationState
pytest_plugins = 'indico.modules.rb.testing.fixtures'
@pytest.fixture
def overlapping_reservation(create_reservation):
    """Reservation for today 02:00-04:00 plus an occurrence overlapping it (01:00-05:00).

    ``date.today()`` is evaluated once so that the four timestamps cannot end
    up on different days if the fixture happens to run across midnight.
    """
    today = date.today()
    reservation = create_reservation(start_dt=today + relativedelta(hour=2),
                                     end_dt=today + relativedelta(hour=4))
    occurrence = ReservationOccurrence(start_dt=today + relativedelta(hour=1),
                                       end_dt=today + relativedelta(hour=5))
    return reservation, occurrence
# ======================================================================================================================
# RepeatMapping tests
# ======================================================================================================================
@pytest.mark.parametrize(('repetition', 'message'), (
    ((RepeatFrequency.NEVER, 0), 'single booking'),
    ((RepeatFrequency.DAY, 1), 'daily booking'),
    ((RepeatFrequency.WEEK, 1), 'weekly'),
    ((RepeatFrequency.WEEK, 2), 'every 2 weeks'),
    ((RepeatFrequency.MONTH, 1), 'monthly'),
    ((RepeatFrequency.MONTH, 2), 'every 2 months'),
))
def test_repeat_mapping(repetition, message):
    """Each (frequency, interval) pair maps to its human-readable label."""
    frequency, interval = repetition
    assert RepeatMapping.get_message(frequency, interval) == message
# ======================================================================================================================
# Hybrid property tests
# ======================================================================================================================
@pytest.mark.parametrize(('days_delta', 'expected'), (
    (-1, True),   # Reservation in the past
    (1, False),   # Reservation in the future
    (0, False),   # Reservation in course
))
def test_is_archived(create_reservation, days_delta, expected):
    """A reservation is archived only once its full day range has passed.

    ``date.today()`` is evaluated once so start/end cannot land on different
    days if the test runs across midnight.
    """
    today = date.today()
    start_dt = today + relativedelta(days=days_delta, hour=0, minute=0)
    end_dt = today + relativedelta(days=days_delta, hour=23, minute=59)
    reservation = create_reservation(start_dt=start_dt, end_dt=end_dt)
    assert reservation.is_archived == expected
@pytest.mark.parametrize(('repeat_frequency', 'expected'), (
    (RepeatFrequency.NEVER, False),
    (RepeatFrequency.DAY, True),
    (RepeatFrequency.WEEK, True),
    (RepeatFrequency.MONTH, True),
))
def test_is_repeating(create_reservation, repeat_frequency, expected):
    """Any frequency other than NEVER marks the reservation as repeating."""
    reservation = create_reservation(repeat_frequency=repeat_frequency)
    assert reservation.is_repeating == expected
# ======================================================================================================================
# Property tests
# ======================================================================================================================
def test_booked_for_user(dummy_reservation, dummy_user):
    """The dummy reservation is booked for the dummy user."""
    assert dummy_reservation.booked_for_user == dummy_user


def test_booked_for_user_after_change(db, dummy_reservation, create_user):
    """Reassigning booked_for_user also updates the id and cached name."""
    other_user = create_user(123, first_name='foo', last_name='bar')
    assert dummy_reservation.booked_for_name != other_user.full_name
    dummy_reservation.booked_for_user = other_user
    db.session.flush()
    assert dummy_reservation.booked_for_user == other_user
    assert dummy_reservation.booked_for_id == other_user.id
    assert dummy_reservation.booked_for_name == other_user.full_name


def test_created_by_user(dummy_reservation, dummy_user):
    """The dummy reservation was created by the dummy user."""
    assert dummy_reservation.created_by_user == dummy_user


def test_created_by_user_after_change(db, dummy_reservation, dummy_user):
    """Setting created_by_user keeps the id column in sync."""
    dummy_reservation.created_by_user = dummy_user
    db.session.flush()
    assert dummy_reservation.created_by_user == dummy_user
    assert dummy_reservation.created_by_id == dummy_user.id


def test_created_by_user_with_no_id(db, dummy_reservation):
    """A NULL created_by_id resolves to no user after an expire/reload."""
    dummy_reservation.created_by_id = None
    db.session.flush()
    db.session.expire(dummy_reservation)
    assert dummy_reservation.created_by_user is None


def test_external_details_url(dummy_reservation):
    """Every reservation exposes a non-empty external details URL."""
    assert dummy_reservation.external_details_url


def test_location_name(dummy_reservation, dummy_location):
    """location_name mirrors the name of the reservation's location."""
    assert dummy_location.name == dummy_reservation.location_name


def test_repetition(dummy_reservation):
    """repetition bundles (repeat_frequency, repeat_interval) into a tuple."""
    assert (dummy_reservation.repeat_frequency, dummy_reservation.repeat_interval) == dummy_reservation.repetition
# ======================================================================================================================
# staticmethod tests
# ======================================================================================================================
def test_find_overlapping_with_different_room(overlapping_reservation, create_room):
    """Overlap detection is scoped to the given room."""
    reservation, occurrence = overlapping_reservation
    assert reservation in Reservation.find_overlapping_with(room=reservation.room, occurrences=[occurrence]).all()
    assert reservation not in Reservation.find_overlapping_with(room=create_room(), occurrences=[occurrence]).all()


def test_find_overlapping_with_is_not_valid(overlapping_reservation, dummy_user, freeze_time):
    """A cancelled reservation no longer counts as overlapping."""
    # Freeze before the occurrence start so cancelling is still allowed.
    freeze_time(datetime.combine(date.today(), time(1)))
    reservation, occurrence = overlapping_reservation
    assert reservation in Reservation.find_overlapping_with(room=reservation.room,
                                                            occurrences=[occurrence]).all()
    reservation.cancel(dummy_user, silent=True)
    assert reservation not in Reservation.find_overlapping_with(room=reservation.room,
                                                                occurrences=[occurrence]).all()


def test_find_overlapping_with_skip_reservation(overlapping_reservation):
    """skip_reservation_id excludes the given reservation from the query."""
    reservation, occurrence = overlapping_reservation
    assert reservation in Reservation.find_overlapping_with(room=reservation.room, occurrences=[occurrence]).all()
    assert reservation not in Reservation.find_overlapping_with(room=reservation.room,
                                                                occurrences=[occurrence],
                                                                skip_reservation_id=reservation.id).all()
# ======================================================================================================================
# method tests
# ======================================================================================================================
@pytest.mark.parametrize('silent', (True, False))
def test_cancel(smtp, create_reservation, dummy_user, silent, freeze_time):
    """Cancelling cascades to all occurrences; silent mode skips log + email."""
    reservation = create_reservation(start_dt=date.today() + relativedelta(hour=8),
                                     end_dt=date.today() + relativedelta(days=10, hour=17),
                                     repeat_frequency=RepeatFrequency.DAY)
    # Freeze before the first occurrence so every occurrence is cancellable.
    freeze_time(datetime.combine(date.today(), time(7, 30)))
    assert not reservation.is_cancelled
    assert not any(occ.is_cancelled for occ in reservation.occurrences)
    reservation.cancel(user=dummy_user, reason='cancelled', silent=silent)
    assert reservation.is_cancelled
    assert reservation.rejection_reason == 'cancelled'
    assert all(occ.is_cancelled for occ in reservation.occurrences)
    if silent:
        assert not reservation.edit_logs.count()
        assert not smtp.outbox
    else:
        assert reservation.edit_logs.count() == 1
        assert smtp.outbox
@pytest.mark.parametrize('silent', (True, False))
def test_reject(smtp, create_reservation, dummy_user, silent):
    """Rejecting cascades to all occurrences; silent mode skips log + email."""
    reservation = create_reservation(start_dt=date.today() + relativedelta(hour=8),
                                     end_dt=date.today() + relativedelta(days=10, hour=17),
                                     repeat_frequency=RepeatFrequency.DAY)
    assert not reservation.is_rejected
    assert not any(occ.is_rejected for occ in reservation.occurrences)
    reservation.reject(user=dummy_user, reason='rejected', silent=silent)
    assert reservation.is_rejected
    assert reservation.rejection_reason == 'rejected'
    assert all(occ.is_rejected for occ in reservation.occurrences)
    if silent:
        assert not reservation.edit_logs.count()
        assert not smtp.outbox
    else:
        assert reservation.edit_logs.count() == 1
        assert smtp.outbox
@pytest.mark.parametrize('reason', ('My reason.', None))
def test_accept(smtp, create_reservation, dummy_user, reason):
    """Accepting a booking flips its state, logs the action and emails both parties.

    Bug fix: the original asserted a list comprehension (always truthy for a
    non-empty outbox), so the per-mail checks never actually ran; use all().
    """
    reservation = create_reservation(start_dt=date.today() + relativedelta(hour=8),
                                     end_dt=date.today() + relativedelta(days=10, hour=17),
                                     repeat_frequency=RepeatFrequency.DAY)
    assert not reservation.is_rejected
    assert not any(occ.is_rejected for occ in reservation.occurrences)
    reservation.accept(user=dummy_user, reason=reason)
    assert reservation.is_accepted
    assert len(smtp.outbox) == 2
    if reason:
        assert all('My reason' in mail.as_string() for mail in smtp.outbox)
        assert reservation.edit_logs.one().info == ['Reservation accepted: My reason.']
    else:
        assert all('My reason' not in mail.as_string() for mail in smtp.outbox)
        assert reservation.edit_logs.one().info == ['Reservation accepted']
def test_add_edit_log(dummy_reservation):
    """add_edit_log attaches an entry to the reservation's edit log."""
    dummy_reservation.add_edit_log(ReservationEditLog(user_name='user', info='Some change'))
    assert dummy_reservation.edit_logs.count() == 1


@pytest.mark.parametrize('can_moderate', (True, False))
@pytest.mark.parametrize('is_pending', (True, False))
def test_moderation(dummy_reservation, create_user, is_pending, can_moderate):
    """Moderators may reject any booking but accept only pending ones."""
    user = create_user(123)
    if is_pending:
        dummy_reservation.state = ReservationState.pending
    if can_moderate:
        dummy_reservation.room.update_principal(user, permissions={'moderate'})
    assert dummy_reservation.can_accept(user) == (is_pending and can_moderate)
    assert dummy_reservation.can_reject(user) == can_moderate
@pytest.mark.parametrize('is_manager', (True, False))
@pytest.mark.parametrize('is_past', (True, False))
@pytest.mark.parametrize('state', ReservationState)
@pytest.mark.parametrize('is_admin', (True, False))
def test_room_manager_actions(create_reservation, create_user, is_manager, is_past, state, is_admin):
    """Exhaustive permission matrix for managers/admins across states and time."""
    user = create_user(123, rb_admin=is_admin)
    day_offset = -1 if is_past else 1
    reservation = create_reservation(start_dt=date.today() + relativedelta(days=day_offset, hour=8, minute=30),
                                     end_dt=date.today() + relativedelta(days=day_offset, hour=8, minute=30))
    reservation.state = state
    if is_manager:
        reservation.room.update_principal(user, full_access=True)
    # cancelled/rejected bookings cannot be acted upon any further
    invalid_state = state in (ReservationState.cancelled, ReservationState.rejected)
    assert reservation.can_accept(user) == (reservation.is_pending and (is_manager or is_admin) and not invalid_state)
    assert reservation.can_reject(user) == (not invalid_state and (is_manager or is_admin))
    assert reservation.can_cancel(user) == (not is_past and not invalid_state and is_admin)
    assert reservation.can_edit(user) == (((is_manager and not is_past) or is_admin) and not invalid_state)
    assert reservation.can_delete(user) == (is_admin and invalid_state)
@pytest.mark.parametrize('is_creator', (True, False))
@pytest.mark.parametrize('is_bookee', (True, False))
@pytest.mark.parametrize('is_past', (True, False))
@pytest.mark.parametrize('state', ReservationState)
def test_user_actions(create_user, create_reservation, is_creator, is_bookee, is_past, state):
    """Creators/bookees may cancel and edit only future, still-valid bookings."""
    user = create_user(123)
    day_offset = -1 if is_past else 1
    reservation = create_reservation(start_dt=date.today() + relativedelta(days=day_offset, hour=8, minute=30),
                                     end_dt=date.today() + relativedelta(days=day_offset, hour=8, minute=30),
                                     state=state)
    if is_creator:
        reservation.created_by_user = user
    if is_bookee:
        reservation.booked_for_user = user
    valid_state = state in (ReservationState.pending, ReservationState.accepted)
    assert reservation.can_cancel(user) == ((is_creator or is_bookee) and not is_past and valid_state)
    assert reservation.can_edit(user) == ((is_creator or is_bookee) and not is_past and valid_state)


def test_actions_no_user(dummy_reservation):
    """Without a user, every permission check is denied."""
    assert not dummy_reservation.can_accept(None)
    assert not dummy_reservation.can_cancel(None)
    assert not dummy_reservation.can_delete(None)
    assert not dummy_reservation.can_edit(None)
    assert not dummy_reservation.can_reject(None)
def test_find_excluded_days(db, create_reservation):
    """find_excluded_days returns exactly the non-valid occurrences."""
    reservation = create_reservation(start_dt=date.today() + relativedelta(hour=8),
                                     end_dt=date.today() + relativedelta(days=5, hour=10),
                                     repeat_frequency=RepeatFrequency.DAY)
    # cancel every other occurrence
    for occ in reservation.occurrences[::2]:
        occ.state = ReservationOccurrenceState.cancelled
    db.session.flush()
    assert set(reservation.find_excluded_days().all()) == {occ for occ in reservation.occurrences if not occ.is_valid}


def test_find_overlapping(create_reservation):
    """find_overlapping reports other reservations but never the one itself."""
    resv1 = create_reservation(state=ReservationState.pending)
    assert not resv1.find_overlapping().count()
    resv2 = create_reservation(state=ReservationState.pending)
    assert resv1.find_overlapping().one() == resv2


@pytest.mark.parametrize(('is_booked_for', 'expected'), (
    (True, True),
    (False, False),
))
def test_is_booked_for(dummy_reservation, dummy_user, create_user, is_booked_for, expected):
    """is_booked_for matches only the user the booking was made for."""
    if not is_booked_for:
        dummy_reservation.booked_for_user = create_user(123)
    assert dummy_reservation.is_booked_for(dummy_user) == expected


def test_is_booked_for_no_user(dummy_reservation):
    """is_booked_for is False when no user is given."""
    assert not dummy_reservation.is_booked_for(None)


def test_is_created_by(dummy_reservation, dummy_user):
    # NOTE(review): the name says "created by" but the assertion calls
    # is_owned_by — presumably ownership equals the creator here; confirm
    # against the Reservation model API.
    assert dummy_reservation.is_owned_by(dummy_user)
| 46.319489
| 120
| 0.665885
|
4a055fb4ae8f463d5fcaa7cfa42077df18f4f8c6
| 15,302
|
py
|
Python
|
nyc-311.py
|
vigneshmanikandan97/nyc-311
|
1e0b158168bb77de180164427850c1b97492221e
|
[
"MIT"
] | 2
|
2021-04-10T18:09:09.000Z
|
2021-04-10T18:09:16.000Z
|
nyc-311.py
|
vigneshmanikandan97/nyc-311
|
1e0b158168bb77de180164427850c1b97492221e
|
[
"MIT"
] | null | null | null |
nyc-311.py
|
vigneshmanikandan97/nyc-311
|
1e0b158168bb77de180164427850c1b97492221e
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# coding: utf-8
# Exploratory analysis of NYC 311 service requests (notebook export).

# In[152]:

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns

# In[153]:

# Import CSV using pandas (low_memory=False parses the file in one pass so
# column dtypes are inferred consistently)
nyc_df = pd.read_csv("311_Service_Requests_from_2010_to_Present.csv", low_memory = False)

# <h1>Data Analysis</h1>
# <h3>Identifying dimensions and removing irrelevant data</h3>

# In[154]:

# dimensions of the dataset
nyc_df.shape

# In[155]:

# get dataset info
nyc_df.info()

# In[156]:

# Finding null values in dataset
nyc_df.isnull().sum()

# In[157]:

# Get columns
nyc_df.columns

# In[158]:

# Dropping irrelevant columns based on requirements
nyc_df.drop(['Agency Name','Incident Address','Street Name','Cross Street 1','Cross Street 2','Intersection Street 1',
             'Intersection Street 2','Address Type','Park Facility Name','Park Borough','School Name',
             'School Number','School Region','School Code','School Phone Number','School Address','School City',
             'School State','School Zip','School Not Found','School or Citywide Complaint','Vehicle Type',
             'Taxi Company Borough','Taxi Pick Up Location','Bridge Highway Name','Bridge Highway Direction',
             'Road Ramp','Bridge Highway Segment','Garage Lot Name','Ferry Direction','Ferry Terminal Name','Landmark',
             'X Coordinate (State Plane)','Y Coordinate (State Plane)','Due Date','Resolution Action Updated Date','Community Board','Facility Type',
             'Location'], inplace=True, axis=1)
nyc_df.head()

# <h3>Finding Elapsed Time for service requests</h3>

# In[159]:

# Take only records with tickets that are closed (open tickets have no
# closing date, which would break the elapsed-time computation below)
nyc_df = nyc_df[nyc_df['Status'] == "Closed"]

# In[160]:

# Adding new column for complaint elapsed time
nyc_df['Created Date'] = pd.to_datetime(nyc_df['Created Date'])
nyc_df['Closed Date'] = pd.to_datetime(nyc_df['Closed Date'])
nyc_df['Request_Closing_Time'] = nyc_df['Closed Date'] - nyc_df['Created Date']
nyc_df.info()

# In[161]:

# Replacing all null values with "unspecified"
nyc_df.fillna("unspecified", inplace=True)
# Checking unique values again
nyc_df.isnull().sum()
# <h1>Data Visualisation</h1>
# <h3>Complaint Type based on City and it's count</h3>

# In[162]:

grouped_df = pd.DataFrame({"Count": nyc_df.groupby(["Complaint Type", "City"]).size()}).reset_index()
grouped_df.sort_values("Count", ascending = False)

# <h3>Complaint Type vs Complaint Count (Bar Graph)</h3>

# In[163]:

# Horizontal bars: x axis is the count, y axis the complaint type.
ax1 = nyc_df["Complaint Type"].value_counts().plot(kind = 'barh', alpha = 0.6, figsize = (7, 10))
ax1.set_xlabel("complaint count")
ax1.set_ylabel("complaint type")

# As we can see in the above graph, NYPD receives maximum complaints for "Blocked Driveway"

# <h3>City vs Complaint Count (Bar Graph)</h3>

# In[164]:

# City with highest number of complaints.  For a *vertical* bar chart the
# x axis carries the city names and the y axis the counts — the original
# labels were swapped.
ax2 = nyc_df["City"].value_counts().plot(kind = 'bar', alpha = 0.6, figsize = (40, 10))
ax2.set_xlabel("city name")
ax2.set_ylabel("complaint count")

# As seen in the bar graph above, the city with the highest number of complaints is "Brooklyn"

# In[165]:

# Finding the most common complaints in New York City
complaintTypeByCity = pd.DataFrame({"Complaint Count": nyc_df[nyc_df["City"] == 'BROOKLYN'] .groupby(["Complaint Type", "City"]).size()}).reset_index()
complaintTypeByCity.sort_values("Complaint Count", ascending = False)

# As seen in the above table, "Blocked Driveway" is the highest reported 311 call by the people of "Brooklyn".

# <h3>Data Visualisation of Boroughs</h3>

# In[166]:

groupedByBorough = pd.DataFrame({"Complaint Count": nyc_df.groupby("Borough").size()})
groupedByBorough

# In[167]:

groupedByBorough.plot(kind="barh", alpha=0.6, figsize=(7, 7))

# As seen in the above bar graph, "Brooklyn" is the borough with the highest number of complaints.
# In[168]:

# Top complaints in dataset
groupedByDescriptor = pd.DataFrame({"Count": nyc_df.groupby("Descriptor").size()})
# sort_values() returns a copy; the original discarded it, so the plot was
# never actually sorted — assign it back.
groupedByDescriptor = groupedByDescriptor.sort_values("Count", ascending=False)
dax = groupedByDescriptor.plot(kind="barh", alpha=0.6, figsize=(7, 14))
# set_xlabel/set_ylabel are methods; assigning a string to them (as the
# original code did) silently replaces the method and labels nothing.
dax.set_xlabel("Count")
dax.set_ylabel("Descriptor")
plt.show()

# Based on the above observations, "Loud Music/Party" is the highest reported descriptor.

# ### Top complaints by Borough

# In[169]:

# Top 5 complaints in each borough and their counts
topComplaints = ['Blocked Driveway','Illegal Parking','Noise - Commercial','Noise - Street/Sidewalk','Derelict Vehicle','Animal Abuse']
topComplaintsByBorough = nyc_df.groupby(['Borough','Complaint Type']).size().unstack()
topComplaintsByBorough = topComplaintsByBorough[topComplaints]
topComplaintsByBorough
# In[170]:

col_number = 2
row_number = 3
fig, axes = plt.subplots(row_number, col_number, figsize=(12, 8))
# DataFrame.iteritems() was removed in pandas 2.0; items() is the
# long-standing equivalent and yields the same (label, column) pairs.
for i, (label, col) in enumerate(topComplaintsByBorough.items()):
    ax = axes[i // col_number, i % col_number]  # row-major subplot placement
    col = col.sort_values(ascending=True)[:15]
    col.plot(kind='barh', ax=ax)
    ax.set_title(label)
plt.tight_layout()

# * Clearly Manhattan is making most of the noise, followed by Brooklyn.
# * Brooklyn has the most number of 'Illegal parking' complaints and is also on the top for 'Animal Abuse'. (Savages!)
# * Queens has highest complaints for 'Blocked Driveway' as well as for 'Derelict Vehicle'.

# ### Complaints vs Borough plot

# In[171]:

complaintsByBorough = nyc_df.groupby(['Complaint Type','Borough']).size().unstack()
col_number = 2
row_number = 3
fig, axes = plt.subplots(row_number, col_number, figsize=(12, 12))
for i, (label, col) in enumerate(complaintsByBorough.items()):
    ax = axes[i // col_number, i % col_number]
    col = col.sort_values(ascending=True)[:15]
    col.plot(kind='barh', ax=ax)
    ax.set_title(label)
plt.tight_layout()

# * Apart from Manhattan, number of Complaints for 'Blocked Driveway' and 'Illegal Parking' is highest for each Borough.
# * Manhattan has highest number of Noise complaints. Fortunaltely, parking in Manhattan is better than other Boroughs.
# ### Plots based on Complaint Elapsed time

# In[172]:

# Convert elapsed time to hours (ceil rounding so, adding '1')
nyc_df['Request_Closing_Hours'] = nyc_df['Request_Closing_Time'].astype('timedelta64[h]') + 1
nyc_df[['Request_Closing_Time', 'Request_Closing_Hours']].head()

# In[173]:

etMean = nyc_df['Request_Closing_Hours'].mean()
etStandardDeviation = nyc_df['Request_Closing_Hours'].std()
print("Average elapsed time is {:.1f} hour(s) and Standard Deviation is {:.1f} hour(s)".format(etMean, etStandardDeviation))

# In[174]:

nyc_df['Request_Closing_Hours'].sort_values().tail()

# * As we can see, some of the closing times are too high and hence will be dealt as outliers.
# * We will convert the Request Closing time to normal z statistics and will remove any record having value more than 1.
# * z-statistic = (value-mean)/std
#
# #### Next we will plot the histogram of our Request_Closing_Time.

# In[175]:

# Keep only rows within one standard deviation above the mean (z < 1)
plot_data = nyc_df[ ((nyc_df['Request_Closing_Hours'] - etMean)/etStandardDeviation) < 1]
plot_data['Request_Closing_Hours'].hist(bins=9)
plt.xlabel('Time(Hours) to close the request')
plt.ylabel('Number of Requests')  # typo fixed: was 'Requets'
plt.title('Requests closing time distribution')
plt.show()

# * Above distribution shows that around half of overall complaints were closed within 2 to 4 hours.
# * Around 99% of the complaints were closed within 10 hours.
# * Lets look at the closing complaint performance Boroughwise
# In[176]:

# Mean closing hours per (complaint type, borough)
elapsedTimeByCity = nyc_df.groupby(['Complaint Type','Borough'])[['Request_Closing_Hours']].mean().unstack()
c = 2
r = 3
fig, axes = plt.subplots(r, c , figsize=(12,12))
# items() replaces iteritems(), which was removed in pandas 2.0.
for i, (label, col) in enumerate(elapsedTimeByCity.items()):
    ax = axes[i // c, i % c]
    col = col.sort_values(ascending=True)[:15]
    col.plot(kind='barh', ax=ax)
    ax.set_title(label)
plt.tight_layout()
# * Clealry 'Graffiti' complaints are taking a long time to be closed. Could be because the number of Graffiti complaints are very less and officials are focused on more pressing issues.
# * Manhattan, Bronx and Queens are handling 'Disorderly Youth' complaints very well.
# * Brooklyn is performing well to close Noise and traffic complaints.
#
# #### Lets now do a month wise analysis of complaints.

# In[178]:

import datetime

# Bucket every request by its creation month (YYYY-MM string)
nyc_df['YYYY-MM'] = nyc_df['Created Date'].apply(lambda x: datetime.datetime.strftime(x, '%Y-%m'))
#Incidents on a monthly basis
monthly_incidents = nyc_df.groupby('YYYY-MM').size().plot(figsize=(12,5), title='Incidents on a monthly basis')

# * January and Februray have the least number of complaints (Holidays or Winter ?)
# * May and September has the highest number of complaints (Summer parties ?)

# In[181]:

# Complaints per Borough through the year
nyc_df.groupby(['YYYY-MM','Borough']).size().unstack().plot(figsize=(15,6))
plt.legend(loc='center left', bbox_to_anchor=(1.0, 0.5))

# * Brooklyn raise highest number of complaints in May and September and least comlaints in February.
#
# #### As for the Request Closing time

# In[182]:

nyc_df.groupby(['YYYY-MM','Borough'])['Request_Closing_Hours'].mean().unstack().plot(figsize=(15,7), title='Processing time per Borough on a monthly basis')

# * Manhattan has the best average closing time throughout the year.
# * Shockingly Bronx has the highest closing time. It also has the leasts number of complaints after Staten island.
# * Staten island's closing time is better than Bronx.

# In[183]:

# Side-by-side comparison: complaint volume vs mean closing time per borough
fig = plt.figure(figsize=(21,6))
plt.subplot(1, 2, 1)
nyc_df.groupby('Borough')['Unique Key'].size().plot(kind='barh',title='Complaints per Borough')
plt.subplot(1, 2, 2)
nyc_df.groupby('Borough')['Request_Closing_Hours'].mean().plot(kind='barh',title='Mean Closing Time per Borough (Hours)')
plt.show()

# ### Conclusion:
# * Most complaints raised in May and September, while least in February.
# * Highest number of complaints raised are of 'Blocked Driveway' (28% of all the complaints)
# * Second highest number of complaints are of Noice Complaints (26% of all the complaints including both streetwalk and commercial complaints)
# * Third highest number of complaints are of 'Illegal Parking' (25% of all the complaints)
# ## Hypothesis Testing
# ### a) Whether the average response time across complaint types is similar or not (overall)
# * First we will convert our hourly timedelta into minuted for more precise results.
# * Below is the distribution of our Request_Closing_Minutes data for 'Noise - Street/Sidewalk' complaint type.
# * We see a positive skewness in data if we limit the range to 1250
#
# #### As our data contains too many outliers , hence we will transform the data using log transformation

# In[193]:

# Checking average response time for Noise - Street/Sidewalk
nyc_df['Request_Closing_Minutes'] = nyc_df['Request_Closing_Time'].astype('timedelta64[m]') + 1
noise_df = nyc_df[nyc_df['Complaint Type'] == 'Noise - Street/Sidewalk']['Request_Closing_Minutes']
noise_df.hist(bins=100,range=(0,1250))

# In[194]:

noise_df.describe()

# * When we look at above statistics, it becomes clear that we have very few but very large values after th 75th percentile.
# * We will take the log of Request_Closing_Minutes for each complaint type and store in a dictionary.
#
# #### Log transformation removes the skewness from the data:

# In[196]:

# Log-transformed closing minutes, keyed by complaint type
complaints = {}
for complaint in nyc_df['Complaint Type'].unique():
    complaints[complaint] = np.log(nyc_df[nyc_df['Complaint Type'] == complaint]['Request_Closing_Minutes'])
complaints['Noise - Street/Sidewalk'].hist(bins=100)

# Above distribution plot shows that once we apply log Transformation to our data, skewness is almost removed and it looks more like a normal distribution.

# In[192]:

complaints['Noise - Street/Sidewalk'].plot(kind = 'box')
# In[201]:

# Iterate items() directly instead of .keys() plus a repeated dict lookup;
# the printed output is identical.
for complaint, closing_minutes in complaints.items():
    print(complaint, closing_minutes.std(), sep=": ")
# To conduct our hypothesis test, we will conduct an *ANOVA (analysis of variance) test* as we have to compare the means of more than two groups.

# ##### Below conditions should be met before conducting ANOVA:
# * All distributions must follow a normal distributions curve. We have verified this after the log transformation
# * Standard deviation for all groups must be same. Above output proves that this is true.
# * All samples are drawn independently of each other.

# ### Defining Null and Alternate Hypothesis:
# H(o): Average response time for all the complaints type is same.
# H(a): Average response time for all the complaints type is not same and theres is some difference among the groups.
#
# #### Below We conduct ANOVA test for top 5 type of complaints
# * For a 95% of confidence interval we choose our alpha as 0.05 for 5%
# * Alpha(0.05) is the critical p-value, if our calculated p-value is less than alpha, it will give us strong evidence to reject Null Hypothesis.
#
# **if p < alpha(0.05):** Reject Null Hypothesis, Average response time for all the complaints type is not same.
#
# **if p > alpha(0.05):** Fail to reject Null Hypothesis, Average response time for all the complaints type is same.

# In[205]:

# import f_oneway from scipy.stats library
from scipy.stats import f_oneway as fow

# One-way ANOVA over the five most frequent complaint types
stat, p = fow(complaints['Noise - Street/Sidewalk'],
              complaints['Blocked Driveway'],
              complaints['Illegal Parking'],
              complaints['Derelict Vehicle'],
              complaints['Noise - Commercial'])
print('Statistics = %.3f, p = %.3f' % (stat, p))
# interpret hypothesis
alpha = 0.05
if p > alpha:
    print('Verdict = Fail to reject H(o) since they have same distributions')
else:
    print('Verdict = Reject H(o) since they have different distributions')

# Since our **_p-value is very low_** (and floored to 0.0), we will conclude by saying **_Average response time for all the complaints are not the same_**

# ### b) Are the type of complaint or service requested and location related?
# ##### To find the correlation between location and complaint types, we will consider below columns
# - Complaint Type
# - Borough
# - City
# - Longitude
# - Latitude

# In[256]:

corr_df = nyc_df[['Complaint Type','Borough','Longitude','Latitude','City']]
corr_df.info()

# Let us convert the variables to numerical values as corr() cannot be applied on Object types

# In[257]:

corr_df[corr_df['Latitude'] == 'unspecified']

# In[261]:

# Silence pandas' chained-assignment warning for the in-place fixes below
pd.options.mode.chained_assignment = None
# Replace the "unspecified" placeholder with 0 so the columns can be cast
corr_df.loc[corr_df['Latitude'] == 'unspecified', "Latitude"] = 0
corr_df.loc[corr_df['Longitude'] == 'unspecified', "Longitude"] = 0
corr_df['Latitude'] = corr_df.loc[:, 'Latitude'].astype('float64')
corr_df['Longitude'] = corr_df.loc[:, 'Longitude'].astype('float64')

# #### Note:
# We can ignore the above warning as pandas is trying to warn us about chained indexing

# In[262]:

from sklearn.preprocessing import LabelEncoder

# Encode every column (including the already-numeric coordinates) so that
# a single Pearson correlation matrix can be computed
labelEncoder = LabelEncoder()
for col in corr_df.columns:
    labelEncoder.fit(corr_df[col])
    corr_df[col] = labelEncoder.transform(corr_df[col])
corr_df.head(10)

# In[263]:

corr_df.corr(method="pearson")

# * As we can see from above table, there is no relationship between complaint type and location.
# * Which will be the general idea as in our data, most of the Complaint type are of 'Blocked Driveway' and 'Illegal Parking' which is common in all of the Boroughs.
| 186
| 0.724154
|
4a056037b52d835ca854cdf47d954d244e142bc4
| 769
|
py
|
Python
|
src/directory/forms.py
|
Aleksander-Protasevich/django_project
|
49d7b0f5e86d544f94340d8343995f6d4b70bbd8
|
[
"Apache-2.0"
] | null | null | null |
src/directory/forms.py
|
Aleksander-Protasevich/django_project
|
49d7b0f5e86d544f94340d8343995f6d4b70bbd8
|
[
"Apache-2.0"
] | null | null | null |
src/directory/forms.py
|
Aleksander-Protasevich/django_project
|
49d7b0f5e86d544f94340d8343995f6d4b70bbd8
|
[
"Apache-2.0"
] | null | null | null |
from django import forms
from django.db.models import fields
from . import models
class AuthorForm(forms.ModelForm):
    """Model form for creating/editing ``Author`` instances."""

    # The commented-out `clean` override (a demo rejecting the name "Alex")
    # was dead code and has been removed.
    class Meta:
        model = models.Author
        fields=('name', 'country')
class SeriesForm(forms.ModelForm):
    """Model-backed form exposing a Series' name."""

    class Meta:
        model = models.Series
        fields=('name',)
class GenreForm(forms.ModelForm):
    """Model-backed form exposing a Genre's name and description."""

    class Meta:
        model = models.Genre
        fields=('name', 'descr')
class PublishingForm(forms.ModelForm):
    """Model-backed form exposing a Publishing house's name."""

    class Meta:
        model = models.Publishing
        fields=('name',)
| 23.30303
| 59
| 0.603381
|
4a056082c16a177c7d727527c39e1f1e8d3e9ab9
| 572
|
py
|
Python
|
shell/util.py
|
houqp/shell.py
|
2b364900db3e2f2c542f21f8c979acd2a3b439b2
|
[
"MIT"
] | 42
|
2015-01-08T04:46:03.000Z
|
2022-03-06T07:35:04.000Z
|
shell/util.py
|
houqp/shell.py
|
2b364900db3e2f2c542f21f8c979acd2a3b439b2
|
[
"MIT"
] | 2
|
2015-06-04T08:53:43.000Z
|
2015-06-08T07:08:51.000Z
|
shell/util.py
|
houqp/shell.py
|
2b364900db3e2f2c542f21f8c979acd2a3b439b2
|
[
"MIT"
] | 7
|
2015-06-03T19:09:53.000Z
|
2021-11-27T05:42:23.000Z
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import tempfile
from .compat import is_py2, is_py3
def str_to_pipe(s):
    """Spool *s* into a rewound SpooledTemporaryFile so it can act as stdin.

    Text input (py2 ``unicode`` / py3 ``str``) is UTF-8 encoded first;
    bytes are written unchanged.  The file is seeked back to position 0
    before being returned.
    """
    pipe = tempfile.SpooledTemporaryFile()
    # The short-circuit keeps ``unicode`` from being evaluated on Python 3.
    is_text = (is_py2 and isinstance(s, unicode)) or (is_py3 and isinstance(s, str))
    if is_text:
        s = s.encode('utf-8')
    pipe.write(s)
    pipe.seek(0)
    return pipe
def check_attrs(obj, attr_lst):
    """Return True iff *obj* has every attribute named in *attr_lst*.

    Vacuously True for an empty *attr_lst*.
    """
    # Generator instead of a materialized list lets all() short-circuit
    # on the first missing attribute.
    return all(hasattr(obj, attr) for attr in attr_lst)
# Py2/py3 text shim: ``u(x)`` yields a unicode string from a plain literal
# on both interpreters.
if is_py3:
    def u(x):
        # Python 3 str is already unicode; return as-is.
        return x
else:
    import codecs
    def u(x):
        # Decode escape sequences the way a u'...' literal would.
        return codecs.unicode_escape_decode(x)[0]
| 19.066667
| 78
| 0.653846
|
4a0560f7857822ab0ec95d5b62381635dd32212b
| 1,762
|
py
|
Python
|
cvat/settings/testing.py
|
raunilillemets/cvat
|
c083b5d3a60270121abc3f3fe596ff94ae0eb60f
|
[
"MIT"
] | 2
|
2020-01-10T08:50:50.000Z
|
2020-01-23T06:11:11.000Z
|
cvat/settings/testing.py
|
raunilillemets/cvat
|
c083b5d3a60270121abc3f3fe596ff94ae0eb60f
|
[
"MIT"
] | 29
|
2020-01-28T23:08:18.000Z
|
2022-03-12T00:05:33.000Z
|
cvat/settings/testing.py
|
maitreyamaity/CVAT-SIM-TEST
|
6b97145c8f4584d9ad40a4b6541424955e272e42
|
[
"MIT"
] | 7
|
2021-07-27T09:15:22.000Z
|
2022-03-29T21:20:00.000Z
|
# Copyright (C) 2018 Intel Corporation
#
# SPDX-License-Identifier: MIT

# Test settings: inherit everything from development and redirect storage,
# logging, and password hashing so the test suite runs fast and leaves no
# artifacts behind.
from .development import *
import tempfile
# All media/share data goes into a per-run temporary directory.
# (``os`` comes in via the development settings' star import.)
_temp_dir = tempfile.TemporaryDirectory(suffix="cvat")
DATA_ROOT = os.path.join(_temp_dir.name, 'data')
os.makedirs(DATA_ROOT, exist_ok=True)
SHARE_ROOT = os.path.join(_temp_dir.name, 'share')
os.makedirs(SHARE_ROOT, exist_ok=True)
# To avoid ERROR django.security.SuspiciousFileOperation:
# The joined path (...) is located outside of the base path component
MEDIA_ROOT = _temp_dir.name
# Suppress all logs by default
for logger in LOGGING["loggers"].values():
    if isinstance(logger, dict) and "level" in logger:
        logger["level"] = "ERROR"
LOGGING["handlers"]["server_file"] = LOGGING["handlers"]["console"]
# MD5 keeps per-test user creation cheap; never use outside of tests.
PASSWORD_HASHERS = (
    'django.contrib.auth.hashers.MD5PasswordHasher',
)
# When you run ./manage.py test, Django looks at the TEST_RUNNER setting to
# determine what to do. By default, TEST_RUNNER points to
# 'django.test.runner.DiscoverRunner'. This class defines the default Django
# testing behavior.
TEST_RUNNER = "cvat.settings.testing.PatchedDiscoverRunner"
from django.test.runner import DiscoverRunner
class PatchedDiscoverRunner(DiscoverRunner):
    """Test runner that swaps real Redis for fakeredis and makes RQ synchronous."""

    def __init__(self, *args, **kwargs):
        # Used fakeredis for testing (don't affect production redis)
        from fakeredis import FakeRedis, FakeStrictRedis
        import django_rq.queues
        simple_redis = FakeRedis()
        strict_redis = FakeStrictRedis()
        # Every django_rq connection lookup now returns an in-memory fake.
        django_rq.queues.get_redis_connection = lambda _, strict: strict_redis \
            if strict else simple_redis
        # Run all RQ requests syncroniously
        # (RQ_QUEUES comes from the development settings' star import.)
        for config in RQ_QUEUES.values():
            config["ASYNC"] = False
        super().__init__(*args, **kwargs)
| 34.54902
| 80
| 0.727582
|
4a0561338eb58ed60f1fa0212343c7164a2fdf70
| 117
|
py
|
Python
|
docker/opencv-prebuilt/lib/python3.8/site-packages/cv2/config.py
|
ndtg-ai/docker-opencv-cpp
|
21fe1d9c882cf10ef5f5e48eabe53686b95885dd
|
[
"MIT"
] | null | null | null |
docker/opencv-prebuilt/lib/python3.8/site-packages/cv2/config.py
|
ndtg-ai/docker-opencv-cpp
|
21fe1d9c882cf10ef5f5e48eabe53686b95885dd
|
[
"MIT"
] | null | null | null |
docker/opencv-prebuilt/lib/python3.8/site-packages/cv2/config.py
|
ndtg-ai/docker-opencv-cpp
|
21fe1d9c882cf10ef5f5e48eabe53686b95885dd
|
[
"MIT"
] | null | null | null |
import os
# NOTE(review): this file is executed by cv2's loader, which presumably
# injects LOADER_DIR and an initial BINARIES_PATHS into the namespace
# before running it — confirm against cv2/__init__.py.  The relative hop
# four levels up points at the install prefix's lib64 directory.
BINARIES_PATHS = [
    os.path.join(os.path.join(LOADER_DIR, '../../../../'), 'lib64')
] + BINARIES_PATHS
| 19.5
| 67
| 0.606838
|
4a056158abac281a805223304d5067ea5d8f11ac
| 208
|
py
|
Python
|
trials_colab/stack_gan_trials/main.py
|
bkemmer/GSoC-TensorFlow-2019
|
9bde2939ee073504630e2810496aae3618b5afa2
|
[
"Apache-2.0"
] | 5
|
2021-07-08T15:49:58.000Z
|
2022-03-15T11:18:57.000Z
|
trials_colab/stack_gan_trials/main.py
|
bkemmer/GSoC-TensorFlow-2019
|
9bde2939ee073504630e2810496aae3618b5afa2
|
[
"Apache-2.0"
] | 13
|
2021-04-25T03:32:53.000Z
|
2022-03-11T23:53:16.000Z
|
trials_colab/stack_gan_trials/main.py
|
bkemmer/GSoC-TensorFlow-2019
|
9bde2939ee073504630e2810496aae3618b5afa2
|
[
"Apache-2.0"
] | 8
|
2021-03-08T17:20:43.000Z
|
2022-03-15T11:24:03.000Z
|
# Runner Code
# TODO: Train Stage 1
# TODO: Train Stage 2
import model as stack_gan
# Stage-1 training is currently disabled; only stage 2 is run.
# stage1 = stack_gan.StackGanStage1()
# stage1.train_stage1()
stage2 = stack_gan.StackGanStage2()
stage2.train_stage2()
| 17.333333
| 37
| 0.75
|
4a056270b07922775795b37e09350f8a0deaacab
| 91
|
py
|
Python
|
src/onegov/agency/theme/__init__.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
src/onegov/agency/theme/__init__.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
src/onegov/agency/theme/__init__.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
from onegov.agency.theme.agency_theme import AgencyTheme

# Public API of this package.
__all__ = (
    'AgencyTheme',
)
| 15.166667
| 56
| 0.747253
|
4a056421ca9a3ec3d461ab23d643f31cd3e9161a
| 655
|
py
|
Python
|
backend/hekshermgmt/api/v1/utils.py
|
biocatchltd/hekshermgmt
|
651da54cd88c463da9f9bd99446a9024d658e76f
|
[
"MIT"
] | 1
|
2021-02-22T06:47:55.000Z
|
2021-02-22T06:47:55.000Z
|
backend/hekshermgmt/api/v1/utils.py
|
biocatchltd/hekshermgmt
|
651da54cd88c463da9f9bd99446a9024d658e76f
|
[
"MIT"
] | 18
|
2021-05-05T14:33:51.000Z
|
2022-03-29T08:24:55.000Z
|
backend/hekshermgmt/api/v1/utils.py
|
biocatchltd/hekshermgmt
|
651da54cd88c463da9f9bd99446a9024d658e76f
|
[
"MIT"
] | null | null | null |
import httpx
from fastapi import Depends, Header, Request
from starlette.responses import Response
from hekshermgmt.context_vars import user
@Depends
def application(request: Request):
    """
    FastAPI dependency returning the current application instance
    (``request.app``).
    """
    return request.app
@Depends
async def get_user_name(x_forwarded_email: str = Header(...)) -> str:
    """
    Extract the caller's email from the X-Forwarded-Email header, store it
    in the ``user`` context variable, and return it.  The header is
    required (``Header(...)``), so requests without it are rejected.
    """
    user.set(x_forwarded_email)
    return x_forwarded_email
def httpx_error_to_response(error: httpx.HTTPStatusError) -> Response:
    """Mirror an upstream httpx error: same body and status code, passed through."""
    return Response(error.response.content, status_code=error.response.status_code)
| 24.259259
| 83
| 0.746565
|
4a056630a7fc20ab6624204ce67b073f7fd2d113
| 4,072
|
py
|
Python
|
opendomain_responder/basic_qa.py
|
maverickjoy/pepper-robot-facedetection-open-domain-answering
|
e17f4421eddf2334873261711d3cd132e90d1117
|
[
"MIT"
] | 7
|
2019-03-13T11:10:47.000Z
|
2022-02-20T11:14:26.000Z
|
opendomain_responder/basic_qa.py
|
maverickjoy/pepper-robot-facedetection-open-domain-answering
|
e17f4421eddf2334873261711d3cd132e90d1117
|
[
"MIT"
] | 9
|
2019-07-07T09:09:06.000Z
|
2022-03-11T23:39:58.000Z
|
opendomain_responder/basic_qa.py
|
maverickjoy/pepper-robot-facedetection-open-domain-answering
|
e17f4421eddf2334873261711d3cd132e90d1117
|
[
"MIT"
] | 1
|
2022-03-03T13:25:07.000Z
|
2022-03-03T13:25:07.000Z
|
import re
import json
import requests
import mechanize
from bs4 import BeautifulSoup
from mathsolver import mathsolver
from geo_locator import GeoLocator
# INITIALISING AUTO ANSWER BOT
# A mechanize browser is used to scrape Google result pages.
br = mechanize.Browser()
br.set_handle_equiv(True)
br.set_handle_redirect(True)
br.set_handle_referer(True)
br.set_handle_robots(False)
br.set_handle_refresh(False)
br.addheaders = [('User-agent', 'Firefox')]
# Keywords that classify a question as weather-related.
climate_list = ['climate', 'temp', 'temperature',
                'weather', 'hot', 'cold', 'humidity', 'rainy', 'rain']
# Weather API Initialisation
# base_url variable to store url
base_url = "http://api.openweathermap.org/data/2.5/weather?"
# Placeholder — must be replaced with a real OpenWeatherMap API key.
api_key = "AP_KEY_GET_FROM_OPENWEATHERMAP_FOR_FREE"
def answerQues(ques):
    # Answer a natural-language question (Python 2 code).  Routing:
    #   1. math questions    -> mathsolver
    #   2. weather keywords  -> OpenWeatherMap lookup
    #   3. 'who' questions   -> scraped Google answer span
    #   4. everything else   -> scraped Google featured snippet
    ans = ""
    try:
        net_detect = 0
        response = br.open('https://www.google.co.in')
        net_detect = 1  # reached only when the network is up
        br.select_form(nr=0)
        br.form['q'] = ques
        br.submit()
        src_code = br.response().read()
        def _checkWeatherQuestion(question):
            # True if any climate_list keyword occurs in the normalized question.
            question = re.sub(r'[?|$|.|!]', r'', question)
            question = re.sub(r'[^a-zA-Z0-9 ]', r'', question)
            for ele in climate_list:
                if ele in question.lower():
                    return True
            return False
        def _findTemperature(question):
            forecast = "Sorry I'm presently unable to get weather information at the moment"
            places = GeoLocator(question)
            city_name = 'Pune'  # default when no city is detected
            if len(places.cities) > 0:
                city_name = places.cities[0]
            complete_url = base_url + "appid=" + api_key + "&q=" + city_name
            response = requests.get(complete_url)
            x = response.json()
            if x["cod"] != "404":
                y = x["main"]
                current_temperature = y["temp"]
                current_pressure = y["pressure"]
                current_humidiy = y["humidity"]
                z = x["weather"]
                weather_description = z[0]["description"]
                # API reports Kelvin; -273 converts to (approximate) Celsius.
                forecast = "Presently in {} it is {} with {} degree Centigrade and humidity percentage is {}".format(
                    city_name, str(weather_description), str(current_temperature - 273), str(current_humidiy))
            return forecast
        def _whois():
            # Scrape the first answer <span> for "who ..." questions.
            answer = re.search('<span>(.*)', src_code)
            answer = answer.group()[:400]
            if 'wiki' in answer:
                answer = re.search('<span>(.*)<a', answer).group(1)
            else:
                answer = re.search(
                    '<span>(.*)</span></div></div><div', answer).group(1)
            return answer
        def _whatis():
            # Scrape the featured-snippet markup; falls back to _whois().
            spg = 1
            answer = re.search('"_sPg">(.*)', src_code)
            if answer == None:
                answer = re.search('<ol><li>(.*)', src_code)
                spg = 0
            if answer == None:
                return _whois()
            else:
                answer = answer.group()[:400]
                if '<b>' in answer:
                    answer = answer.replace('<b>', '')
                    answer = answer.replace('</b>', '')
                if spg:
                    answer = re.search(
                        '"_sPg">(.*)</div></div><div', answer).group(1)
                else:
                    answer = re.search('<ol><li>(.*)</li>', answer).group(1)
            return answer
        # Get Result For calculation question
        res = mathsolver.solve(ques)
        if res[0]: # Check Status
            ans = res[1]
        elif _checkWeatherQuestion(ques):
            ans = _findTemperature(ques)
        elif 'who' in ques:
            # NOTE(review): substring test also matches words like "whole".
            ans = _whois()
        else:
            ans = _whatis()
    except Exception as err:
        print "Cannot find answer segment : ", err
    # Strip any remaining HTML markup from the scraped answer.
    ans = str(BeautifulSoup(ans, "html.parser").text)
    return ans
if __name__ == "__main__":
    # Simple interactive loop (Python 2: print statement / raw_input).
    while True:
        print "Please enter your question"
        ques = raw_input('Ques : ')
        ans = answerQues(ques)
        print "> ", ans
| 31.8125
| 117
| 0.527996
|
4a05669fa21d435c8128d5d8f1323890199668b3
| 13,314
|
py
|
Python
|
src/hiding_adversarial_attacks/manipulation/utils.py
|
inovex/hiding-adversarial-attacks
|
d00ce66349e8fd9c27c6207289ee73c23f1b3f99
|
[
"MIT"
] | 1
|
2022-01-14T16:07:01.000Z
|
2022-01-14T16:07:01.000Z
|
src/hiding_adversarial_attacks/manipulation/utils.py
|
inovex/hiding-adversarial-attacks
|
d00ce66349e8fd9c27c6207289ee73c23f1b3f99
|
[
"MIT"
] | null | null | null |
src/hiding_adversarial_attacks/manipulation/utils.py
|
inovex/hiding-adversarial-attacks
|
d00ce66349e8fd9c27c6207289ee73c23f1b3f99
|
[
"MIT"
] | 1
|
2022-01-14T16:07:06.000Z
|
2022-01-14T16:07:06.000Z
|
import math
import os
from functools import partial
import pandas as pd
import torch
from matplotlib import pyplot as plt
from piqa import SSIM
from torch._vmap_internals import vmap
from torchmetrics.functional import mean_squared_error
from hiding_adversarial_attacks.classifiers.cifar_net import CifarNet
from hiding_adversarial_attacks.classifiers.fashion_mnist_net import FashionMNISTNet
from hiding_adversarial_attacks.classifiers.mnist_net import MNISTNet
from hiding_adversarial_attacks.config.attack.adversarial_attack_config import (
ALL_CLASSES,
)
from hiding_adversarial_attacks.config.data_sets.data_set_config import (
AdversarialDataSetNames,
)
from hiding_adversarial_attacks.config.losses.similarity_loss_config import (
SimilarityLossNames,
)
from hiding_adversarial_attacks.config.manipulated_model_training_config import (
ManipulatedModelTrainingConfig,
)
from hiding_adversarial_attacks.custom_metrics.pearson_corrcoef import (
custom_pearson_corrcoef,
)
from hiding_adversarial_attacks.manipulation.manipulated_cifar_net import (
ManipulatedCIFARNet,
)
from hiding_adversarial_attacks.manipulation.manipulated_fashion_mnist_net import (
ManipulatedFashionMNISTNet,
)
from hiding_adversarial_attacks.manipulation.manipulated_mnist_net import (
ManipulatedMNISTNet,
)
from hiding_adversarial_attacks.manipulation.metricized_explanations import (
MetricizedTopAndBottomExplanations,
)
from hiding_adversarial_attacks.visualization.config import data_set_mappings
from hiding_adversarial_attacks.visualization.data_set_images import (
visualize_difference_image_np,
)
from hiding_adversarial_attacks.visualization.explanations import (
interpolate_explanations,
visualize_single_explanation,
)
from hiding_adversarial_attacks.visualization.helpers import tensor_to_pil_numpy
from hiding_adversarial_attacks.visualization.normalization import normalize_to_range
def load_explanations(config, device: torch.device, stage: str = "training"):
    """Load pre-computed explanation tensors for one data split.

    Reads ``<stage>_orig_exp.pt`` and ``<stage>_adv_exp.pt`` from
    ``config.explanations_path`` onto *device* and returns the flattened
    tuple (orig_expl, orig_labels, orig_indices, adv_expl, adv_labels,
    adv_indices).
    """
    def _load(kind):
        # Each file stores an (explanations, labels, indices) triple.
        return torch.load(
            os.path.join(config.explanations_path, f"{stage}_{kind}_exp.pt"),
            map_location=device,
        )

    orig_expl, orig_labels, orig_indices = _load("orig")
    adv_expl, adv_labels, adv_indices = _load("adv")
    return (
        orig_expl,
        orig_labels,
        orig_indices,
        adv_expl,
        adv_labels,
        adv_indices,
    )
def load_attacked_data(data_path: str, device: torch.device, stage: str = "training"):
    """Load original and adversarial image/label pairs for one data split.

    Reads ``<stage>_orig.pt`` and ``<stage>_adv.pt`` from *data_path* onto
    *device*; each file stores an (images, labels) pair.  Returns
    (orig_images, orig_labels, adv_images, adv_labels).
    """
    def _load(suffix):
        return torch.load(
            os.path.join(data_path, f"{stage}_{suffix}.pt"),
            map_location=device,
        )

    orig_images, orig_labels = _load("orig")
    adv_images, adv_labels = _load("adv")
    return (
        orig_images,
        orig_labels,
        adv_images,
        adv_labels,
    )
def filter_included_classes(
    training_adv_expl,
    training_adv_images,
    training_adv_indices,
    training_adv_labels,
    training_orig_expl,
    training_orig_images,
    training_orig_indices,
    training_orig_labels,
    config,
    device,
):
    """Keep only samples whose ORIGINAL label is in ``config.included_classes``.

    A boolean mask is built over ``training_orig_labels`` (``+=`` on a bool
    tensor acts as logical OR across the included classes) and applied to
    all explanation/image/label tensors in lockstep.

    Returns (adv_expl, adv_images, adv_labels, orig_expl, orig_images,
    orig_labels), each filtered by the mask.

    Note: the ``*_indices`` parameters are accepted for signature
    compatibility but are not part of the return value (the original
    implementation filtered them into locals that were never returned —
    dead work, removed here).
    """
    mask = torch.zeros(len(training_orig_labels), dtype=torch.bool, device=device)
    for included_class in config.included_classes:
        mask += training_orig_labels == included_class
    return (
        training_adv_expl[mask],
        training_adv_images[mask],
        training_adv_labels[mask],
        training_orig_expl[mask],
        training_orig_images[mask],
        training_orig_labels[mask],
    )
def get_top_and_bottom_k_indices(
    similarities: torch.Tensor, k: int = 4, reverse: bool = False
):
    """Return indices of the k largest and k smallest similarity values.

    Without ``reverse`` the result is ``(top_k, bottom_k)`` with bottom_k
    ordered from the k-th smallest down to the smallest.  With ``reverse``
    the result is ``(bottom_k, top_k)`` with top_k ordered from the k-th
    largest up to the largest.  Both index tensors are returned as int64.
    """
    _, best = torch.topk(similarities, k=k)
    _, worst = torch.topk(similarities, k=k, largest=False)
    if reverse:
        return worst.long(), torch.flip(best, dims=(0,)).long()
    return best.long(), torch.flip(worst, dims=(0,)).long()
def get_metricized_top_and_bottom_explanations(
    config: ManipulatedModelTrainingConfig, device: torch.device
) -> MetricizedTopAndBottomExplanations:
    """Select the k=4 most- and least-similar (original, adversarial)
    explanation pairs of the training split and bundle them, with their
    images and label names, into a MetricizedTopAndBottomExplanations.

    Side effects: shows several matplotlib figures (similarity histogram,
    example explanations, and difference images) via plt.show().
    """
    (
        training_orig_images,
        training_orig_expl,
        training_orig_labels,
        training_adv_images,
        training_adv_expl,
        training_adv_labels,
    ) = load_filtered_data(config, device, stage="training")
    # ``reverse`` orients top/bottom for metrics where larger == less similar.
    reverse, similarities = get_similarities(
        config.similarity_loss.name, training_orig_expl, training_adv_expl
    )
    top_indices, bottom_indices = get_top_and_bottom_k_indices(
        similarities, k=4, reverse=reverse
    )
    # Single index tensor: first the top-k, then the bottom-k samples.
    top_bottom_indices = torch.cat((top_indices, bottom_indices), dim=0)
    # Plot similarity loss distribution on all training samples
    df_similarities = pd.DataFrame(similarities.cpu().detach().numpy())
    df_similarities.hist(bins=20, log=True)
    plt.show()
    # Explanations are upsampled to the image resolution for display.
    image_shape = training_orig_images.shape[-2], training_orig_images.shape[-1]
    train_img_top = tensor_to_pil_numpy(training_orig_images[top_bottom_indices])
    train_expl_top = tensor_to_pil_numpy(
        interpolate_explanations(training_orig_expl[top_bottom_indices], image_shape)
    )
    train_adv_top = tensor_to_pil_numpy(training_adv_images[top_bottom_indices])
    train_adv_expl_top = tensor_to_pil_numpy(
        interpolate_explanations(training_adv_expl[top_bottom_indices], image_shape)
    )
    # Visualize explanations (first = most similar pair)
    visualize_single_explanation(
        train_img_top[0],
        train_expl_top[0],
        f"Orig label: {training_orig_labels[top_bottom_indices][0]}",
        display_figure=True,
    )
    visualize_single_explanation(
        train_adv_top[0],
        train_adv_expl_top[0],
        f"Adv label: {training_adv_labels[top_bottom_indices][0]}",
        display_figure=True,
    )
    # Visualize difference images
    visualize_difference_image_np(
        train_adv_expl_top[0],
        train_expl_top[0],
        title="Explanation diff: adv vs. orig",
    )
    visualize_difference_image_np(
        train_img_top[0], train_adv_top[0], title="Image diff: adv vs. orig"
    )
    # Same visualizations for the last entry (= least similar pair).
    visualize_single_explanation(
        train_img_top[-1],
        train_expl_top[-1],
        f"Orig label: {training_orig_labels[top_bottom_indices][-1]}",
        display_figure=True,
    )
    visualize_single_explanation(
        train_adv_top[-1],
        train_adv_expl_top[-1],
        f"Adv label: {training_adv_labels[top_bottom_indices][-1]}",
        display_figure=True,
    )
    # Visualize difference images
    visualize_difference_image_np(
        train_adv_expl_top[-1],
        train_expl_top[-1],
        title="Explanation diff: adv vs. orig",
    )
    visualize_difference_image_np(
        train_img_top[-1], train_adv_top[-1], title="Image diff: adv vs. orig"
    )
    # Map numeric class ids to human-readable names for the container.
    label_mapping = data_set_mappings[config.data_set.name]
    orig_label_names = [
        label_mapping[int(label_id)]
        for label_id in training_orig_labels[top_bottom_indices].long()
    ]
    adv_label_names = [
        label_mapping[int(label_id)]
        for label_id in training_adv_labels[top_bottom_indices].long()
    ]
    metricized_top_and_bottom_explanations = MetricizedTopAndBottomExplanations(
        device=device,
        sorted_by=config.similarity_loss.name,
        top_and_bottom_indices=top_bottom_indices,
        top_and_bottom_original_images=training_orig_images[top_bottom_indices],
        top_and_bottom_original_explanations=training_orig_expl[top_bottom_indices],
        top_and_bottom_original_labels=training_orig_labels[top_bottom_indices].long(),
        top_and_bottom_original_label_names=orig_label_names,
        top_and_bottom_adversarial_images=training_adv_images[top_bottom_indices],
        top_and_bottom_adversarial_explanations=training_adv_expl[top_bottom_indices],
        top_and_bottom_adversarial_labels=training_adv_labels[
            top_bottom_indices
        ].long(),
        top_and_bottom_adversarial_label_names=adv_label_names,
    )
    # Drop references to the full training tensors before returning.
    del training_orig_images
    del training_orig_expl
    del training_orig_labels
    del training_adv_images
    del training_adv_expl
    del training_adv_labels
    return metricized_top_and_bottom_explanations
def get_similarities(similarity_loss_name, orig_explanations, adv_explanations):
    """Per-sample similarity between original and adversarial explanations.

    Returns (reverse, similarities): ``reverse`` is True when a larger
    score means LESS similar (currently only MSE), so callers can orient
    their top/bottom selection consistently.

    NOTE(review): the metric branches are independent ``if`` statements,
    not elif — an unrecognized name leaves ``similarities`` unbound and
    the return raises UnboundLocalError.
    """
    reverse = False
    if similarity_loss_name == SimilarityLossNames.MSE:
        similarity_loss = mean_squared_error
        # vmap evaluates the metric per sample instead of over the batch.
        batched_sim_loss = vmap(similarity_loss)
        similarities = batched_sim_loss(orig_explanations, adv_explanations)
        reverse = True
    if similarity_loss_name == SimilarityLossNames.SSIM:
        batched_sim_loss = SSIM(
            window_size=5, sigma=0.3, reduction="none", n_channels=1
        )
        if orig_explanations.is_cuda:
            batched_sim_loss = batched_sim_loss.cuda()
        # SSIM inputs are rescaled to [0, 1] first.
        orig_explanations = normalize_to_range(orig_explanations, 0, 1)
        adv_explanations = normalize_to_range(adv_explanations, 0, 1)
        if len(orig_explanations) > 10000:
            # Chunk very large batches into 10 splits to bound peak memory.
            similarities = torch.tensor([], device=orig_explanations.device)
            orig_expl = torch.split(
                orig_explanations, math.ceil(len(orig_explanations) / 10), dim=0
            )
            adv_expl = torch.split(
                adv_explanations, math.ceil(len(adv_explanations) / 10), dim=0
            )
            for orig_exp, adv_exp in zip(orig_expl, adv_expl):
                orig_exp = orig_exp.float()
                adv_exp = adv_exp.float()
                sim = batched_sim_loss(orig_exp, adv_exp)
                similarities = torch.cat((similarities, sim), dim=0)
        else:
            # SSIM runs in float32; downcast float64 inputs.
            if (
                orig_explanations.dtype == torch.float64
                and adv_explanations.dtype == torch.float64
            ):
                orig_explanations = orig_explanations.float()
                adv_explanations = adv_explanations.float()
            similarities = batched_sim_loss(orig_explanations, adv_explanations)
    if similarity_loss_name == SimilarityLossNames.PCC:
        similarity_loss = custom_pearson_corrcoef  # batched version of PCC in [-1, 1]
        batched_sim_loss = partial(similarity_loss)
        similarities = batched_sim_loss(orig_explanations, adv_explanations)
    return reverse, similarities
def load_filtered_data(config, device, stage: str = "training"):
    """Load explanations and attacked images for *stage*, optionally keeping
    only ``config.included_classes``.

    Filtering is skipped when the ALL_CLASSES sentinel is present in
    ``config.included_classes``.  Returns (orig_images, orig_expl,
    orig_labels, adv_images, adv_expl, adv_labels).
    """
    (
        orig_expl,
        orig_labels,
        orig_indices,
        adv_expl,
        adv_labels,
        adv_indices,
    ) = load_explanations(config, device, stage=stage)
    # Labels are already provided by load_explanations; only images are kept.
    (
        orig_images,
        _,
        adv_images,
        _,
    ) = load_attacked_data(config.explanations_path, device, stage=stage)
    # filter attacked data by included_classes
    if ALL_CLASSES not in config.included_classes:
        (
            adv_expl,
            adv_images,
            adv_labels,
            orig_expl,
            orig_images,
            orig_labels,
        ) = filter_included_classes(
            adv_expl,
            adv_images,
            adv_indices,
            adv_labels,
            orig_expl,
            orig_images,
            orig_indices,
            orig_labels,
            config,
            device,
        )
    return (
        orig_images,
        orig_expl,
        orig_labels,
        adv_images,
        adv_expl,
        adv_labels,
    )
def get_manipulatable_model(config):
    """Build the manipulated wrapper model matching ``config.data_set.name``.

    Loads the matching classifier from ``config.classifier_checkpoint`` and
    wraps it in the corresponding Manipulated*Net.

    Raises SystemExit for an unknown data set name.
    """
    name = config.data_set.name
    if name == AdversarialDataSetNames.ADVERSARIAL_MNIST:
        classifier_model = MNISTNet(config).load_from_checkpoint(
            config.classifier_checkpoint
        )
        return ManipulatedMNISTNet(classifier_model, config)
    # FASHION_MNIST and FASHION_MNIST_EXPL used two identical branches in the
    # original implementation; they are merged here.
    if name in [
        AdversarialDataSetNames.ADVERSARIAL_FASHION_MNIST,
        AdversarialDataSetNames.ADVERSARIAL_FASHION_MNIST_EXPL,
    ]:
        classifier_model = FashionMNISTNet(config).load_from_checkpoint(
            config.classifier_checkpoint
        )
        return ManipulatedFashionMNISTNet(classifier_model, config)
    if name in [
        AdversarialDataSetNames.ADVERSARIAL_CIFAR10,
        AdversarialDataSetNames.ADVERSARIAL_CIFAR10_EXPL,
    ]:
        classifier_model = CifarNet(config).load_from_checkpoint(
            config.classifier_checkpoint
        )
        return ManipulatedCIFARNet(classifier_model, config)
    raise SystemExit(
        f"Unknown data set specified: {config.data_set.name}. Exiting."
    )
| 35.69437
| 87
| 0.709479
|
4a056762beacdb642be484a0bd9db9cc29b95913
| 104
|
py
|
Python
|
deepspeed/ops/transformer/__init__.py
|
bratao/DeepSpeed
|
c50d8955e942e5e26cf81835d59ec3f20ef8540d
|
[
"MIT"
] | 1
|
2020-09-25T13:54:15.000Z
|
2020-09-25T13:54:15.000Z
|
deepspeed/ops/transformer/__init__.py
|
bratao/DeepSpeed
|
c50d8955e942e5e26cf81835d59ec3f20ef8540d
|
[
"MIT"
] | null | null | null |
deepspeed/ops/transformer/__init__.py
|
bratao/DeepSpeed
|
c50d8955e942e5e26cf81835d59ec3f20ef8540d
|
[
"MIT"
] | 1
|
2020-09-13T08:06:51.000Z
|
2020-09-13T08:06:51.000Z
|
from deepspeed.ops.transformer.transformer import DeepSpeedTransformerLayer, DeepSpeedTransformerConfig
| 52
| 103
| 0.913462
|
4a0569a4c2f43d79dedc15f722511e97e20adf8b
| 2,931
|
py
|
Python
|
tbx/core/migrations/0135_auto_20210728_1218.py
|
elviva404/wagtail-torchbox
|
718d9e2c4337073f010296932d369c726a01dbd3
|
[
"MIT"
] | 103
|
2015-02-24T17:58:21.000Z
|
2022-03-23T08:08:58.000Z
|
tbx/core/migrations/0135_auto_20210728_1218.py
|
elviva404/wagtail-torchbox
|
718d9e2c4337073f010296932d369c726a01dbd3
|
[
"MIT"
] | 145
|
2015-01-13T17:13:43.000Z
|
2022-03-29T12:56:20.000Z
|
tbx/core/migrations/0135_auto_20210728_1218.py
|
elviva404/wagtail-torchbox
|
718d9e2c4337073f010296932d369c726a01dbd3
|
[
"MIT"
] | 57
|
2015-01-03T12:00:37.000Z
|
2022-02-09T13:11:30.000Z
|
# Generated by Django 2.2.17 on 2021-07-28 11:18
from django.db import migrations, models
import wagtail.core.fields
class Migration(migrations.Migration):
    """Remove GlobalSettings' per-office address link/SVG and contact fields,
    and relax the remaining address fields (blank=True with help_text)."""

    dependencies = [
        ("torchbox", "0134_remove_googleadgrantspage_and_related_models"),
    ]

    operations = [
        migrations.RemoveField(
            model_name="globalsettings", name="bristol_address_link",
        ),
        migrations.RemoveField(
            model_name="globalsettings", name="bristol_address_svg",
        ),
        migrations.RemoveField(
            model_name="globalsettings", name="cambridge_address_link",
        ),
        migrations.RemoveField(
            model_name="globalsettings", name="cambridge_address_svg",
        ),
        migrations.RemoveField(model_name="globalsettings", name="contact_email",),
        migrations.RemoveField(model_name="globalsettings", name="contact_telephone",),
        migrations.RemoveField(model_name="globalsettings", name="contact_twitter",),
        migrations.RemoveField(
            model_name="globalsettings", name="email_newsletter_teaser",
        ),
        migrations.RemoveField(
            model_name="globalsettings", name="oxford_address_link",
        ),
        migrations.RemoveField(model_name="globalsettings", name="oxford_address_svg",),
        migrations.RemoveField(model_name="globalsettings", name="us_address_link",),
        migrations.RemoveField(model_name="globalsettings", name="us_address_svg",),
        migrations.AlterField(
            model_name="globalsettings",
            name="bristol_address",
            field=wagtail.core.fields.RichTextField(
                blank=True, help_text="Full address"
            ),
        ),
        migrations.AlterField(
            model_name="globalsettings",
            name="bristol_address_title",
            field=models.CharField(
                blank=True, help_text="Full address", max_length=255
            ),
        ),
        migrations.AlterField(
            model_name="globalsettings",
            name="oxford_address",
            field=wagtail.core.fields.RichTextField(
                blank=True, help_text="Full address"
            ),
        ),
        migrations.AlterField(
            model_name="globalsettings",
            name="oxford_address_title",
            field=models.CharField(
                blank=True, help_text="Full address", max_length=255
            ),
        ),
        migrations.AlterField(
            model_name="globalsettings",
            name="us_address",
            field=wagtail.core.fields.RichTextField(
                blank=True, help_text="Full address"
            ),
        ),
        migrations.AlterField(
            model_name="globalsettings",
            name="us_address_title",
            field=models.CharField(
                blank=True, help_text="Full address", max_length=255
            ),
        ),
    ]
| 36.185185
| 88
| 0.603889
|
4a0569a671819f31028115bd0cdab50572740a39
| 772
|
py
|
Python
|
pyairwatch/mam/vpp.py
|
llxp/PyVMwareAirWatch
|
5953f3f21b0fece20f2ec027fef42d8a3eb29de1
|
[
"MIT"
] | 2
|
2021-04-20T03:41:03.000Z
|
2021-09-23T10:56:11.000Z
|
pyairwatch/mam/vpp.py
|
llxp/PyVMwareAirWatch
|
5953f3f21b0fece20f2ec027fef42d8a3eb29de1
|
[
"MIT"
] | null | null | null |
pyairwatch/mam/vpp.py
|
llxp/PyVMwareAirWatch
|
5953f3f21b0fece20f2ec027fef42d8a3eb29de1
|
[
"MIT"
] | 1
|
2020-11-10T17:27:10.000Z
|
2020-11-10T17:27:10.000Z
|
from .mam import MAM
class VPP(MAM):
    """
    A class to manage VPP (purchased) applications via /apps/purchased.
    """
    def __init__(self, client):
        MAM.__init__(self, client)

    def get_vpp_details(self, application_id):
        """Return details for one purchased (VPP) application."""
        path = '/apps/purchased/{}'.format(application_id)
        # API version 2 is requested both via the versioned Content-Type
        # header and the explicit version kwarg.
        header = {'Content-Type': 'application/json;version=2'}
        return MAM._get(self, path=path, header=header , version=2)

    def search(self, **kwargs):
        """
        Search for VPP application details, its assignments, and deployment parameters.
        :param kwargs: passed through as query parameters of the search.
        :return: raw API response from /apps/purchased/search.
        """
        return MAM._get(self, path='/apps/purchased/search', params=kwargs)

    # Kept for reference; not currently implemented:
    # def search_by_atl_id(self, search_by, value):
    #     return self.search(search_by, str(value))
| 28.592593
| 87
| 0.629534
|
4a0569ef9a59f96dd90392ec504ef74acc14a751
| 362
|
py
|
Python
|
assignment3/assignment3_p1/kaggle_submission.py
|
yutong-xie/CS498-DL-Assignment
|
a0c93422c31a19ece7abbd2a7bb19f7feb8ea5ef
|
[
"MIT"
] | null | null | null |
assignment3/assignment3_p1/kaggle_submission.py
|
yutong-xie/CS498-DL-Assignment
|
a0c93422c31a19ece7abbd2a7bb19f7feb8ea5ef
|
[
"MIT"
] | null | null | null |
assignment3/assignment3_p1/kaggle_submission.py
|
yutong-xie/CS498-DL-Assignment
|
a0c93422c31a19ece7abbd2a7bb19f7feb8ea5ef
|
[
"MIT"
] | null | null | null |
import os
import csv
import numpy as np
def write_csv(file_path, y_list):
    """Write a Kaggle submission CSV with header ``id,category``.

    Row *i* holds ``1 - y`` for prediction ``y`` — the inversion presumably
    maps the model's labels onto the competition's category encoding
    (TODO: confirm against the scoring convention).
    """
    solution_rows = [('id', 'category')] + [(i, 1 - y) for (i, y) in enumerate(y_list)]
    # newline='' is required for csv.writer; without it Windows emits a
    # blank line after every row.
    with open(file_path, 'w', newline='') as f:
        writer = csv.writer(f)
        writer.writerows(solution_rows)
def output_submission_csv(output_file_path, y_test):
    """Write predictions *y_test* to *output_file_path* as a Kaggle submission."""
    write_csv(output_file_path, y_test)
| 27.846154
| 85
| 0.687845
|
4a056b38e1402af574d5406955e859935f70401a
| 1,590
|
py
|
Python
|
scripts/get_es_child_documents.py
|
svebk/qpr-winter-2017
|
3cf6eead549481591a1d83738af0e56a0ceeae56
|
[
"MIT"
] | null | null | null |
scripts/get_es_child_documents.py
|
svebk/qpr-winter-2017
|
3cf6eead549481591a1d83738af0e56a0ceeae56
|
[
"MIT"
] | null | null | null |
scripts/get_es_child_documents.py
|
svebk/qpr-winter-2017
|
3cf6eead549481591a1d83738af0e56a0ceeae56
|
[
"MIT"
] | null | null | null |
import certifi
import json
import os
import sys
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Search, Q
def search(obj_parents_l, es):
    """Scan for all documents whose ``obj_parent`` matches the given parents.

    *obj_parents_l* is JSON-encoded into the match value; ``scan()`` streams
    every hit instead of paginating.
    """
    return Search() \
        .using(es) \
        .filter(Q('match',
                  obj_parent=json.dumps(obj_parents_l)))\
        .scan()
if __name__ == '__main__':
    """
    Returns the Elasticsearch documents whose parents are in sys.argv.
    This is useful for retrieving image documents given a series of
    ad document ids.
    """
    # Connection settings live next to this script in es_config.json.
    config_file = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                               'es_config.json')
    if not os.path.isfile(config_file):
        print 'Configuration file not found.'
        sys.exit(1)
    elif len(sys.argv) < 2:
        print 'Usage: get_es_child_documents.py ad_cdr_id...'
        sys.exit(1)
    else:
        with open(config_file, 'rb') as infile:
            config = json.load(infile)
        es = Elasticsearch(config['url'],
                           http_auth=(config['user'], config['password']),
                           use_ssl=True,
                           verify_certs=True,
                           ca_certs=certifi.where())
        cdr_ids = sys.argv[1:]
        try:
            # connection timeout gets printed out, from where?
            # NOTE(review): cdr_ids is json.dumps()'d here AND again inside
            # search(), so the match value is double-encoded — verify intended.
            results = search(json.dumps(cdr_ids), es)
            for result in results:
                out = json.dumps(result.to_dict())
                if not out.startswith('ConnectionTimeout'):
                    print(out)
        except Exception as e:
            print str(e)
            sys.exit(5)
| 28.909091
| 75
| 0.571698
|
4a056b8569c2831a535aed7e91b2fe08a9b02b7a
| 3,162
|
py
|
Python
|
create_yml.py
|
emo-bon/pipeline-v5
|
bd500bd380ea96f7d2872f3843c57362109375b4
|
[
"Apache-2.0"
] | 1
|
2022-03-10T15:28:51.000Z
|
2022-03-10T15:28:51.000Z
|
create_yml.py
|
emo-bon/pipeline-v5
|
bd500bd380ea96f7d2872f3843c57362109375b4
|
[
"Apache-2.0"
] | null | null | null |
create_yml.py
|
emo-bon/pipeline-v5
|
bd500bd380ea96f7d2872f3843c57362109375b4
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
import argparse
from ruamel.yaml import YAML
import os
# RAW_READS_ANALYSIS = "raw-reads"
# ASSEMBLY_ANALYSIS = "assembly"
# AMPLICON_ANALYSIS = "amplicon"
# Keys in the template YAML whose 'path' values must be prefixed with the
# database directory supplied on the command line.
db_fields = [
    "ssu_db",
    "lsu_db",
    "ssu_tax",
    "lsu_tax",
    "ssu_otus",
    "lsu_otus",
    "rfam_models",
    "rfam_model_clans",
]
def db_dir(db_path, yaml_path):
    """Load the template YAML and prefix every database path with *db_path*.

    Each ``db_fields`` entry holds either a single mapping or a sequence of
    mappings with a ``path`` key; every such path gets *db_path* prepended.
    Returns the modified document.
    """
    if not db_path.endswith("/"):
        db_path += "/"
    with open(yaml_path) as handle:
        doc = YAML(typ="safe").load(handle)
    for field in db_fields:
        entry = doc[field]
        if isinstance(entry, (list, tuple)):
            for item in entry:
                item["path"] = os.path.join(db_path, item["path"])
        else:
            entry["path"] = os.path.join(db_path, entry["path"])
    return doc
if __name__ == "__main__":
    # CLI: merge read paths and feature toggles into the template YAML and
    # write the pipeline's input.yml.
    parser = argparse.ArgumentParser(
        description="Create the input.yml for the pipeline"
    )
    parser.add_argument(
        "-y", "--yml", dest="yml", help="YAML file with the constants", required=True
    )
    parser.add_argument(
        "-f", "--fr", dest="fr", help="forward reads file path", required=False
    )
    parser.add_argument(
        "-r", "--rr", dest="rr", help="reverse reads file path", required=False
    )
    parser.add_argument(
        "-o", "--output", dest="output", help="Output yaml file path", required=True
    )
    parser.add_argument(
        "-d",
        "--dbdir",
        dest="db_dir",
        help="Path to database directory",
        required=False,
    )
    parser.add_argument(
        "-q",
        "--qc_rna_predct",
        help="Quality control step for the case of rna prediction",
        required=False,
    )
    parser.add_argument(
        "-a",
        "--assembly",
        help="Assembly of the pre-processed reads using MEGAHIT",
        required=False,
    )
    args = parser.parse_args()
    print(f"Loading the constants from {args.yml}.")
    # load template yml file and append database path
    template_yml = db_dir(args.db_dir, args.yml)
    # NOTE(review): --fr/--rr are declared required=False but are used
    # unconditionally below; omitting them raises AttributeError on None.
    paired_reads = [args.fr.split("/")[-1].split(".fastq.gz")[0], args.rr.split("/")[-1].split(".fastq.gz")[0]]
    paired_reads_names = '"' + paired_reads[0] + '", "' + paired_reads[1] + '"'
    print("paired_reads: ", paired_reads)
    print("paired_reads_names: ", paired_reads_names)
    with open(args.output, "w") as output_yml:
        print("---------> Write .yml file.")
        yaml = YAML(typ="safe")
        template_yml["forward_reads"] = {
            "class": "File",
            "format": "edam:format_1930",
            "path": args.fr,
        }
        template_yml["reverse_reads"] = {
            "class": "File",
            "format": "edam:format_1930",
            "path": args.rr,
        }
        # Feature toggles arrive as the literal string "false".
        if args.qc_rna_predct == "false":
            template_yml["run_qc_rna_predict"] = False
        if args.assembly == "false":
            template_yml["assembly"] = False
        yaml.dump(template_yml, output_yml)
        print("<--------- the .yml is now done")
| 26.571429
| 111
| 0.56167
|
4a056ba798de61df36e57b7b06576c8016c1f487
| 4,843
|
py
|
Python
|
python/ray/tests/test_component_failures_2.py
|
AshHarvey/ray
|
f35339b5ff3d5e85c20720637e28bd5a380a545e
|
[
"Apache-2.0"
] | null | null | null |
python/ray/tests/test_component_failures_2.py
|
AshHarvey/ray
|
f35339b5ff3d5e85c20720637e28bd5a380a545e
|
[
"Apache-2.0"
] | null | null | null |
python/ray/tests/test_component_failures_2.py
|
AshHarvey/ray
|
f35339b5ff3d5e85c20720637e28bd5a380a545e
|
[
"Apache-2.0"
] | null | null | null |
import os
import signal
import sys
import time
import pytest
import ray
import ray.ray_constants as ray_constants
from ray.cluster_utils import Cluster
from ray.test_utils import RayTestTimeoutException, get_other_nodes
SIGKILL = signal.SIGKILL if sys.platform != "win32" else signal.SIGTERM
@pytest.fixture(params=[(1, 4), (4, 4)])
def ray_start_workers_separate_multinode(request):
    """Start a Ray cluster; each param is (num_nodes, workers_per_node)."""
    num_nodes = request.param[0]
    num_initial_workers = request.param[1]
    # Start the Ray processes.
    cluster = Cluster()
    for _ in range(num_nodes):
        cluster.add_node(num_cpus=num_initial_workers)
    ray.init(address=cluster.address)
    yield num_nodes, num_initial_workers
    # The code after the yield will run as teardown code.
    ray.shutdown()
    cluster.shutdown()
def test_worker_failed(ray_start_workers_separate_multinode):
    """Killing every worker mid-task must not hang; tasks either finish
    or raise a Ray task/worker error."""
    num_nodes, num_initial_workers = (ray_start_workers_separate_multinode)
    @ray.remote
    def get_pids():
        time.sleep(0.25)
        return os.getpid()
    # Collect the PID of every worker in the cluster by spamming cheap
    # tasks until all expected distinct PIDs have been observed.
    start_time = time.time()
    pids = set()
    while len(pids) < num_nodes * num_initial_workers:
        new_pids = ray.get([
            get_pids.remote()
            for _ in range(2 * num_nodes * num_initial_workers)
        ])
        for pid in new_pids:
            pids.add(pid)
        if time.time() - start_time > 60:
            raise RayTestTimeoutException(
                "Timed out while waiting to get worker PIDs.")
    @ray.remote
    def f(x):
        time.sleep(0.5)
        return x
    # Submit more tasks than there are workers so that all workers and
    # cores are utilized.
    object_refs = [f.remote(i) for i in range(num_initial_workers * num_nodes)]
    object_refs += [f.remote(object_ref) for object_ref in object_refs]
    # Allow the tasks some time to begin executing.
    time.sleep(0.1)
    # Kill the workers as the tasks execute.
    for pid in pids:
        os.kill(pid, SIGKILL)
        time.sleep(0.1)
    # Make sure that we either get the object or we get an appropriate
    # exception.
    for object_ref in object_refs:
        try:
            ray.get(object_ref)
        except (ray.exceptions.RayTaskError, ray.exceptions.RayWorkerError):
            pass
def _test_component_failed(cluster, component_type):
    """Kill a component on all worker nodes and check workload succeeds.

    Args:
        cluster: The Ray cluster under test.
        component_type: A ray_constants.PROCESS_TYPE_* string naming the
            process to kill on each worker node.
    """
    # Submit many tasks with many dependencies.
    @ray.remote
    def f(x):
        return x
    @ray.remote
    def g(*xs):
        return 1
    # Kill the component on all nodes except the head node as the tasks
    # execute. Do this in a loop while submitting tasks between each
    # component failure.
    time.sleep(0.1)
    worker_nodes = get_other_nodes(cluster)
    assert len(worker_nodes) > 0
    for node in worker_nodes:
        process = node.all_processes[component_type][0].process
        # Submit a round of tasks with many dependencies.
        x = 1
        for _ in range(1000):
            # Build a deep chain of 1000 dependent tasks.
            x = f.remote(x)
        xs = [g.remote(1)]
        for _ in range(100):
            # Build a wide fan-in: each task depends on all previous ones.
            xs.append(g.remote(*xs))
            xs.append(g.remote(1))
        # Kill a component on one of the nodes.
        process.terminate()
        time.sleep(1)
        process.kill()
        process.wait()
        assert not process.poll() is None
        # Make sure that we can still get the objects after the
        # executing tasks died.
        ray.get(x)
        ray.get(xs)
def check_components_alive(cluster, component_type, check_component_alive):
    """Check that a given component type is alive on all worker nodes.

    Args:
        cluster: The Ray cluster under test.
        component_type: A ray_constants.PROCESS_TYPE_* string.
        check_component_alive: If True, assert the component process is
            still running on every worker node; if False, wait for it to
            exit and assert it has terminated.
    """
    worker_nodes = get_other_nodes(cluster)
    assert len(worker_nodes) > 0
    for node in worker_nodes:
        process = node.all_processes[component_type][0].process
        if check_component_alive:
            # poll() returns None while the process is still running.
            assert process.poll() is None
        else:
            # Fixed: the original messages lacked a space before
            # "to terminate" ("...PID 123to terminate").
            print("waiting for " + component_type + " with PID " +
                  str(process.pid) + " to terminate")
            process.wait()
            print("done waiting for " + component_type + " with PID " +
                  str(process.pid) + " to terminate")
            # poll() returns the exit code once the process has exited.
            assert process.poll() is not None
@pytest.mark.parametrize(
    "ray_start_cluster", [{
        "num_cpus": 8,
        "num_nodes": 4,
        "_system_config": {
            "num_heartbeats_timeout": 100
        },
    }],
    indirect=True)
def test_raylet_failed(ray_start_cluster):
    """Killing raylets on worker nodes must not break the workload, and
    the plasma stores must survive the raylet deaths."""
    cluster = ray_start_cluster
    # Kill all raylets on worker nodes.
    _test_component_failed(cluster, ray_constants.PROCESS_TYPE_RAYLET)
    # The plasma stores should still be alive on the worker nodes.
    check_components_alive(cluster, ray_constants.PROCESS_TYPE_PLASMA_STORE,
                           True)
if __name__ == "__main__":
    # Allow running this test file directly (pytest is already imported at
    # module scope; this re-import is harmless).
    import pytest
    sys.exit(pytest.main(["-v", __file__]))
| 30.651899
| 79
| 0.644642
|
4a056d33eafa76c1a21a2ff4a77d72a8c6baa39f
| 2,418
|
py
|
Python
|
leetcode/String/929. Unique Email Addresses.py
|
yanshengjia/algorithm
|
0608d286be9c93d51768d47f21e569c6b0be9cda
|
[
"MIT"
] | 23
|
2019-08-02T12:02:47.000Z
|
2022-03-09T15:24:16.000Z
|
leetcode/String/929. Unique Email Addresses.py
|
yanshengjia/algorithm
|
0608d286be9c93d51768d47f21e569c6b0be9cda
|
[
"MIT"
] | null | null | null |
leetcode/String/929. Unique Email Addresses.py
|
yanshengjia/algorithm
|
0608d286be9c93d51768d47f21e569c6b0be9cda
|
[
"MIT"
] | 21
|
2019-12-22T04:47:32.000Z
|
2021-09-12T14:29:35.000Z
|
"""
Every email consists of a local name and a domain name, separated by the @ sign.
For example, in alice@leetcode.com, alice is the local name, and leetcode.com is the domain name.
Besides lowercase letters, these emails may contain '.'s or '+'s.
If you add periods ('.') between some characters in the local name part of an email address, mail sent there will be forwarded to the same address without dots in the local name. For example, "alice.z@leetcode.com" and "alicez@leetcode.com" forward to the same email address. (Note that this rule does not apply for domain names.)
If you add a plus ('+') in the local name, everything after the first plus sign will be ignored. This allows certain emails to be filtered, for example m.y+name@email.com will be forwarded to my@email.com. (Again, this rule does not apply for domain names.)
It is possible to use both of these rules at the same time.
Given a list of emails, we send one email to each address in the list. How many different addresses actually receive mails?
Example 1:
Input: ["test.email+alex@leetcode.com","test.e.mail+bob.cathy@leetcode.com","testemail+david@lee.tcode.com"]
Output: 2
Explanation: "testemail@leetcode.com" and "testemail@lee.tcode.com" actually receive mails
Solution:
simulation problem
"""
# time-O(n), where n is the length of list emails
# space-O(m), where m is the number of unique emails
class Solution:
    def numUniqueEmails(self, emails: List[str]) -> int:
        """Count distinct delivery addresses after normalizing each email.

        Normalization: in the local part, dots are removed and everything
        from the first '+' onward is ignored; the domain is untouched.
        Each entry of ``emails`` is rewritten in place to its normalized
        form, and the count of first-seen addresses is returned.
        """
        seen = {}
        unique = 0
        for idx, address in enumerate(emails):
            local, _, domain = address.partition('@')
            # Truncating at '+' first and stripping dots second is
            # equivalent to the reverse order, since '.' removal never
            # touches '+'.
            local = local.split('+')[0].replace('.', '')
            normalized = local + '@' + domain
            emails[idx] = normalized
            if normalized in seen:
                seen[normalized] += 1
            else:
                seen[normalized] = 1
                unique += 1
        return unique
# time-O(n), where n is the length of list emails
# space-O(m), where m is the number of unique emails
class Solution:
    def numUniqueEmails(self, emails: List[str]) -> int:
        """Count distinct delivery addresses after normalizing each email.

        Normalization drops '.' in the local part and ignores everything
        from the first '+'; the domain is untouched. Each entry of
        ``emails`` is rewritten in place to its normalized form.
        (Removed the unused ``res`` counter from the original.)
        """
        d = set()
        for i in range(len(emails)):
            local, domain = emails[i].split('@')
            local = local.replace('.', '')
            local = local.split('+')[0]
            emails[i] = '{}@{}'.format(local, domain)
            d.add(emails[i])
        return len(d)
| 39
| 332
| 0.628619
|
4a056d6e5f3d685e3d74bd470be9eec8d995cc58
| 3,323
|
py
|
Python
|
user.py
|
myangelaku/Riplay
|
96a08de7239acd79cbc2a1ffc3482539ba54c2b5
|
[
"MIT"
] | null | null | null |
user.py
|
myangelaku/Riplay
|
96a08de7239acd79cbc2a1ffc3482539ba54c2b5
|
[
"MIT"
] | null | null | null |
user.py
|
myangelaku/Riplay
|
96a08de7239acd79cbc2a1ffc3482539ba54c2b5
|
[
"MIT"
] | null | null | null |
import requests
import errno
import os
import sys
import urllib.request
# Gets the Score Ids from Ripple's API then downloads the corresponding replays
def getReplays(username, mode):
    """Download the user's best replays for a game mode from Ripple.

    Fetches the best-score list from the Ripple API, ensures a directory
    named after the user exists, and saves each replay to
    "<username>/<username> - <song name>.osr". Exits the program if a
    download fails or the user has no scores for the mode.

    :param username: Ripple username whose replays are downloaded.
    :param mode: Ripple API mode number (0=osu!, 1=Taiko, 2=CTB, 3=Mania).
    """
    url = "https://ripple.moe/api/v1/users/scores/best?name=" + username + "&mode=" + str(mode)
    data = getJSON(url)
    # Check if username directory exists, create if it doesn't.
    newpath = os.getcwd() + "/" + username
    if not os.path.exists(newpath):
        os.makedirs(newpath)
    # Download each score and store inside the username's folder
    try:
        for score in data['scores']:
            songName = score['beatmap']['song_name']
            scoreId = score['id']
            # Replace characters that are illegal in file names.
            # Bug fix: the original list contained "\/" (a backslash
            # followed by a slash, since \/ is not an escape), so a plain
            # "/" in a song name was never stripped and corrupted the
            # output path.
            nastyCharacters = ["/", "\\", "<", ">", "?", ":", "*", "|", "\""]
            for char in nastyCharacters:
                songName = songName.replace(char, " ")
            # Specify file path
            directory = os.path.join(os.getcwd() + "/" + username)
            fullfilename = directory + "/" + username + " - " + songName + '.osr'
            # Download Replay
            try:
                # Create Opener w/ headers
                opener = urllib.request.build_opener()
                opener.addheaders = [('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1941.0 Safari/537.36')]
                urllib.request.install_opener(opener)
                # URL & File path
                url = 'https://ripple.moe/web/replays/' + str(scoreId)
                local = str(fullfilename)
                # Download
                urllib.request.urlretrieve(url, local)
                print("Downloading Replay: " + songName + ".osr...")
            except Exception as e:
                print("ERROR: Could not download file: " + songName + ".osr", e)
                sys.exit(1)
        print("Download Complete.")
        return
    except Exception as e:
        print("\nCan't download replays because the user doesn't have any scores for this mode.", e)
        sys.exit(1)
# Get Game Mode from the user
def getMode():
    """Prompt for a game mode and start the replay downloads.

    Re-prompts until a valid menu choice (1-4) is entered. Bug fixes: the
    original recursed on invalid input but discarded the recursive result
    and still proceeded with the invalid mode, and non-numeric input
    crashed ``int()``.
    """
    while True:
        choice = input("\nSelect the game mode you'd like to download replays for\n1. osu!\n2. Taiko\n3. CTB\n4. Mania\n\nGame Mode: ")
        try:
            mode = int(choice)
        except ValueError:
            print("\nInvalid choice given, please try again.")
            continue
        if 1 <= mode <= 4:
            break
        print("\nInvalid choice given, please try again.")
    # Mode number for the Ripple API is 1 less than the options given in the mode input
    return getReplays(username, mode - 1)
# Gets JSON then calls a given function afterwards
def getJSON(url):
    """Fetch ``url`` and return the decoded JSON payload.

    Exits the program when the API reports a non-200 code or the request
    fails; retries once on a timeout.

    :param url: The Ripple API URL to request.
    :returns: The decoded JSON response.
    """
    try:
        data = requests.get(url=url).json()
        # Some responses may omit "code"; use .get() so a missing key is
        # treated as success instead of raising KeyError.
        if data.get('code') and data['code'] != 200:
            print("Invalid request given, please try again\n")
            sys.exit(1)
        return data
    except requests.exceptions.Timeout:
        # Retry once on timeout. Bug fix: the original retried but
        # discarded the result, so the caller received None.
        return requests.get(url=url).json()
    except requests.exceptions.TooManyRedirects:
        print("Invalid link given")
        sys.exit(1)
    except requests.exceptions.RequestException as e:
        print(e)
        sys.exit(1)
# Main Execution
username = input("Enter a Ripple username to start downloading replays: ")
url = 'https://ripple.moe/api/v1/users?name=' + username
# This request validates that the user exists (getJSON exits on an API
# error); the returned stats are not used further.
userStats = getJSON(url)
getMode()
| 33.565657
| 160
| 0.589528
|
4a056d947d1c383b46133ea46c86d6a64e04794f
| 17,189
|
py
|
Python
|
src/wired/container.py
|
theasylum/wired
|
6b6a3e83702b18ebb41ca1f94e957bdf7e44986d
|
[
"MIT"
] | 12
|
2018-07-22T15:40:35.000Z
|
2020-12-27T21:39:18.000Z
|
src/wired/container.py
|
theasylum/wired
|
6b6a3e83702b18ebb41ca1f94e957bdf7e44986d
|
[
"MIT"
] | 36
|
2019-03-23T13:47:25.000Z
|
2020-11-28T18:08:14.000Z
|
src/wired/container.py
|
theasylum/wired
|
6b6a3e83702b18ebb41ca1f94e957bdf7e44986d
|
[
"MIT"
] | 6
|
2019-03-23T20:08:57.000Z
|
2021-06-03T16:52:06.000Z
|
import weakref
from zope.interface import Interface, implementedBy, providedBy
from zope.interface.interface import InterfaceClass
from zope.interface.interfaces import IInterface
from zope.interface.adapter import AdapterRegistry
__all__ = ['ServiceContainer', 'ServiceRegistry']
class Sentinel:
def __init__(self, name):
self.name = name
def __repr__(self):
return '<' + self.name + '>'
_marker = Sentinel('default')
# Marker interfaces used purely as lookup keys in the adapter registries;
# nothing ever implements them directly.
class IServiceFactory(Interface):
    """ A marker interface for service factories."""
class IServiceInstance(Interface):
    """ A marker interface for service instances."""
class IContextFinalizer(Interface):
    """ A marker interface for a finalizer invocable when a context dies."""
class ServiceFactoryInfo:
    """Record describing a registered factory and its lookup constraints."""

    def __init__(self, factory, service_iface, context_iface, wants_context):
        # Honor the __wired_factory__ protocol: an object may delegate
        # construction to a dedicated callable attribute.
        self.factory = getattr(factory, '__wired_factory__', factory)
        self.service_iface = service_iface
        self.context_iface = context_iface
        self.wants_context = wants_context
class SingletonServiceWrapper:
    """Factory adapter that always returns the same pre-built service."""

    def __init__(self, service):
        self.service = service

    def __call__(self, services):
        # The container argument is ignored; the wrapped instance is fixed.
        return self.service
class ServiceCache:
    """
    A per-context registry that avoids leaking memory when a context object
    is garbage collected.
    The goal of the cache is to keep any instantiated services alive for
    ``min(context_lifetime, self_lifetime)``.
    """
    _AdapterRegistry = AdapterRegistry  # for testing
    def __init__(self, default=None):
        # Bug fix: the default context was previously discarded
        # (``self._default = None``), so lookups made without an explicit
        # context always fell back to ``None`` instead of the context the
        # cache was created for.
        self._default = default
        # Maps id(context) -> per-context AdapterRegistry.
        self._contexts = {}
        # Weakref to self so per-context finalizers don't keep us alive.
        self._ref = weakref.ref(self)
    def __del__(self):
        # try to remove the finalizers from the contexts incase the context
        # is still alive, there's no sense in having a weakref attached to it
        # now that the cache is dead
        for ctx_id, ctx_cache in self._contexts.items():
            finalizer = ctx_cache.lookup(
                (), IContextFinalizer, name='', default=_marker
            )
            if finalizer is not _marker:  # pragma: no cover
                finalizer.detach()
    def find(self, context=_marker):
        """Return the per-context registry if one exists, else ``None``."""
        if context is _marker:
            context = self._default
        ctx_id = id(context)
        return self._contexts.get(ctx_id, None)
    def get(self, context=_marker):
        """Return the per-context registry, creating it on first use."""
        if context is _marker:
            context = self._default
        contexts = self._contexts
        ctx_id = id(context)
        ctx_cache = contexts.get(ctx_id, None)
        if ctx_cache is None:
            ctx_cache = self._AdapterRegistry()
            try:
                # Drop the per-context registry automatically when the
                # context object is garbage collected.
                finalizer = weakref.finalize(
                    context,
                    context_finalizer,
                    cache_ref=self._ref,
                    ctx_id=ctx_id,
                )
            except TypeError:
                # not every type supports weakrefs, in which case we
                # simply cannot release the ctx_cache early
                pass
            else:
                finalizer.atexit = False
                ctx_cache.register((), IContextFinalizer, '', finalizer)
            contexts[ctx_id] = ctx_cache
        return ctx_cache
def context_finalizer(cache_ref, ctx_id):  # pragma: no cover
    """Drop a context's registry when the context object dies.

    Invoked as a ``weakref.finalize`` callback; if the cache is still
    alive, remove the per-context entry so no references to the registry
    linger for a dead context.
    """
    cache = cache_ref()
    if cache is None:
        return
    cache._contexts.pop(ctx_id, None)
class ServiceContainer:
    """
    A service container is used to create service instances.
    Create a container via :meth:`wired.ServiceRegistry.create_container`.
    A container controls creating services from the registered factories.
    Services are cached based on their registration constraints and re-used
    when possible based on the context and requested interface.
    """
    _ServiceCache = ServiceCache # for testing
    def __init__(self, factories, cache=None, context=None):
        # A fresh cache is created per container unless an existing one is
        # shared in via ``bind`` (which forwards its own cache).
        if cache is None:
            cache = self._ServiceCache(context)
        self._factories = factories
        self._cache = cache
        self.context = context
    def bind(self, *, context):
        """
        Return a new container sharing the same cache but bound to ``context``.
        """
        if context is self.context:
            return self
        return self.__class__(
            factories=self._factories, cache=self._cache, context=context
        )
    def get(
        self,
        iface_or_type=Interface,
        *,
        context=_marker,
        name='',
        default=_marker
    ):
        """
        Find a cached instance or create one from the registered factory.
        The instance is found using the following algorithm:
        1. Find an instance matching the criteria in the container. If one
           is found, return it directly.
        2. Search for a factory, first in the container and second on the
           service registry. If one is not found, raise a ``LookupError`` or,
           if specified, return ``default``.
        3. Invoking the factory, cache the result in the container for later
           lookups, and return the result.
        :param iface_or_type: The registered service interface.
        :param context: A context object. This object will be available as
            ``container.context`` in the invoked service factories and will
            influence which factories are matched. Defaults to the bound
            :attr:`.context` on the container.
        :param str name: The registered name of the service.
        :param default: A service instance to return if lookup fails.
        """
        # Re-dispatch through a container bound to the requested context so
        # the rest of this method can assume ``self.context`` is the one.
        if context is not _marker and context is not self.context:
            proxy = self.bind(context=context)
            return proxy.get(iface_or_type, name=name, default=default)
        context = self.context
        iface = _iface_for_type(iface_or_type)
        context_iface = providedBy(context)
        cache = self._cache.get(context)
        # Step 1: return a previously-created instance for this context.
        inst = cache.lookup(
            (IServiceInstance, context_iface),
            iface,
            name=name,
            default=_marker,
        )
        if inst is not _marker:
            return inst
        svc_info = None
        # lookup in the local registry if it exists
        factories = self._cache.find()
        if factories is not None:
            svc_info = _find_factory(factories, iface, context_iface, name)
        # lookup in the global registry
        if svc_info is None:
            svc_info = _find_factory(
                self._factories, iface, context_iface, name
            )
        if svc_info is None:
            if default is not _marker:
                return default
            raise LookupError('could not find registered service factory')
        # there is no service registered for this context, fallback
        # to see if there is one registered for context=None by hiding
        # the current context for the remainder of the lookup
        if not svc_info.wants_context and context is not None:
            proxy = self.bind(context=None)
            return proxy.get(iface_or_type, name=name, default=default)
        # Step 3: instantiate via the factory and cache the result.
        inst = svc_info.factory(self)
        # make sure to register the service using the original, general
        # context_iface, not the provided one as it may be more specific
        cache.register(
            (IServiceInstance, svc_info.context_iface),
            svc_info.service_iface,
            name,
            inst,
        )
        return inst
    def set(
        self, service, iface_or_type=Interface, *, context=_marker, name=''
    ):
        """
        Add a service instance to the container.
        Upon success, ``service`` will be returned for matching lookups on
        the same context.
        If this service registration would affect a previously-cached lookup
        then it will raise a ``ValueError``.
        :param service: A service instance to cache.
        :param iface_or_type: A class or ``zope.interface.Interface`` object
            defining the interface of the service. Defaults to
            ``zope.interface.Interface`` to match any requested interface.
        :param context: A context object. The ``service`` instance will be
            cached for any later lookups using this context. Defaults to the
            bound :attr:`.context` on the container.
        :param str name: An identifier for the service.
        """
        if context is _marker:
            context = self.context
        iface = _iface_for_type(iface_or_type)
        context_iface = providedBy(context)
        cache = self._cache.get(context)
        # Refuse to shadow an instance that earlier lookups already saw.
        inst = cache.lookup(
            (IServiceInstance, context_iface),
            iface,
            name=name,
            default=_marker,
        )
        if inst is not _marker:
            raise ValueError(
                'a service instance is already cached that would conflict '
                'with this registration'
            )
        cache.register((IServiceInstance, context_iface), iface, name, service)
    def register_factory(
        self, factory, iface_or_type=Interface, *, context=None, name=''
    ):
        """
        Register a service factory.
        This factory will override any lookups defined in the service registry.
        Otherwise the semantics are identical to
        :meth:`.ServiceRegistry.register_factory`.
        """
        iface = _iface_for_type(iface_or_type)
        context_iface = _iface_for_context(context)
        wants_context = context is not None
        info = ServiceFactoryInfo(factory, iface, context_iface, wants_context)
        # Local factories live in this container's cache, not the registry.
        factories = self._cache.get()
        _register_factory(info, factories, iface, context_iface, name)
    def register_singleton(
        self, service, iface_or_type=Interface, *, context=None, name=''
    ):
        """
        Register a singleton instance.
        Functionally, the singleton is wrapped in a factory that always
        returns the same instance when invoked. See
        :meth:`.ServiceRegistry.register_factory` for information on the
        parameters.
        """
        service_factory = SingletonServiceWrapper(service)
        return self.register_factory(
            service_factory, iface_or_type, context=context, name=name
        )
class ServiceRegistry:
    """
    A service registry contains service factory definitions.
    Define the tree of services your application needs once at config-time.
    Later, per operation, invoke :meth:`.create_container` to create a new
    service container which can be used to lazily instantiate service
    objects on-demand.
    Using this pattern, your code now depends on the container and your
    service interfaces. You are now programming to an interface, not to a
    specific implementation. It is now trivial to register a different
    factory to mock out, or replace, specific service implementations in
    tests or for any other purposes.
    """
    _AdapterRegistry = AdapterRegistry # for testing
    _ServiceContainer = ServiceContainer # for testing
    def __init__(self, factory_registry=None):
        if factory_registry is None:
            factory_registry = self._AdapterRegistry()
        self._factories = factory_registry
    def create_container(self, *, context=None):
        """
        Create a new :class:`.ServiceContainer` linked to the registry.
        A container will use all the registered service factories,
        independently of any other containers, in order to find and
        instantiate service objects.
        Practically, a new container should be derived per logical
        "operation". An operation is something like a web request, job,
        transaction, etc.
        :param context: The container will be bound to a different context
            object, affecting which factories are selected. By default,
            the container is bound to the ``None`` context.
        """
        # Containers share the registry's factories but each gets its own
        # instance cache.
        return self._ServiceContainer(self._factories, context=context)
    def register_factory(
        self, factory, iface_or_type=Interface, *, context=None, name=''
    ):
        """
        Register a service factory.
        A factory should accept a single parameter which will be a
        :class:`.ServiceContainer` instance. The factory should not be bound
        to any particular container and should use the one passed in to find
        service dependencies.
        A factory can be registered for a particular type or interface, with
        more specific factories allowed per type of ``context`` or by
        ``name`` string.
        It is recommended to register factories using types/interfaces instead
        of named strings, as they avoid naming clashes between independently
        defined components/features. Types are always unique and are better
        at expressing intent and contracts.
        An example service factory:
        .. code-block:: python
            def login_factory(container):
                dbsession = container.get(name='dbsession')
                return LoginService(dbsession)
        Notice in the above example that the ``login_factory`` requires
        another service named ``dbsession`` to be registered which triggers a
        recursive lookup for that service in order to create the
        ``LoginService`` instance.
        It is not required that the returned service actually implements,
        or is a subclass, of the defined ``iface``.
        :param factory: A factory is a callable that accepts a container
            argument and returns an instance of the service. Specifically,
            ``factory(services: ServiceContainer) -> iface``.
        :param iface_or_type: A class or ``zope.interface.Interface`` object
            defining the interface of the service. Defaults to
            ``zope.interface.Interface`` to match any requested interface.
        :param context: A class or ``zope.interface.Interface`` object
            defining the type of :attr:`.context` required in order to use
            the factory. Defaults to ``None``.
        :param str name: An identifier for the service. A factory can be
            registered for an ``iface_or_type`` or a ``name`` or both, but an
            ``iface_or_type`` is recommended for most services.
        """
        iface = _iface_for_type(iface_or_type)
        context_iface = _iface_for_context(context)
        wants_context = context is not None
        info = ServiceFactoryInfo(factory, iface, context_iface, wants_context)
        _register_factory(info, self._factories, iface, context_iface, name)
    def register_singleton(
        self, service, iface_or_type=Interface, *, context=None, name=''
    ):
        """
        Register a singleton instance.
        The singleton is global to all containers created from this registry.
        Any container created by this registry will receive the same instance.
        Functionally, the singleton is wrapped in a factory that always
        returns the same instance when invoked. See :meth:`.register_factory`
        for information on the parameters.
        """
        service_factory = SingletonServiceWrapper(service)
        return self.register_factory(
            service_factory, iface_or_type, context=context, name=name
        )
    def find_factory(self, iface_or_type=Interface, *, context=None, name=''):
        """
        Return the factory registered for the given parameters.
        The arguments are the same as those used in :meth:`.register_factory`.
        :returns: The registered factory (or singleton wrapper) or ``None``
            if a factory cannot be found satisfying the constraints.
        """
        iface = _iface_for_type(iface_or_type)
        context_iface = _iface_for_context(context)
        svc_info = _find_factory(self._factories, iface, context_iface, name)
        if svc_info is not None:
            return svc_info.factory
def _register_factory(info, factories, iface, context_iface, name):
    """Store *info* in *factories* under the (factory-marker, context) key."""
    required = (IServiceFactory, context_iface)
    factories.register(required, iface, name, info)
def _find_factory(factories, iface, context_iface, name):
    """Return the matching ServiceFactoryInfo, or ``None`` if unregistered."""
    required = (IServiceFactory, context_iface)
    return factories.lookup(required, iface, name=name, default=None)
def _iface_for_type(obj):
    """Return a zope interface for *obj*, generating and caching one if needed."""
    # An actual interface can be used directly.
    if IInterface.providedBy(obj):
        return obj
    # Reuse a previously generated interface cached on the object itself.
    # __dict__ is checked (rather than getattr) so a cached value on a base
    # class is not mistaken for this object's own.
    cached = obj.__dict__.get('_service_iface', None)
    if cached is not None:
        return cached
    # Generate a fresh interface, then cache it on the object.
    generated = InterfaceClass(
        '%s_%s_IService' % (obj.__qualname__, id(obj)),
        __doc__='service_factory generated interface',
    )
    obj._service_iface = generated
    return generated
def _iface_for_context(obj):
    """Map a context spec to an interface: None -> Interface, class -> implementedBy."""
    if obj is None:
        return Interface
    if IInterface.providedBy(obj):
        return obj
    return implementedBy(obj)
| 35.008147
| 79
| 0.648962
|
4a056e27a438caaa38f259bf440f761365d2ab22
| 11,349
|
py
|
Python
|
sonnet/python/modules/base_info_test.py
|
ankitshah009/sonnet
|
a07676192c6d0f2ed5967d6bc367d62e55835baf
|
[
"Apache-2.0"
] | 3
|
2019-07-31T12:36:26.000Z
|
2020-12-16T14:37:19.000Z
|
sonnet/python/modules/base_info_test.py
|
ankitshah009/sonnet
|
a07676192c6d0f2ed5967d6bc367d62e55835baf
|
[
"Apache-2.0"
] | null | null | null |
sonnet/python/modules/base_info_test.py
|
ankitshah009/sonnet
|
a07676192c6d0f2ed5967d6bc367d62e55835baf
|
[
"Apache-2.0"
] | 3
|
2019-07-29T08:55:20.000Z
|
2019-07-30T06:36:56.000Z
|
# Copyright 2017 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for sonnet.python.modules.base."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
# Dependency imports
from sonnet.python.modules import base
from sonnet.python.modules import base_info
from sonnet.python.modules import basic
import tensorflow as tf
# Short aliases for TF utilities used throughout the tests.
nest = tf.contrib.framework.nest
logging = tf.logging
# Module paths used when checking ModuleInfo.class_name values.
THIS_MODULE = "__main__"
LINEAR_MODULE = "sonnet.python.modules.basic"
# Simple two-field namedtuple used to exercise nested-structure handling.
DumbNamedTuple = collections.namedtuple("DumbNamedTuple", ("arg1", "arg2"))
class NotATensor(object):
  """Plain object that is neither a tensor nor an iterable structure."""
class DumbModule(base.AbstractModule):
  """Dumb module to test ModuleInfo."""
  def __init__(self, name, no_nest=False):
    base.AbstractModule.__init__(self, name=name)
    # When True, _build returns inputs untouched instead of mapping
    # tf.identity over the (possibly nested) structure.
    self.no_nest = no_nest
  def _build(self, inputs):
    # NotATensor and SparseTensor inputs are passed through unchanged.
    if isinstance(inputs, (NotATensor, tf.SparseTensor)):
      outputs = inputs
    else:
      if self.no_nest:
        outputs = inputs
      else:
        outputs = nest.map_structure(tf.identity, inputs)
    return outputs
def _copy_default_graph():
  """Round-trip the default graph through a MetaGraphDef.

  The tests call this between checks to verify that collected module info
  survives graph export/import.
  """
  # Save default graph into `meta_graph_def`.
  meta_graph_def = tf.train.export_meta_graph()
  # Reset default graph.
  tf.reset_default_graph()
  # Load default graph from `meta_graph_def`.
  tf.train.import_meta_graph(meta_graph_def)
class ModuleInfoTest(tf.test.TestCase):
def testIsNamedTuple(self):
self.assertTrue(base_info._is_namedtuple(DumbNamedTuple(1, 2)))
self.assertFalse(base_info._is_namedtuple((1, 2, 3)))
self.assertFalse(base_info._is_namedtuple([1, 2, 3]))
self.assertFalse(base_info._is_namedtuple(NotATensor()))
def testIsIterable(self):
self.assertTrue(base_info._is_iterable((1, 2, 3)))
self.assertTrue(base_info._is_iterable([1, 2, 3]))
self.assertTrue(base_info._is_iterable({1: 1, 2: 2, 3: 3}))
self.assertTrue(base_info._is_iterable(
collections.OrderedDict([(1, 1), (2, 2)])))
self.assertTrue(base_info._is_iterable(DumbNamedTuple(1, 2)))
tensor = tf.placeholder(dtype=tf.float32, shape=(1, 10,))
self.assertFalse(base_info._is_iterable(set([1, 2, 3])))
self.assertFalse(base_info._is_iterable(tensor))
sparse_tensor = tf.SparseTensor(
indices=tf.placeholder(dtype=tf.int64, shape=(10, 2,)),
values=tf.placeholder(dtype=tf.float32, shape=(10,)),
dense_shape=tf.placeholder(dtype=tf.int64, shape=(2,)))
self.assertFalse(base_info._is_iterable(sparse_tensor))
self.assertFalse(base_info._is_iterable(NotATensor()))
self.assertFalse(base_info._is_iterable("foo"))
def generator():
for count in xrange(3):
self.assertFalse(False)
yield count
self.assertFalse(base_info._is_iterable(generator))
def testModuleInfo_multiple_modules(self):
# pylint: disable=not-callable
tf.reset_default_graph()
dumb = DumbModule(name="dumb")
dumb_1 = DumbModule(name="dumb")
linear = basic.Linear(10, name="linear")
ph_0 = tf.placeholder(dtype=tf.float32, shape=(1, 10,))
dumb(ph_0)
with tf.name_scope("foo"):
dumb_1(ph_0)
linear(ph_0)
def check():
sonnet_collection = tf.get_default_graph().get_collection(
base_info.SONNET_COLLECTION_NAME)
self.assertEqual(len(sonnet_collection), 3)
# item 0.
self.assertEqual(sonnet_collection[0].module_name, "dumb")
self.assertEqual(sonnet_collection[0].class_name,
"{}.DumbModule".format(THIS_MODULE))
self.assertEqual(sonnet_collection[0].scope_name, "dumb")
self.assertEqual(len(sonnet_collection[0].connected_subgraphs), 1)
self.assertEqual(
sonnet_collection[0].connected_subgraphs[0].name_scope, "dumb")
# item 1.
self.assertEqual(sonnet_collection[1].module_name, "dumb_1")
self.assertEqual(sonnet_collection[1].scope_name, "dumb_1")
self.assertEqual(sonnet_collection[1].class_name,
"{}.DumbModule".format(THIS_MODULE))
self.assertEqual(sonnet_collection[1].scope_name, "dumb_1")
self.assertEqual(len(sonnet_collection[1].connected_subgraphs), 1)
self.assertEqual(
sonnet_collection[1].connected_subgraphs[0].name_scope, "foo/dumb_1")
# item 2.
self.assertEqual(sonnet_collection[2].module_name, "linear")
self.assertEqual(sonnet_collection[2].scope_name, "linear")
self.assertEqual(sonnet_collection[2].class_name,
"{}.Linear".format(LINEAR_MODULE))
self.assertEqual(sonnet_collection[2].scope_name, "linear")
self.assertEqual(len(sonnet_collection[2].connected_subgraphs), 1)
self.assertEqual(
sonnet_collection[2].connected_subgraphs[0].name_scope, "linear")
check()
_copy_default_graph()
check()
def testModuleInfo_multiple_subgraph(self):
# pylint: disable=not-callable
tf.reset_default_graph()
dumb = DumbModule(name="dumb_a")
ph_0 = tf.placeholder(dtype=tf.float32, shape=(1, 10,))
dumb(ph_0)
with tf.name_scope("foo"):
dumb(ph_0)
def check():
sonnet_collection = tf.get_default_graph().get_collection(
base_info.SONNET_COLLECTION_NAME)
self.assertEqual(len(sonnet_collection), 1)
self.assertEqual(len(sonnet_collection[0].connected_subgraphs), 2)
connected_subgraph_0 = sonnet_collection[0].connected_subgraphs[0]
connected_subgraph_1 = sonnet_collection[0].connected_subgraphs[1]
self.assertEqual(connected_subgraph_0.name_scope, "dumb_a")
self.assertEqual(connected_subgraph_1.name_scope, "foo/dumb_a")
check()
_copy_default_graph()
check()
def testModuleInfo_tensor(self):
# pylint: disable=not-callable
tf.reset_default_graph()
dumb = DumbModule(name="dumb_a")
ph_0 = tf.placeholder(dtype=tf.float32, shape=(1, 10,))
dumb(ph_0)
def check():
sonnet_collection = tf.get_default_graph().get_collection(
base_info.SONNET_COLLECTION_NAME)
connected_subgraph = sonnet_collection[0].connected_subgraphs[0]
self.assertIsInstance(connected_subgraph.inputs["inputs"], tf.Tensor)
self.assertIsInstance(connected_subgraph.outputs, tf.Tensor)
check()
_copy_default_graph()
check()
def testModuleInfo_sparsetensor(self):
# pylint: disable=not-callable
tf.reset_default_graph()
dumb = DumbModule(name="dumb_a")
sparse_tensor = tf.SparseTensor(
indices=tf.placeholder(dtype=tf.int64, shape=(10, 2,)),
values=tf.placeholder(dtype=tf.float32, shape=(10,)),
dense_shape=tf.placeholder(dtype=tf.int64, shape=(2,)))
dumb(sparse_tensor)
def check():
sonnet_collection = tf.get_default_graph().get_collection(
base_info.SONNET_COLLECTION_NAME)
connected_subgraph = sonnet_collection[0].connected_subgraphs[0]
self.assertIsInstance(
connected_subgraph.inputs["inputs"], tf.SparseTensor)
self.assertIsInstance(connected_subgraph.outputs, tf.SparseTensor)
check()
_copy_default_graph()
check()
def testModuleInfo_tuple(self):
# pylint: disable=not-callable
tf.reset_default_graph()
dumb = DumbModule(name="dumb_a")
ph_0 = tf.placeholder(dtype=tf.float32, shape=(1, 10,))
ph_1 = tf.placeholder(dtype=tf.float32, shape=(1, 10,))
dumb((ph_0, ph_1))
def check():
sonnet_collection = tf.get_default_graph().get_collection(
base_info.SONNET_COLLECTION_NAME)
connected_subgraph = sonnet_collection[0].connected_subgraphs[0]
self.assertIsInstance(connected_subgraph.inputs["inputs"], tuple)
self.assertIsInstance(connected_subgraph.outputs, tuple)
check()
_copy_default_graph()
check()
def testModuleInfo_namedtuple(self):
# pylint: disable=not-callable
tf.reset_default_graph()
dumb = DumbModule(name="dumb_a")
ph_0 = tf.placeholder(dtype=tf.float32, shape=(1, 10,))
ph_1 = tf.placeholder(dtype=tf.float32, shape=(1, 10,))
dumb(DumbNamedTuple(ph_0, ph_1))
def check():
sonnet_collection = tf.get_default_graph().get_collection(
base_info.SONNET_COLLECTION_NAME)
connected_subgraph = sonnet_collection[0].connected_subgraphs[0]
self.assertTrue(
base_info._is_namedtuple(connected_subgraph.inputs["inputs"]))
self.assertTrue(base_info._is_namedtuple(connected_subgraph.outputs))
check()
_copy_default_graph()
check()
def testModuleInfo_dict(self):
# pylint: disable=not-callable
tf.reset_default_graph()
dumb = DumbModule(name="dumb_a")
ph_0 = tf.placeholder(dtype=tf.float32, shape=(1, 10,))
ph_1 = tf.placeholder(dtype=tf.float32, shape=(1, 10,))
dumb({"ph_0": ph_0, "ph_1": ph_1})
def check():
sonnet_collection = tf.get_default_graph().get_collection(
base_info.SONNET_COLLECTION_NAME)
connected_subgraph = sonnet_collection[0].connected_subgraphs[0]
self.assertIsInstance(connected_subgraph.inputs["inputs"], dict)
self.assertIsInstance(connected_subgraph.outputs, dict)
check()
_copy_default_graph()
check()
def testModuleInfo_not_a_tensor(self):
# pylint: disable=not-callable
tf.reset_default_graph()
dumb = DumbModule(name="dumb_a")
dumb(NotATensor())
def check(check_type):
sonnet_collection = tf.get_default_graph().get_collection(
base_info.SONNET_COLLECTION_NAME)
connected_subgraph = sonnet_collection[0].connected_subgraphs[0]
self.assertIsInstance(connected_subgraph.inputs["inputs"], check_type)
self.assertIsInstance(connected_subgraph.outputs, check_type)
check(NotATensor)
_copy_default_graph()
check(base_info._UnserializableObject)
def testModuleInfo_recursion(self):
# pylint: disable=not-callable
tf.reset_default_graph()
dumb = DumbModule(name="dumb_a", no_nest=True)
ph_0 = tf.placeholder(dtype=tf.float32, shape=(1, 10,))
val = {"one": ph_0, "self": None}
val["self"] = val
dumb(val)
def check(check_type):
sonnet_collection = tf.get_default_graph().get_collection(
base_info.SONNET_COLLECTION_NAME)
connected_subgraph = sonnet_collection[0].connected_subgraphs[0]
self.assertIsInstance(connected_subgraph.inputs["inputs"]["one"],
tf.Tensor)
self.assertIsInstance(
connected_subgraph.inputs["inputs"]["self"], check_type)
self.assertIsInstance(connected_subgraph.outputs["one"], tf.Tensor)
self.assertIsInstance(connected_subgraph.outputs["self"], check_type)
check(dict)
_copy_default_graph()
check(base_info._UnserializableObject)
# Run the test suite when this file is executed directly.
if __name__ == "__main__":
  tf.test.main()
| 38.471186
| 79
| 0.705525
|
4a056ed5b40327e799e3e5923f916f8751bc544e
| 1,776
|
py
|
Python
|
credentials.py
|
3xistentialcrisis/Password
|
dda6343302d7048c90c34c36a0b1a3e240cd95de
|
[
"MIT"
] | null | null | null |
credentials.py
|
3xistentialcrisis/Password
|
dda6343302d7048c90c34c36a0b1a3e240cd95de
|
[
"MIT"
] | null | null | null |
credentials.py
|
3xistentialcrisis/Password
|
dda6343302d7048c90c34c36a0b1a3e240cd95de
|
[
"MIT"
] | null | null | null |
import string
from random import choice
class Credential:
    """A user credential (account name, username, password).

    Saved credentials are kept in the class-level registry
    ``credential_details``, which is shared by all instances.
    """

    # Shared registry of every saved Credential instance.
    credential_details = []

    def __init__(self, fname, cred_account, cred_username, cred_password):
        """Create a new credential.

        Args:
            fname: the user's real first name.
            cred_account: name of the new credential (e.g. the service).
            cred_username: username of the credential account.
            cred_password: password of the credential account.
        """
        self.fname = fname
        self.cred_account = cred_account
        self.cred_username = cred_username
        self.cred_password = cred_password

    def save_new_credential(self):
        """Append this credential to the shared class-level registry."""
        Credential.credential_details.append(self)

    @classmethod
    def display_new_credentials(cls, password):
        """Return all saved credentials whose password equals *password*.

        Args:
            password: the password value to match against.

        Returns:
            A list of matching Credential instances (possibly empty).
        """
        return [credential for credential in cls.credential_details
                if credential.cred_password == password]

    @classmethod
    def generate_new_password(cls):
        """Return a random 10-character alphanumeric password.

        Uses ``secrets.choice`` instead of ``random.choice`` because
        password generation is security-sensitive and needs a
        cryptographically strong source of randomness.
        """
        import secrets  # local import: only needed here
        size = 10  # length of the generated password
        alphanum = string.ascii_uppercase + string.digits + string.ascii_lowercase
        return ''.join(secrets.choice(alphanum) for _ in range(size))
| 29.6
| 84
| 0.652027
|
4a056f1fe9687e8863d39dcea805b98daf2a5e6a
| 89,590
|
py
|
Python
|
clients/melati.py
|
Plow-Controller/Plow
|
785b41b10a1a96420d917d3204b9f26aa535ebc7
|
[
"MIT"
] | 3
|
2021-08-18T01:30:41.000Z
|
2021-08-21T02:33:11.000Z
|
clients/melati.py
|
Plow-Controller/plow
|
785b41b10a1a96420d917d3204b9f26aa535ebc7
|
[
"MIT"
] | null | null | null |
clients/melati.py
|
Plow-Controller/plow
|
785b41b10a1a96420d917d3204b9f26aa535ebc7
|
[
"MIT"
] | null | null | null |
from PyQt5 import QtCore, QtGui, QtWidgets
import os
import sys
import subprocess
import plow_rc
def updateclient():
    """Update the Melati CLI in place.

    Stops all services, backs up ~/.melati to ~/plow/old-config, re-clones
    and reinstalls the melati-blockchain repo, restores the db/wallet from
    the backup, restarts the farmer, and opens the config files for editing.
    """
    os.system('echo "Lets get Melati updated for you." ; cd ~/melati-blockchain ; . ./activate ; melati stop all ; deactivate ; sudo kill -9 $(sudo lsof -t -i:57400,2448,2449,2447,2444) ; cd ~/ ; mkdir ~/plow ; mkdir ~/plow/old-config ; rm -f --recursive -d ~/plow/old-config/.melati ; echo "Saving Mainnet folder in ~plow/old-config" ; mv ~/.melati -t ~/plow/old-config/ ; rm -f --recursive -d ~/melati-blockchain ; git clone https://github.com/Melati-Network/melati-blockchain.git ; cd ~/melati-blockchain ; sh install.sh ; . ./activate ; melati init ; cd ~/plow/old-config/.melati/mainnet ; mv db wallet -t ~/.melati/mainnet ; cd ~/melati-blockchain ; melati start farmer ; echo "Melati has been Updated! The old Mainnet folder is saved in ~/plow/old-config, Add Your Plot Directories!" ; xdg-open ~/.melati/mainnet/config/config.yaml ; xdg-open ~/plow/plot-directories.yaml')
def installcli():
    """Fresh-install the Melati CLI: clone the repo, run install.sh, init,
    restore any backed-up db/wallet, add keys, and start the farmer."""
    os.system('echo "Lets get Melati installed for you." ; cd ~/ ; sudo kill -9 $(sudo lsof -t -i:57400,2448,2449,2447,2444) ; git clone https://github.com/Melati-Network/melati-blockchain.git ; cd ~/melati-blockchain ; sh install.sh ; . ./activate ; melati init ; cd ~/plow/old-config/.melati/mainnet ; mv db wallet -t ~/.melati/mainnet ; cd ~/melati-blockchain ; melati keys add ; melati start farmer ; echo "Melati has been installed, Add Your Plot Directories!!" ; xdg-open ~/.melati/mainnet/config/config.yaml ; xdg-open ~/plow/plot-directories.yaml')
#uninstall
def uninstallclient():
    """Uninstall Melati: stop services, back up ~/.melati to
    ~/plow/old-config, then delete the melati-blockchain checkout."""
    os.system('echo "Lets get Melati uninstalled for you." ; cd ~/melati-blockchain ; . ./activate ; melati stop all ; deactivate ; cd ~/ ; mkdir ~/plow ; mkdir ~/plow/old-config ; rm -f --recursive -d ~/plow/old-config/.melati ; mv ~/.melati -t ~/plow/old-config/ ; rm -f --recursive -d ~/melati-blockchain ; echo "Melati has been Uninstalled. Old Mainnet folder is saved in ~/plow/old-config"')

def deleteclient():
    """Delete Melati without stopping services first; ~/.melati is still
    backed up to ~/plow/old-config before removal."""
    os.system('echo "Lets get Melati deleted for you." ; cd ~/ ; mkdir ~/plow ; mkdir ~/plow/old-config ; rm -f --recursive -d ~/plow/old-config/.melati ; mv ~/.melati -t ~/plow/old-config/ ; rm -f --recursive -d ~/melati-blockchain ; echo "Melati has been Deleted. The old Mainnet folder has been saved in ~/plow/old-config"')
def keysshow():
    """Show the fingerprint and pool/farmer public keys (`melati keys show`)."""
    os.system('echo "Getting Melati Fingerprint, Pool & Farmer Public Plot Keys..." ; cd ~/melati-blockchain ; . ./activate ; melati keys show')

def mnemonic():
    """Show the active mnemonic seed (`melati keys show --show-mnemonic-seed`)."""
    os.system('echo "Getting Active Melati Mnemonic Key..." ; cd ~/melati-blockchain ; . ./activate ; melati keys show --show-mnemonic-seed')

def genandprintkey():
    """Generate and print a new mnemonic without adding it to the keychain."""
    os.system('echo "Generating and Printing New Melati Mnemonic Key..." ; cd ~/melati-blockchain ; . ./activate ; melati generate_and_print')

def addkey():
    """Add an existing key to the keychain (`melati keys add`, prompts for seed)."""
    os.system('echo "Add Existing Melati Keys..." ; cd ~/melati-blockchain ; . ./activate ; melati keys add')

def genkey():
    """Generate a new mnemonic and add it to the keychain (`melati keys generate`)."""
    os.system('echo "Generating New Melati Mnemonic key..." ; cd ~/melati-blockchain ; . ./activate ; melati keys generate')
def stop():
    """Stop all Melati services (`melati stop all`)."""
    os.system('echo "Stopping Melati..." ; cd ~/melati-blockchain ; . ./activate ; melati stop all')

def version():
    """Print the installed Melati version."""
    os.system('echo "Melati Verion Number..." ; cd ~/melati-blockchain ; . ./activate ; melati version')

def startcli():
    """Restart the farmer: stop everything, then `melati start farmer`."""
    os.system('echo "Starting Melati..." ; cd ~/melati-blockchain ; . ./activate ; melati stop all ; deactivate ; cd ~/ ; cd ~/melati-blockchain ; . ./activate ; melati start farmer')
def chain():
    """Show the blockchain sync state (`melati show -s`)."""
    os.system('echo "Getting The State Of Melati Blockchain..." ; cd ~/melati-blockchain ; . ./activate ; melati show -s')

def connections():
    """List peer connections (`melati show -c`)."""
    os.system('echo "Getting Melati Peer Connections..." ; cd ~/melati-blockchain ; . ./activate ; melati show -c')

def wallet():
    """Show the wallet balance/state (`melati wallet show`)."""
    os.system('echo "Melati Wallet Is Loading..." ; cd ~/melati-blockchain ; . ./activate ; melati wallet show')

def getaddress():
    """Print a receive address (`melati wallet get_address`)."""
    os.system('echo "Getting Melati Address..." ; cd ~/melati-blockchain ; . ./activate ; melati wallet get_address')

def gettransactions():
    """List wallet transactions (`melati wallet get_transactions`)."""
    os.system('echo "Getting Melati Transactions..." ; cd ~/melati-blockchain ; . ./activate ; melati wallet get_transactions')

def summary():
    """Show the farm summary (`melati farm summary`)."""
    os.system('echo "Melati Farm Summary Loading..." ; cd ~/melati-blockchain ; . ./activate ; melati farm summary')

def challenges():
    """Show recent farming challenges (`melati farm challenges`)."""
    os.system('echo "Melati Challenges Loading..." ; cd ~/melati-blockchain ; . ./activate ; melati farm challenges ; echo "End Of Challenges"')
def config():
    """Open the mainnet config.yaml in the default editor via xdg-open."""
    os.system('echo "Opening Melati Config File..." ; xdg-open ~/.melati/mainnet/config/config.yaml')

def log():
    """Open the mainnet debug log in the default viewer via xdg-open."""
    os.system('echo "Opening Melati Log File..." ; xdg-open ~/.melati/mainnet/log/debug.log')
def startgui():
    """Launch the Melati GUI via `npm run electron`.

    Fix: the GUI directory was misspelled "melati-blockchian-gui"
    (transposed letters), so the `cd` failed and electron never started.
    The install script clones into "melati-blockchain-gui".
    """
    os.system('echo "Starting Melati-GUI..." ; cd ~/melati-blockchain ; . ./activate ; cd melati-blockchain-gui ; npm run electron')
def installgui():
    """Install the Melati GUI (install-gui.sh) and launch it.

    Fix: the GUI directory was misspelled "melati-blockchian-gui"
    (transposed letters), so the `cd` failed after installation.
    install-gui.sh creates "melati-blockchain-gui".
    """
    os.system('echo "Lets get Melati-GUI installed for you." ; cd ~/melati-blockchain ; . ./activate ; sh install-gui.sh ; cd melati-blockchain-gui ; npm run electron')
# Port map: daemon 57400, harvester 2448, wallet 2449, farmer 2447,
# full node 2444.  Each helper force-kills (kill -9) whatever process
# holds the given port, found via `lsof -t`.
def closedaemonport():
    """Kill the process bound to the daemon port (57400)."""
    os.system('echo "Closing Melati Daemon Port..." ; sudo kill -9 $(sudo lsof -t -i:57400) ; echo "Process IDs On Daemon Port 57400 Closed"')

def closefarmerport():
    """Kill the process bound to the farmer port (2447)."""
    os.system('echo "Closing Melati Farmer Port..." ; sudo kill -9 $(sudo lsof -t -i:2447) ; echo "Process IDs On Farmer Port 2447 Closed"')

def closeharvesterport():
    """Kill the process bound to the harvester port (2448)."""
    os.system('echo "Closing Melati Harvester Port..." ; sudo kill -9 $(sudo lsof -t -i:2448) ; echo "Process IDs On Harvester Port 2448 Closed"')

def closefullnodeport():
    """Kill the process bound to the full-node port (2444)."""
    os.system('echo "Closing Melati Full-Node Port..." ; sudo kill -9 $(sudo lsof -t -i:2444) ; echo "Process IDs On Full Node Port 2444 Closed"')

def closewalletport():
    """Kill the process bound to the wallet port (2449)."""
    os.system('echo "Closing Melati Wallet Port..." ; sudo kill -9 $(sudo lsof -t -i:2449) ; echo "Process IDs on Wallet Port 2449 Closed"')

def listports():
    """List the PIDs currently bound to any Melati port."""
    os.system('echo "List of All Melati Process IDs for Ports, Daemon 57400, Harvester 2448, Wallet 2449, Farmer 2447, Full Node 2444." ; echo "For more details on Active Process IDs, use Extras Tab on Main Window." ; sudo lsof -t -i:57400,2448,2449,2447,2444')

def closeallport():
    """Kill every process bound to any Melati port."""
    os.system('echo "Closing All Melati Ports..." ; sudo kill -9 $(sudo lsof -t -i:57400,2448,2449,2447,2444) ; echo "All Process IDs on Ports, Daemon 57400, Harvester 2448, Wallet 2449, Farmer 2447, Full Node 2444, are closed"')
def terminal():
    """Open the system terminal emulator in the melati-blockchain folder.

    Uses subprocess.call with shell=True because it chains shell commands
    and relies on the x-terminal-emulator alternatives symlink.
    """
    subprocess.call('echo "Opening Terminal in Melati Folder" ; cd ~/melati-blockchain ; x-terminal-emulator -- ', shell =True)
class Ui_MelatiWindow(object):
def setupUi(self, MelatiWindow):
MelatiWindow.setObjectName("MelatiWindow")
MelatiWindow.resize(483, 320)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(28, 113, 216))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(102, 171, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(65, 142, 235))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(14, 57, 108))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(19, 75, 144))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(7, 69, 88))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(141, 184, 235))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 128))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(28, 113, 216))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(102, 171, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(65, 142, 235))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(14, 57, 108))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(19, 75, 144))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(7, 69, 88))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(141, 184, 235))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 128))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(14, 57, 108))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(28, 113, 216))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(102, 171, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(65, 142, 235))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(14, 57, 108))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(19, 75, 144))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(14, 57, 108))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(14, 57, 108))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(7, 69, 88))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(7, 69, 88))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(28, 113, 216))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 128))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.PlaceholderText, brush)
#START MAIN WINDOW
MelatiWindow.setPalette(palette)
#ICON
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(":/theme/plow.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
MelatiWindow.setWindowIcon(icon)
#MAIN WINDOWS SPECS
self.centralwidget = QtWidgets.QWidget(MelatiWindow)
self.centralwidget.setObjectName("centralwidget")
self.gridLayout_3 = QtWidgets.QGridLayout(self.centralwidget)
self.gridLayout_3.setContentsMargins(0, 0, 0, 0)
self.gridLayout_3.setSpacing(0)
self.gridLayout_3.setObjectName("gridLayout_3")
self.gridLayoutmain = QtWidgets.QGridLayout()
self.gridLayoutmain.setSpacing(0)
self.gridLayoutmain.setObjectName("gridLayoutmain")
self.MainTab = QtWidgets.QTabWidget(self.centralwidget)
self.MainTab.setEnabled(True)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.MainTab.sizePolicy().hasHeightForWidth())
self.MainTab.setSizePolicy(sizePolicy)
#END MAIN WINDOW SPECS
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(25, 186, 227))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(113, 226, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(69, 206, 241))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(12, 93, 113))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(17, 124, 151))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(119, 118, 123))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 85, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(140, 220, 241))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 128))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(25, 186, 227))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(113, 226, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(69, 206, 241))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(12, 93, 113))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(17, 124, 151))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(119, 118, 123))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 85, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(140, 220, 241))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 128))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(12, 93, 113))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(25, 186, 227))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(113, 226, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(69, 206, 241))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(12, 93, 113))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(17, 124, 151))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(12, 93, 113))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(12, 93, 113))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 85, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 85, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(25, 186, 227))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 128))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.PlaceholderText, brush)
self.MainTab.setPalette(palette)
font = QtGui.QFont()
font.setFamily("Fira Sans")
font.setBold(True)
font.setWeight(75)
self.MainTab.setFont(font)
self.MainTab.setMouseTracking(True)
self.MainTab.setTabletTracking(True)
self.MainTab.setAutoFillBackground(True)
self.MainTab.setStyleSheet("")
self.MainTab.setObjectName("MainTab")
#START DASHBOARD TAB
self.dashboardtab = QtWidgets.QWidget()
self.dashboardtab.setObjectName("dashboardtab")
self.gridLayout_2 = QtWidgets.QGridLayout(self.dashboardtab)
self.gridLayout_2.setContentsMargins(0, 0, 0, 0)
self.gridLayout_2.setSpacing(0)
self.gridLayout_2.setObjectName("gridLayout_2")
#connections
self.connections = QtWidgets.QPushButton(self.dashboardtab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.connections.sizePolicy().hasHeightForWidth())
self.connections.setSizePolicy(sizePolicy)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(133, 133, 133))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
self.connections.setPalette(palette)
font = QtGui.QFont()
font.setFamily("URW Bookman L")
font.setPointSize(16)
font.setBold(False)
font.setItalic(True)
font.setWeight(9)
self.connections.setFont(font)
self.connections.setStyleSheet("border-image: url(:/theme/circuit.jpg);\n"
"font: 75 italic 16pt \"URW Bookman L\";")
self.connections.setObjectName("connections")
self.gridLayout_2.addWidget(self.connections, 7, 1, 1, 2)
self.connections.clicked.connect(connections)
#summary
self.summary = QtWidgets.QPushButton(self.dashboardtab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.summary.sizePolicy().hasHeightForWidth())
self.summary.setSizePolicy(sizePolicy)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(133, 133, 133))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
self.summary.setPalette(palette)
font = QtGui.QFont()
font.setFamily("URW Bookman L")
font.setPointSize(16)
font.setBold(False)
font.setItalic(True)
font.setWeight(9)
self.summary.setFont(font)
self.summary.setStyleSheet("border-image: url(:/theme/circuit.jpg);\n"
"font: 75 italic 16pt \"URW Bookman L\";")
self.summary.setObjectName("summary")
self.gridLayout_2.addWidget(self.summary, 0, 1, 1, 2)
self.summary.clicked.connect(summary)
#config
self.config = QtWidgets.QPushButton(self.dashboardtab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.config.sizePolicy().hasHeightForWidth())
self.config.setSizePolicy(sizePolicy)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(133, 133, 133))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
self.config.setPalette(palette)
font = QtGui.QFont()
font.setFamily("URW Bookman L")
font.setPointSize(16)
font.setBold(False)
font.setItalic(True)
font.setWeight(9)
self.config.setFont(font)
self.config.setStyleSheet("border-image: url(:/theme/circuit.jpg);\n"
"font: 75 italic 16pt \"URW Bookman L\";")
self.config.setObjectName("config")
self.gridLayout_2.addWidget(self.config, 3, 2, 1, 1)
self.config.clicked.connect(config)
#wallet
self.wallet = QtWidgets.QPushButton(self.dashboardtab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.wallet.sizePolicy().hasHeightForWidth())
self.wallet.setSizePolicy(sizePolicy)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(133, 133, 133))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
self.wallet.setPalette(palette)
font = QtGui.QFont()
font.setFamily("URW Bookman L")
font.setPointSize(16)
font.setBold(False)
font.setItalic(True)
font.setWeight(9)
self.wallet.setFont(font)
self.wallet.setStyleSheet("border-image: url(:/theme/circuit.jpg);\n"
"font: 75 italic 16pt \"URW Bookman L\";")
self.wallet.setObjectName("wallet")
self.gridLayout_2.addWidget(self.wallet, 1, 0, 1, 3)
self.wallet.clicked.connect(wallet)
#chain
self.chain = QtWidgets.QPushButton(self.dashboardtab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.chain.sizePolicy().hasHeightForWidth())
self.chain.setSizePolicy(sizePolicy)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(133, 133, 133))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
self.chain.setPalette(palette)
font = QtGui.QFont()
font.setFamily("URW Bookman L")
font.setPointSize(16)
font.setBold(False)
font.setItalic(True)
font.setWeight(9)
self.chain.setFont(font)
self.chain.setStyleSheet("border-image: url(:/theme/circuit.jpg);\n"
"font: 75 italic 16pt \"URW Bookman L\";")
self.chain.setObjectName("chain")
self.gridLayout_2.addWidget(self.chain, 7, 0, 1, 1)
self.chain.clicked.connect(chain)
#startcli
self.startcli = QtWidgets.QPushButton(self.dashboardtab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.startcli.sizePolicy().hasHeightForWidth())
self.startcli.setSizePolicy(sizePolicy)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(133, 133, 133))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
self.startcli.setPalette(palette)
font = QtGui.QFont()
font.setFamily("URW Bookman L")
font.setPointSize(16)
font.setBold(False)
font.setItalic(True)
font.setWeight(9)
self.startcli.setFont(font)
self.startcli.setStyleSheet("border-image: url(:/theme/circuit.jpg);\n"
"font: 75 italic 16pt \"URW Bookman L\";")
self.startcli.setObjectName("startcli")
self.gridLayout_2.addWidget(self.startcli, 0, 0, 1, 1)
self.startcli.clicked.connect(startcli)
#startgui
self.startgui = QtWidgets.QPushButton(self.dashboardtab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.startgui.sizePolicy().hasHeightForWidth())
self.startgui.setSizePolicy(sizePolicy)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(133, 133, 133))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
self.startgui.setPalette(palette)
font = QtGui.QFont()
font.setFamily("URW Bookman L")
font.setPointSize(16)
font.setBold(False)
font.setItalic(True)
font.setWeight(9)
self.startgui.setFont(font)
self.startgui.setStyleSheet("border-image: url(:/theme/circuit.jpg);\n"
"font: 75 italic 16pt \"URW Bookman L\";")
self.startgui.setObjectName("startgui")
self.gridLayout_2.addWidget(self.startgui, 10, 2, 1, 1)
self.startgui.clicked.connect(startgui)
#log
self.log = QtWidgets.QPushButton(self.dashboardtab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.log.sizePolicy().hasHeightForWidth())
self.log.setSizePolicy(sizePolicy)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(133, 133, 133))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
self.log.setPalette(palette)
font = QtGui.QFont()
font.setFamily("URW Bookman L")
font.setPointSize(16)
font.setBold(False)
font.setItalic(True)
font.setWeight(9)
self.log.setFont(font)
self.log.setStyleSheet("border-image: url(:/theme/circuit.jpg);\n"
"font: 75 italic 16pt \"URW Bookman L\";")
self.log.setObjectName("log")
self.gridLayout_2.addWidget(self.log, 3, 0, 1, 2)
self.log.clicked.connect(log)
#challenges
self.challenges = QtWidgets.QPushButton(self.dashboardtab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.challenges.sizePolicy().hasHeightForWidth())
self.challenges.setSizePolicy(sizePolicy)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(133, 133, 133))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
self.challenges.setPalette(palette)
font = QtGui.QFont()
font.setFamily("URW Bookman L")
font.setPointSize(16)
font.setBold(False)
font.setItalic(True)
font.setWeight(9)
self.challenges.setFont(font)
self.challenges.setStyleSheet("border-image: url(:/theme/circuit.jpg);\n"
"font: 75 italic 16pt \"URW Bookman L\";")
self.challenges.setObjectName("challenges")
self.gridLayout_2.addWidget(self.challenges, 10, 0, 1, 2)
self.challenges.clicked.connect(challenges)
#stoplayout
self.stoplayout = QtWidgets.QVBoxLayout()
self.stoplayout.setSpacing(0)
self.stoplayout.setObjectName("stoplayout")
self.stop = QtWidgets.QPushButton(self.dashboardtab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.stop.sizePolicy().hasHeightForWidth())
self.stop.setSizePolicy(sizePolicy)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(133, 133, 133))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
self.stop.setPalette(palette)
font = QtGui.QFont()
font.setFamily("URW Bookman L")
font.setPointSize(16)
font.setBold(False)
font.setItalic(True)
font.setWeight(9)
self.stop.setFont(font)
self.stop.setStyleSheet("border-image: url(:/theme/circuit.jpg);\n"
"font: 75 italic 16pt \"URW Bookman L\";")
self.stop.setObjectName("stop")
self.stoplayout.addWidget(self.stop)
self.gridLayout_2.addLayout(self.stoplayout, 11, 0, 1, 1)
self.stop.clicked.connect(stop)
#terminal
self.terminal = QtWidgets.QPushButton(self.dashboardtab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.terminal.sizePolicy().hasHeightForWidth())
self.terminal.setSizePolicy(sizePolicy)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(133, 133, 133))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
self.terminal.setPalette(palette)
font = QtGui.QFont()
font.setFamily("URW Bookman L")
font.setPointSize(16)
font.setBold(False)
font.setItalic(True)
font.setWeight(9)
self.terminal.setFont(font)
self.terminal.setStyleSheet("border-image: url(:/theme/circuit.jpg);\n"
"font: 75 italic 16pt \"URW Bookman L\";")
self.terminal.setObjectName("terminal")
self.gridLayout_2.addWidget(self.terminal, 11, 1, 1, 2)
self.terminal.clicked.connect(terminal)
#END DASHBOARD TAB
self.MainTab.addTab(self.dashboardtab, "")
#START KEYS TAB
self.keys = QtWidgets.QWidget()
self.keys.setObjectName("keys")
self.gridLayout_7 = QtWidgets.QGridLayout(self.keys)
self.gridLayout_7.setContentsMargins(0, 0, 0, 0)
self.gridLayout_7.setSpacing(0)
self.gridLayout_7.setObjectName("gridLayout_7")
#genkey
self.genkey = QtWidgets.QPushButton(self.keys)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.genkey.sizePolicy().hasHeightForWidth())
self.genkey.setSizePolicy(sizePolicy)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(133, 133, 133))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
self.genkey.setPalette(palette)
font = QtGui.QFont()
font.setFamily("URW Bookman L")
font.setPointSize(16)
font.setBold(False)
font.setItalic(True)
font.setWeight(9)
self.genkey.setFont(font)
self.genkey.setStyleSheet("border-image: url(:/theme/circuit.jpg);\n"
"font: 75 italic 16pt \"URW Bookman L\";")
self.genkey.setObjectName("genkey")
self.gridLayout_7.addWidget(self.genkey, 3, 1, 1, 2)
self.genkey.clicked.connect(genkey)
#gettransactions
self.gettransactions = QtWidgets.QPushButton(self.keys)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.gettransactions.sizePolicy().hasHeightForWidth())
self.gettransactions.setSizePolicy(sizePolicy)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(133, 133, 133))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
self.gettransactions.setPalette(palette)
font = QtGui.QFont()
font.setFamily("URW Bookman L")
font.setPointSize(16)
font.setBold(False)
font.setItalic(True)
font.setWeight(9)
self.gettransactions.setFont(font)
self.gettransactions.setStyleSheet("border-image: url(:/theme/circuit.jpg);\n"
"font: 75 italic 16pt \"URW Bookman L\";")
self.gettransactions.setObjectName("gettransactions")
self.gridLayout_7.addWidget(self.gettransactions, 0, 1, 1, 2)
self.gettransactions.clicked.connect(gettransactions)
#getaddress
self.getaddress = QtWidgets.QPushButton(self.keys)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.getaddress.sizePolicy().hasHeightForWidth())
self.getaddress.setSizePolicy(sizePolicy)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(133, 133, 133))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
self.getaddress.setPalette(palette)
font = QtGui.QFont()
font.setFamily("URW Bookman L")
font.setPointSize(16)
font.setBold(False)
font.setItalic(True)
font.setWeight(9)
self.getaddress.setFont(font)
self.getaddress.setStyleSheet("border-image: url(:/theme/circuit.jpg);\n"
"font: 75 italic 16pt \"URW Bookman L\";")
self.getaddress.setObjectName("getaddress")
self.gridLayout_7.addWidget(self.getaddress, 0, 0, 1, 1)
self.getaddress.clicked.connect(getaddress)
#mnemonic
self.mnemonic = QtWidgets.QPushButton(self.keys)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.mnemonic.sizePolicy().hasHeightForWidth())
self.mnemonic.setSizePolicy(sizePolicy)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(133, 133, 133))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
self.mnemonic.setPalette(palette)
font = QtGui.QFont()
font.setFamily("URW Bookman L")
font.setPointSize(16)
font.setBold(False)
font.setItalic(True)
font.setWeight(9)
self.mnemonic.setFont(font)
self.mnemonic.setStyleSheet("border-image: url(:/theme/circuit.jpg);\n"
"font: 75 italic 16pt \"URW Bookman L\";")
self.mnemonic.setObjectName("mnemonic")
self.gridLayout_7.addWidget(self.mnemonic, 5, 2, 1, 1)
self.mnemonic.clicked.connect(mnemonic)
#genandprintkey
self.genandprintkey = QtWidgets.QPushButton(self.keys)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.genandprintkey.sizePolicy().hasHeightForWidth())
self.genandprintkey.setSizePolicy(sizePolicy)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(133, 133, 133))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
self.genandprintkey.setPalette(palette)
font = QtGui.QFont()
font.setFamily("URW Bookman L")
font.setPointSize(16)
font.setBold(False)
font.setItalic(True)
font.setWeight(9)
self.genandprintkey.setFont(font)
self.genandprintkey.setStyleSheet("border-image: url(:/theme/circuit.jpg);\n"
"font: 75 italic 16pt \"URW Bookman L\";")
self.genandprintkey.setObjectName("genandprintkey")
self.gridLayout_7.addWidget(self.genandprintkey, 5, 0, 1, 2)
self.genandprintkey.clicked.connect(genandprintkey)
#addkey
self.addkey = QtWidgets.QPushButton(self.keys)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.addkey.sizePolicy().hasHeightForWidth())
self.addkey.setSizePolicy(sizePolicy)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(143, 240, 164))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(133, 133, 133))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
self.addkey.setPalette(palette)
font = QtGui.QFont()
font.setFamily("URW Bookman L")
font.setPointSize(16)
font.setBold(False)
font.setItalic(True)
font.setWeight(9)
self.addkey.setFont(font)
self.addkey.setStyleSheet("border-image: url(:/theme/circuit.jpg);\n"
"font: 75 italic 16pt \"URW Bookman L\";")
self.addkey.setObjectName("addkey")
self.gridLayout_7.addWidget(self.addkey, 3, 0, 1, 1)
self.addkey.clicked.connect(addkey)
#keysshow
self.keysshow = QtWidgets.QPushButton(self.keys)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.keysshow.sizePolicy().hasHeightForWidth())
self.keysshow.setSizePolicy(sizePolicy)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(133, 133, 133))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
self.keysshow.setPalette(palette)
font = QtGui.QFont()
font.setFamily("URW Bookman L")
font.setPointSize(16)
font.setBold(False)
font.setItalic(True)
font.setWeight(9)
self.keysshow.setFont(font)
self.keysshow.setStyleSheet("border-image: url(:/theme/circuit.jpg);\n"
"font: 75 italic 16pt \"URW Bookman L\";")
self.keysshow.setObjectName("keysshow")
self.gridLayout_7.addWidget(self.keysshow, 2, 0, 1, 3)
self.keysshow.clicked.connect(keysshow)
#END KEYS TAB
self.MainTab.addTab(self.keys, "")
#START PORTS TAB
self.portstab = QtWidgets.QWidget()
self.portstab.setObjectName("portstab")
self.gridLayout_6 = QtWidgets.QGridLayout(self.portstab)
self.gridLayout_6.setContentsMargins(0, 0, 0, 0)
self.gridLayout_6.setSpacing(0)
self.gridLayout_6.setObjectName("gridLayout_6")
#closeallport
self.closeallport = QtWidgets.QPushButton(self.portstab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.closeallport.sizePolicy().hasHeightForWidth())
self.closeallport.setSizePolicy(sizePolicy)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(133, 133, 133))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
self.closeallport.setPalette(palette)
font = QtGui.QFont()
font.setFamily("URW Bookman L")
font.setPointSize(16)
font.setBold(False)
font.setItalic(True)
font.setWeight(9)
self.closeallport.setFont(font)
self.closeallport.setStyleSheet("border-image: url(:/theme/circuit.jpg);\n"
"font: 75 italic 16pt \"URW Bookman L\";")
self.closeallport.setObjectName("closeallport")
self.gridLayout_6.addWidget(self.closeallport, 6, 0, 1, 3)
self.closeallport.clicked.connect(closeallport)
#closefullnodeport
self.closefullnodeport = QtWidgets.QPushButton(self.portstab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.closefullnodeport.sizePolicy().hasHeightForWidth())
self.closefullnodeport.setSizePolicy(sizePolicy)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(133, 133, 133))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
self.closefullnodeport.setPalette(palette)
font = QtGui.QFont()
font.setFamily("URW Bookman L")
font.setPointSize(16)
font.setBold(False)
font.setItalic(True)
font.setWeight(9)
self.closefullnodeport.setFont(font)
self.closefullnodeport.setStyleSheet("border-image: url(:/theme/circuit.jpg);\n"
"font: 75 italic 16pt \"URW Bookman L\";")
self.closefullnodeport.setObjectName("closefullnodeport")
self.gridLayout_6.addWidget(self.closefullnodeport, 0, 2, 1, 1)
self.closefullnodeport.clicked.connect(closefullnodeport)
#listports
self.listports = QtWidgets.QPushButton(self.portstab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.listports.sizePolicy().hasHeightForWidth())
self.listports.setSizePolicy(sizePolicy)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(133, 133, 133))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
self.listports.setPalette(palette)
font = QtGui.QFont()
font.setFamily("URW Bookman L")
font.setPointSize(16)
font.setBold(False)
font.setItalic(True)
font.setWeight(9)
self.listports.setFont(font)
self.listports.setStyleSheet("border-image: url(:/theme/circuit.jpg);\n"
"font: 75 italic 16pt \"URW Bookman L\";")
self.listports.setObjectName("listports")
self.gridLayout_6.addWidget(self.listports, 0, 0, 1, 2)
self.listports.clicked.connect(listports)
#closefarmerport
self.closefarmerport = QtWidgets.QPushButton(self.portstab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.closefarmerport.sizePolicy().hasHeightForWidth())
self.closefarmerport.setSizePolicy(sizePolicy)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(133, 133, 133))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
self.closefarmerport.setPalette(palette)
font = QtGui.QFont()
font.setFamily("URW Bookman L")
font.setPointSize(16)
font.setBold(False)
font.setItalic(True)
font.setWeight(9)
self.closefarmerport.setFont(font)
self.closefarmerport.setStyleSheet("border-image: url(:/theme/circuit.jpg);\n"
"font: 75 italic 16pt \"URW Bookman L\";")
self.closefarmerport.setObjectName("closefarmerport")
self.gridLayout_6.addWidget(self.closefarmerport, 1, 0, 1, 1)
self.closefarmerport.clicked.connect(closefarmerport)
#closewalletport
self.closewalletport = QtWidgets.QPushButton(self.portstab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.closewalletport.sizePolicy().hasHeightForWidth())
self.closewalletport.setSizePolicy(sizePolicy)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(133, 133, 133))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
self.closewalletport.setPalette(palette)
font = QtGui.QFont()
font.setFamily("URW Bookman L")
font.setPointSize(16)
font.setBold(False)
font.setItalic(True)
font.setWeight(9)
self.closewalletport.setFont(font)
self.closewalletport.setStyleSheet("border-image: url(:/theme/circuit.jpg);\n"
"font: 75 italic 16pt \"URW Bookman L\";")
self.closewalletport.setObjectName("closewalletport")
self.gridLayout_6.addWidget(self.closewalletport, 4, 2, 1, 1)
self.closewalletport.clicked.connect(closewalletport)
#closeharvesterport
self.closeharvesterport = QtWidgets.QPushButton(self.portstab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.closeharvesterport.sizePolicy().hasHeightForWidth())
self.closeharvesterport.setSizePolicy(sizePolicy)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(133, 133, 133))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
self.closeharvesterport.setPalette(palette)
font = QtGui.QFont()
font.setFamily("URW Bookman L")
font.setPointSize(16)
font.setBold(False)
font.setItalic(True)
font.setWeight(9)
self.closeharvesterport.setFont(font)
self.closeharvesterport.setStyleSheet("border-image: url(:/theme/circuit.jpg);\n"
"font: 75 italic 16pt \"URW Bookman L\";")
self.closeharvesterport.setObjectName("closeharvesterport")
self.gridLayout_6.addWidget(self.closeharvesterport, 1, 1, 1, 2)
self.closeharvesterport.clicked.connect(closeharvesterport)
#closedaemonport
self.closedaemonport = QtWidgets.QPushButton(self.portstab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.closedaemonport.sizePolicy().hasHeightForWidth())
self.closedaemonport.setSizePolicy(sizePolicy)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(133, 133, 133))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
self.closedaemonport.setPalette(palette)
font = QtGui.QFont()
font.setFamily("URW Bookman L")
font.setPointSize(16)
font.setBold(False)
font.setItalic(True)
font.setWeight(9)
self.closedaemonport.setFont(font)
self.closedaemonport.setStyleSheet("border-image: url(:/theme/circuit.jpg);\n"
"font: 75 italic 16pt \"URW Bookman L\";")
self.closedaemonport.setObjectName("closedaemonport")
self.gridLayout_6.addWidget(self.closedaemonport, 4, 0, 1, 2)
self.closedaemonport.clicked.connect(closedaemonport)
#END PORTS TAB
self.MainTab.addTab(self.portstab, "")
#START INSTALL TAB
self.installtab = QtWidgets.QWidget()
self.installtab.setObjectName("installtab")
self.gridLayout_4 = QtWidgets.QGridLayout(self.installtab)
self.gridLayout_4.setContentsMargins(0, 0, 0, 0)
self.gridLayout_4.setSpacing(0)
self.gridLayout_4.setObjectName("gridLayout_4")
#installcli
self.installcli = QtWidgets.QPushButton(self.installtab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.installcli.sizePolicy().hasHeightForWidth())
self.installcli.setSizePolicy(sizePolicy)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(133, 133, 133))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
self.installcli.setPalette(palette)
font = QtGui.QFont()
font.setFamily("URW Bookman L")
font.setPointSize(16)
font.setBold(False)
font.setItalic(True)
font.setWeight(9)
self.installcli.setFont(font)
self.installcli.setStyleSheet("border-image: url(:/theme/circuit.jpg);\n"
"font: 75 italic 16pt \"URW Bookman L\";")
self.installcli.setObjectName("installcli")
self.gridLayout_4.addWidget(self.installcli, 0, 0, 1, 1)
self.installcli.clicked.connect(installcli)
#installgui
self.installgui = QtWidgets.QPushButton(self.installtab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.installgui.sizePolicy().hasHeightForWidth())
self.installgui.setSizePolicy(sizePolicy)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(133, 133, 133))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
self.installgui.setPalette(palette)
font = QtGui.QFont()
font.setFamily("URW Bookman L")
font.setPointSize(16)
font.setBold(False)
font.setItalic(True)
font.setWeight(9)
self.installgui.setFont(font)
self.installgui.setStyleSheet("border-image: url(:/theme/circuit.jpg);\n"
"font: 75 italic 16pt \"URW Bookman L\";")
self.installgui.setObjectName("installgui")
self.gridLayout_4.addWidget(self.installgui, 1, 0, 1, 1)
self.installgui.clicked.connect(installgui)
#version
self.version = QtWidgets.QPushButton(self.installtab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.version.sizePolicy().hasHeightForWidth())
self.version.setSizePolicy(sizePolicy)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(133, 133, 133))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
self.version.setPalette(palette)
font = QtGui.QFont()
font.setFamily("URW Bookman L")
font.setPointSize(16)
font.setBold(False)
font.setItalic(True)
font.setWeight(9)
self.version.setFont(font)
self.version.setStyleSheet("border-image: url(:/theme/circuit.jpg);\n"
"font: 75 italic 16pt \"URW Bookman L\";")
self.version.setObjectName("version")
self.gridLayout_4.addWidget(self.version, 0, 1, 2, 1)
self.version.clicked.connect(version)
#END INSTALL TAB
self.MainTab.addTab(self.installtab, "")
#START UNINSTALL TAB
self.uninstalltab = QtWidgets.QWidget()
self.uninstalltab.setObjectName("uninstalltab")
self.updatelayout = QtWidgets.QGridLayout(self.uninstalltab)
self.updatelayout.setContentsMargins(0, 0, 0, 0)
self.updatelayout.setSpacing(0)
self.updatelayout.setObjectName("updatelayout")
#uninstallclient
self.uninstallclient = QtWidgets.QPushButton(self.uninstalltab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.uninstallclient.sizePolicy().hasHeightForWidth())
self.uninstallclient.setSizePolicy(sizePolicy)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(133, 133, 133))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
self.uninstallclient.setPalette(palette)
font = QtGui.QFont()
font.setFamily("URW Bookman L")
font.setPointSize(16)
font.setBold(False)
font.setItalic(True)
font.setWeight(9)
self.uninstallclient.setFont(font)
self.uninstallclient.setStyleSheet("border-image: url(:/theme/circuit.jpg);\n"
"font: 75 italic 16pt \"URW Bookman L\";")
self.uninstallclient.setObjectName("uninstallclient")
self.updatelayout.addWidget(self.uninstallclient, 1, 1, 1, 1)
self.uninstallclient.clicked.connect(uninstallclient)
#updateclient
self.updateclient = QtWidgets.QPushButton(self.uninstalltab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.updateclient.sizePolicy().hasHeightForWidth())
self.updateclient.setSizePolicy(sizePolicy)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(133, 133, 133))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
self.updateclient.setPalette(palette)
font = QtGui.QFont()
font.setFamily("URW Bookman L")
font.setPointSize(16)
font.setBold(False)
font.setItalic(True)
font.setWeight(9)
self.updateclient.setFont(font)
self.updateclient.setStyleSheet("border-image: url(:/theme/circuit.jpg);\n"
"font: 75 italic 16pt \"URW Bookman L\";")
self.updateclient.setObjectName("updateclient")
self.updatelayout.addWidget(self.updateclient, 0, 1, 1, 2)
self.updateclient.clicked.connect(updateclient)
#deleteclient
self.deleteclient = QtWidgets.QPushButton(self.uninstalltab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.deleteclient.sizePolicy().hasHeightForWidth())
self.deleteclient.setSizePolicy(sizePolicy)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(133, 133, 133))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
self.deleteclient.setPalette(palette)
font = QtGui.QFont()
font.setFamily("URW Bookman L")
font.setPointSize(16)
font.setBold(False)
font.setItalic(True)
font.setWeight(9)
self.deleteclient.setFont(font)
self.deleteclient.setStyleSheet("border-image: url(:/theme/circuit.jpg);\n"
"font: 75 italic 16pt \"URW Bookman L\";")
self.deleteclient.setObjectName("deleteclient")
self.updatelayout.addWidget(self.deleteclient, 1, 2, 1, 1)
self.deleteclient.clicked.connect(deleteclient)
#END UNINSTALL TAB
self.MainTab.addTab(self.uninstalltab, "")
#TAB LAYOUT
self.gridLayoutmain.addWidget(self.MainTab, 0, 0, 1, 1)
self.gridLayout_3.addLayout(self.gridLayoutmain, 1, 1, 1, 1)
MelatiWindow.setCentralWidget(self.centralwidget)
self.retranslateUi(MelatiWindow)
self.MainTab.setCurrentIndex(0)
QtCore.QMetaObject.connectSlotsByName(MelatiWindow)
def retranslateUi(self, MelatiWindow):
    """Install the (translatable) display strings on every widget.

    pyuic5-style generated method: all user-visible text is routed through
    ``QCoreApplication.translate`` so Qt translation files can replace it.
    Called once at the end of ``setupUi``.
    """
    _translate = QtCore.QCoreApplication.translate
    MelatiWindow.setWindowTitle(_translate("MelatiWindow", "Melati"))
    # Dashboard tab buttons.
    self.connections.setText(_translate("MelatiWindow", "Connections"))
    self.summary.setText(_translate("MelatiWindow", "Farm Summary"))
    self.config.setText(_translate("MelatiWindow", "Config"))
    self.wallet.setText(_translate("MelatiWindow", "Show Melati Wallet"))
    self.chain.setText(_translate("MelatiWindow", "State Of Melati"))
    self.startcli.setText(_translate("MelatiWindow", "Start-CLI"))
    self.startgui.setText(_translate("MelatiWindow", "Start-GUI"))
    self.log.setText(_translate("MelatiWindow", "Log"))
    self.challenges.setText(_translate("MelatiWindow", "Challenges"))
    self.stop.setText(_translate("MelatiWindow", "Stop"))
    self.terminal.setText(_translate("MelatiWindow", "Open Terminal"))
    # Tab captions are looked up via indexOf() so tab reordering stays safe.
    self.MainTab.setTabText(self.MainTab.indexOf(self.dashboardtab), _translate("MelatiWindow", "Dashboard"))
    # Wallet/keys tab buttons.
    self.genkey.setText(_translate("MelatiWindow", "Generate Key"))
    self.gettransactions.setText(_translate("MelatiWindow", "Get Transactions"))
    self.getaddress.setText(_translate("MelatiWindow", "Get Address"))
    self.mnemonic.setText(_translate("MelatiWindow", "Show Mnemonic"))
    self.genandprintkey.setText(_translate("MelatiWindow", "Generate And Print Key"))
    self.addkey.setText(_translate("MelatiWindow", "Add Key"))
    self.keysshow.setText(_translate("MelatiWindow", "Melati Keys Show"))
    self.MainTab.setTabText(self.MainTab.indexOf(self.keys), _translate("MelatiWindow", "Wallet and Keys"))
    # Ports tab buttons (port numbers are part of the labels).
    self.closeallport.setText(_translate("MelatiWindow", "Close All Ports"))
    self.closefullnodeport.setText(_translate("MelatiWindow", "Close Full-Node 2444"))
    self.listports.setText(_translate("MelatiWindow", "List Active PIDs"))
    self.closefarmerport.setText(_translate("MelatiWindow", "Close Farmer 2447"))
    self.closewalletport.setText(_translate("MelatiWindow", "Close Wallet 2449"))
    self.closeharvesterport.setText(_translate("MelatiWindow", "Close Harvester 2448"))
    self.closedaemonport.setText(_translate("MelatiWindow", "Close Daemon 57400"))
    self.MainTab.setTabText(self.MainTab.indexOf(self.portstab), _translate("MelatiWindow", "Ports"))
    # Install tab buttons.
    self.installcli.setText(_translate("MelatiWindow", "Install CLI"))
    self.installgui.setText(_translate("MelatiWindow", "Install GUI"))
    self.version.setText(_translate("MelatiWindow", "Melati Version"))
    self.MainTab.setTabText(self.MainTab.indexOf(self.installtab), _translate("MelatiWindow", "Install"))
    # Update/uninstall tab buttons.
    self.uninstallclient.setText(_translate("MelatiWindow", "UnInstall Melati"))
    self.updateclient.setText(_translate("MelatiWindow", "Update Melati"))
    self.deleteclient.setText(_translate("MelatiWindow", "Delete Melati"))
    self.MainTab.setTabText(self.MainTab.indexOf(self.uninstalltab), _translate("MelatiWindow", "Update/Uninstall"))
if __name__ == "__main__":
    import sys

    # Standard Qt bootstrap: create the application, attach the generated
    # UI to a fresh main window, show it, and hand control to the event loop.
    qt_app = QtWidgets.QApplication(sys.argv)
    main_window = QtWidgets.QMainWindow()
    ui = Ui_MelatiWindow()
    ui.setupUi(main_window)
    main_window.show()
    sys.exit(qt_app.exec_())
| 55.268353
| 879
| 0.694441
|
4a056f99bb2dd9ff138514b9eed262545f75c327
| 1,171
|
py
|
Python
|
{{ cookiecutter.project_name }}/roles/tasks.py
|
marksweb/cookiecutter-django
|
f8ea97593f768f3d1cc260838d2913bdb13026c4
|
[
"MIT"
] | 13
|
2020-11-12T06:37:59.000Z
|
2022-02-10T11:27:28.000Z
|
{{ cookiecutter.project_name }}/roles/tasks.py
|
marksweb/cookiecutter-django
|
f8ea97593f768f3d1cc260838d2913bdb13026c4
|
[
"MIT"
] | 10
|
2018-05-09T14:35:05.000Z
|
2019-10-22T09:03:05.000Z
|
{{ cookiecutter.project_name }}/roles/tasks.py
|
marksweb/cookiecutter-django
|
f8ea97593f768f3d1cc260838d2913bdb13026c4
|
[
"MIT"
] | 6
|
2018-03-07T18:04:22.000Z
|
2019-03-23T18:38:05.000Z
|
import os
from jinja2 import Environment, FileSystemLoader
from invoke import run, task
def _template_file(template, destination, template_dir='/var/www/deploy/'):
    """Render *template* (looked up in *template_dir*) against ``os.environ``
    and write the result to *destination*, replacing any existing file."""
    env = Environment(loader=FileSystemLoader(template_dir))
    rendered = env.get_template(template).render(os.environ)
    # Remove a stale copy first so the write starts from a fresh file.
    if os.path.isfile(destination):
        os.unlink(destination)
    with open(destination, 'w') as output:
        output.write(rendered)
@task
def development(context):
    """
    Set up and launch the development environment.
    """
    # Run the bootstrap commands in order; `run` raises on failure,
    # so a failing step aborts the sequence just like the original.
    for command in (
        'pip3 install -U pip',
        'make requirements-dev',
        'pre-commit install',
        'supervisord -n',
    ):
        run(command)
@task
def testing(context):
    """
    Install dev requirements and run the unit tests (CI entry point).
    """
    for command in ('make requirements-dev', 'make test'):
        run(command)
@task
def pgbouncer(context):
    """
    Render the pgbouncer config and start pgbouncer for local development.
    """
    _template_file("pgbouncer/dev/pgbouncer.ini", "/tmp/pgbouncer.ini")
    # libc-ares2 is a runtime dependency of the pgbouncer binary.
    run("apt-get install -y libc-ares2")
    run("chmod +x /usr/sbin/pgbouncer")
    run("/usr/sbin/pgbouncer -u www-data /tmp/pgbouncer.ini")
| 23.897959
| 75
| 0.675491
|
4a057022b91377667b45951ea0009a8333c40ad2
| 4,987
|
py
|
Python
|
venv/lib/python3.6/site-packages/compressor/cache.py
|
exdeam/opencrm
|
dfdcfdf99f0b42eb3959171927cb6574583f5ee0
|
[
"MIT"
] | 3
|
2020-03-30T13:11:57.000Z
|
2020-04-22T13:55:31.000Z
|
venv/lib/python3.6/site-packages/compressor/cache.py
|
exdeam/opencrm
|
dfdcfdf99f0b42eb3959171927cb6574583f5ee0
|
[
"MIT"
] | 9
|
2020-10-29T08:03:28.000Z
|
2021-09-08T01:21:10.000Z
|
venv/lib/python3.6/site-packages/compressor/cache.py
|
exdeam/opencrm
|
dfdcfdf99f0b42eb3959171927cb6574583f5ee0
|
[
"MIT"
] | 2
|
2021-01-06T19:25:07.000Z
|
2021-05-14T02:00:19.000Z
|
import json
import hashlib
import os
import socket
import time
from importlib import import_module
from django.core.cache import caches
from django.core.files.base import ContentFile
from django.utils import six
from django.utils.encoding import force_text, smart_bytes
from django.utils.functional import SimpleLazyObject
from compressor.conf import settings
from compressor.storage import default_storage
from compressor.utils import get_mod_func
# Module-level memo for the resolved COMPRESS_CACHE_KEY_FUNCTION; populated
# lazily by get_cachekey() on first use.
_cachekey_func = None
def get_hexdigest(plaintext, length=None):
    """Return the SHA-256 hex digest of *plaintext*, truncated to *length*
    characters when a truthy length is given."""
    digest = hashlib.sha256(smart_bytes(plaintext)).hexdigest()
    return digest[:length] if length else digest
def simple_cachekey(key):
    """Default cache-key builder: prefix *key* with 'django_compressor.'."""
    return 'django_compressor.' + force_text(key)
def socket_cachekey(key):
    """Cache-key builder that also namespaces by the local hostname."""
    return 'django_compressor.{}.{}'.format(socket.gethostname(), force_text(key))
def get_cachekey(*args, **kwargs):
    """Build a cache key via the configured COMPRESS_CACHE_KEY_FUNCTION.

    The dotted-path setting is resolved once and memoised in the module-level
    ``_cachekey_func``; all arguments are forwarded to that function.

    Raises:
        ImportError: if the configured function cannot be imported/resolved.
    """
    global _cachekey_func
    if _cachekey_func is None:
        try:
            mod_name, func_name = get_mod_func(
                settings.COMPRESS_CACHE_KEY_FUNCTION)
            _cachekey_func = getattr(import_module(mod_name), func_name)
        except (AttributeError, ImportError, TypeError) as e:
            # Re-raise uniformly as ImportError with the offending path.
            raise ImportError("Couldn't import cache key function %s: %s" %
                              (settings.COMPRESS_CACHE_KEY_FUNCTION, e))
    return _cachekey_func(*args, **kwargs)
def get_mtime_cachekey(filename):
    """Cache key under which the mtime of *filename* is memoised."""
    return get_cachekey("mtime." + get_hexdigest(filename))
def get_offline_hexdigest(render_template_string):
    """Hex digest of an offline-rendered template string.

    STATIC_URL occurrences are stripped first so the digest does not change
    when the deployment's static URL prefix changes.
    """
    return get_hexdigest(
        # Make the hexdigest determination independent of STATIC_URL
        render_template_string.replace(
            # Cast ``settings.STATIC_URL`` to a string to allow it to be
            # a string-alike object to e.g. add ``SCRIPT_NAME`` WSGI param
            # as a *path prefix* to the output URL.
            # See https://code.djangoproject.com/ticket/25598.
            six.text_type(settings.STATIC_URL), ''
        )
    )
def get_offline_cachekey(source):
    """Cache key for the offline-compressed rendering of *source*."""
    return get_cachekey("offline." + get_offline_hexdigest(source))
def get_offline_manifest_filename():
    """Storage path of the offline manifest inside the compressor output dir."""
    return os.path.join(
        settings.COMPRESS_OUTPUT_DIR.strip('/'),
        settings.COMPRESS_OFFLINE_MANIFEST,
    )
# In-process cache of the parsed offline manifest; populated lazily by
# get_offline_manifest() and cleared by flush_offline_manifest().
_offline_manifest = None
def get_offline_manifest():
    """Load (once) and return the offline manifest as a dict.

    A missing manifest file yields an empty dict; the parsed result is
    memoised in the module-level ``_offline_manifest``.
    """
    global _offline_manifest
    if _offline_manifest is None:
        filename = get_offline_manifest_filename()
        if default_storage.exists(filename):
            with default_storage.open(filename) as fp:
                _offline_manifest = json.loads(fp.read().decode('utf8'))
        else:
            _offline_manifest = {}
    return _offline_manifest
def flush_offline_manifest():
    """Discard the memoised offline manifest so the next access re-reads it."""
    global _offline_manifest
    _offline_manifest = None
def write_offline_manifest(manifest):
    """Serialise *manifest* to JSON, persist it, and drop the in-memory copy."""
    payload = json.dumps(manifest, indent=2).encode('utf8')
    default_storage.save(get_offline_manifest_filename(), ContentFile(payload))
    # Invalidate the memoised manifest so readers pick up the new contents.
    flush_offline_manifest()
def get_templatetag_cachekey(compressor, mode, kind):
    """Cache key for one rendered {% compress %} template tag instance."""
    tag_key = "templatetag.%s.%s.%s" % (compressor.cachekey, mode, kind)
    return get_cachekey(tag_key)
def get_mtime(filename):
    """Return the mtime of *filename*, optionally memoised in the cache.

    When COMPRESS_MTIME_DELAY is falsy the filesystem is consulted directly;
    otherwise the value is cached for that many seconds.
    """
    if not settings.COMPRESS_MTIME_DELAY:
        return os.path.getmtime(filename)
    key = get_mtime_cachekey(filename)
    mtime = cache.get(key)
    if mtime is None:
        mtime = os.path.getmtime(filename)
        cache.set(key, mtime, settings.COMPRESS_MTIME_DELAY)
    return mtime
def get_hashed_mtime(filename, length=12):
    """Return a *length*-char hex digest of the file's integer mtime,
    or None when the file cannot be resolved/stat'ed."""
    try:
        mtime = int(get_mtime(os.path.realpath(filename)))
    except OSError:
        return None
    return get_hexdigest(str(mtime), length)
def get_hashed_content(filename, length=12):
    """Return a *length*-character hex digest of the file's raw contents.

    Returns None when the file cannot be resolved or read, mirroring the
    contract of ``get_hashed_mtime``.
    """
    try:
        filename = os.path.realpath(filename)
        # Read as bytes: the digest is over the raw contents, no decoding.
        # should we make sure that file is utf-8 encoded?
        with open(filename, 'rb') as file:
            return get_hexdigest(file.read(), length)
    except OSError:
        # Previously only realpath() was guarded; a file that vanished or
        # became unreadable before open() raised out of this function,
        # unlike the mtime variant. Return None in that case too.
        return None
def get_precompiler_cachekey(command, contents):
    """SHA-1 hex key identifying one precompiler invocation over *contents*."""
    raw = smart_bytes('precompiler.%s.%s' % (command, contents))
    return hashlib.sha1(raw).hexdigest()
def cache_get(key):
    """Mint-cache read: return the stored value, or None while it is being
    refreshed past its soft deadline (the first caller to see a stale entry
    triggers the refresh window and recomputes)."""
    entry = cache.get(key)
    if entry is None:
        return None
    value, refresh_at, being_refreshed = entry
    stale = time.time() > refresh_at
    if stale and not being_refreshed:
        # Keep serving the stale value to other callers for MINT_DELAY
        # seconds while this caller recomputes it.
        cache_set(key, value, refreshed=True,
                  timeout=settings.COMPRESS_MINT_DELAY)
        return None
    return value
def cache_set(key, val, refreshed=False, timeout=None):
    """Mint-cache write: store *val* together with its soft refresh deadline.

    The real cache timeout is extended by COMPRESS_MINT_DELAY so the stale
    value survives long enough to be served during a refresh.
    """
    if timeout is None:
        timeout = settings.COMPRESS_REBUILD_TIMEOUT
    refresh_time = time.time() + timeout
    return cache.set(key, (val, refresh_time, refreshed),
                     timeout + settings.COMPRESS_MINT_DELAY)
# Resolved lazily so importing this module does not require the configured
# cache backend (settings.COMPRESS_CACHE_BACKEND) to be ready yet.
cache = SimpleLazyObject(lambda: caches[settings.COMPRESS_CACHE_BACKEND])
| 30.042169
| 91
| 0.702827
|
4a05704c6ccc68d2488993f64e173e4eb8713d0d
| 6,901
|
py
|
Python
|
pysm4.py
|
moehuster/test_union_pay
|
a7b523fd5d1dbd17d19c9cc3df27d7976289d29c
|
[
"MIT"
] | 1
|
2019-11-06T09:01:06.000Z
|
2019-11-06T09:01:06.000Z
|
pysm4.py
|
moehuster/test_union_pay
|
a7b523fd5d1dbd17d19c9cc3df27d7976289d29c
|
[
"MIT"
] | 1
|
2019-11-06T09:02:13.000Z
|
2019-11-16T05:18:36.000Z
|
pysm4.py
|
moehuster/test_union_pay
|
a7b523fd5d1dbd17d19c9cc3df27d7976289d29c
|
[
"MIT"
] | 3
|
2019-04-01T03:54:20.000Z
|
2020-05-31T04:00:44.000Z
|
#-*-coding:utf-8-*-
''' pysm4 implementation '''
import copy
#Expanded SM4 box table
SM4_BOXES_TABLE = [
0xd6, 0x90, 0xe9, 0xfe, 0xcc, 0xe1, 0x3d, 0xb7, 0x16, 0xb6, 0x14, 0xc2, 0x28, 0xfb, 0x2c,
0x05, 0x2b, 0x67, 0x9a, 0x76, 0x2a, 0xbe, 0x04, 0xc3, 0xaa, 0x44, 0x13, 0x26, 0x49, 0x86,
0x06, 0x99, 0x9c, 0x42, 0x50, 0xf4, 0x91, 0xef, 0x98, 0x7a, 0x33, 0x54, 0x0b, 0x43, 0xed,
0xcf, 0xac, 0x62, 0xe4, 0xb3, 0x1c, 0xa9, 0xc9, 0x08, 0xe8, 0x95, 0x80, 0xdf, 0x94, 0xfa,
0x75, 0x8f, 0x3f, 0xa6, 0x47, 0x07, 0xa7, 0xfc, 0xf3, 0x73, 0x17, 0xba, 0x83, 0x59, 0x3c,
0x19, 0xe6, 0x85, 0x4f, 0xa8, 0x68, 0x6b, 0x81, 0xb2, 0x71, 0x64, 0xda, 0x8b, 0xf8, 0xeb,
0x0f, 0x4b, 0x70, 0x56, 0x9d, 0x35, 0x1e, 0x24, 0x0e, 0x5e, 0x63, 0x58, 0xd1, 0xa2, 0x25,
0x22, 0x7c, 0x3b, 0x01, 0x21, 0x78, 0x87, 0xd4, 0x00, 0x46, 0x57, 0x9f, 0xd3, 0x27, 0x52,
0x4c, 0x36, 0x02, 0xe7, 0xa0, 0xc4, 0xc8, 0x9e, 0xea, 0xbf, 0x8a, 0xd2, 0x40, 0xc7, 0x38,
0xb5, 0xa3, 0xf7, 0xf2, 0xce, 0xf9, 0x61, 0x15, 0xa1, 0xe0, 0xae, 0x5d, 0xa4, 0x9b, 0x34,
0x1a, 0x55, 0xad, 0x93, 0x32, 0x30, 0xf5, 0x8c, 0xb1, 0xe3, 0x1d, 0xf6, 0xe2, 0x2e, 0x82,
0x66, 0xca, 0x60, 0xc0, 0x29, 0x23, 0xab, 0x0d, 0x53, 0x4e, 0x6f, 0xd5, 0xdb, 0x37, 0x45,
0xde, 0xfd, 0x8e, 0x2f, 0x03, 0xff, 0x6a, 0x72, 0x6d, 0x6c, 0x5b, 0x51, 0x8d, 0x1b, 0xaf,
0x92, 0xbb, 0xdd, 0xbc, 0x7f, 0x11, 0xd9, 0x5c, 0x41, 0x1f, 0x10, 0x5a, 0xd8, 0x0a, 0xc1,
0x31, 0x88, 0xa5, 0xcd, 0x7b, 0xbd, 0x2d, 0x74, 0xd0, 0x12, 0xb8, 0xe5, 0xb4, 0xb0, 0x89,
0x69, 0x97, 0x4a, 0x0c, 0x96, 0x77, 0x7e, 0x65, 0xb9, 0xf1, 0x09, 0xc5, 0x6e, 0xc6, 0x84,
0x18, 0xf0, 0x7d, 0xec, 0x3a, 0xdc, 0x4d, 0x20, 0x79, 0xee, 0x5f, 0x3e, 0xd7, 0xcb, 0x39,
0x48,
]
# System parameter FK: four constants XORed into the master key words
# before the key schedule starts.
SM4_FK = [0xa3b1bac6, 0x56aa3350, 0x677d9197, 0xb27022dc]
# Fixed parameter CK: one 32-bit constant per round of the key schedule.
SM4_CK = [
    0x00070e15, 0x1c232a31, 0x383f464d, 0x545b6269,
    0x70777e85, 0x8c939aa1, 0xa8afb6bd, 0xc4cbd2d9,
    0xe0e7eef5, 0xfc030a11, 0x181f262d, 0x343b4249,
    0x50575e65, 0x6c737a81, 0x888f969d, 0xa4abb2b9,
    0xc0c7ced5, 0xdce3eaf1, 0xf8ff060d, 0x141b2229,
    0x30373e45, 0x4c535a61, 0x686f767d, 0x848b9299,
    0xa0a7aeb5, 0xbcc3cad1, 0xd8dfe6ed, 0xf4fb0209,
    0x10171e25, 0x2c333a41, 0x484f565d, 0x646b7279
]
# Cipher direction selectors accepted by CryptSM4.set_key().
SM4_ENCRYPT = 0
SM4_DECRYPT = 1
def rotl(x, n):
    """Rotate the 32-bit word *x* left by *n* bits; result masked to 32 bits."""
    mask = 0xffffffff
    return ((x << n) | (x >> (32 - n))) & mask
def get_uint32_be(key_data):
    """Pack the first four bytes of *key_data* into a big-endian 32-bit int."""
    return sum(key_data[i] << (8 * (3 - i)) for i in range(4))
def put_uint32_be(n):
    """Split the 32-bit integer *n* into a list of four big-endian bytes."""
    return [(n >> shift) & 0xff for shift in (24, 16, 8, 0)]
class CryptSM4(object):
    """SM4 block cipher (128-bit key, 128-bit block, 32 rounds).

    Usage: call set_key() with a 16-byte key and SM4_ENCRYPT or SM4_DECRYPT,
    then crypt_ecb() / crypt_cbc() on the data.
    NOTE(review): both crypt methods assume the input length is already a
    multiple of 16 bytes -- padding is the caller's responsibility.
    """
    def __init__(self, mode=SM4_ENCRYPT):
        # Expanded round keys (32 words), populated by set_key().
        self.sk = [0]*32
        # Cipher direction; set_key() overwrites this.
        self.mode = mode
    # Calculating round encryption key.
    # args:    [in] ka: a 32-bit unsigned value;
    # return: one round-key word sk[i], i in {0..31}.
    @classmethod
    def _round_key(cls, ka):
        """Derive one round-key word: S-box each byte, then the L' linear mix."""
        b = [0, 0, 0, 0]
        a = put_uint32_be(ka)
        b[0] = SM4_BOXES_TABLE[a[0]]
        b[1] = SM4_BOXES_TABLE[a[1]]
        b[2] = SM4_BOXES_TABLE[a[2]]
        b[3] = SM4_BOXES_TABLE[a[3]]
        bb = get_uint32_be(b[0:4])
        # L' transform for the key schedule: rotations by 13 and 23.
        rk = bb ^ (rotl(bb, 13)) ^ (rotl(bb, 23))
        return rk
    # One SM4 round over state words (x0, x1, x2, x3) with round key rk;
    # returns the new state word x0 ^ T(x1 ^ x2 ^ x3 ^ rk).
    @classmethod
    def _f(cls, x0, x1, x2, x3, rk):
        # "T algorithm" == "L algorithm" + "t algorithm".
        # args:    [in] ka: a 32-bit unsigned value;
        # return: c, computed with the linear "L" and nonlinear "t" transforms.
        def _sm4_l_t(ka):
            b = [0, 0, 0, 0]
            a = put_uint32_be(ka)
            b[0] = SM4_BOXES_TABLE[a[0]]
            b[1] = SM4_BOXES_TABLE[a[1]]
            b[2] = SM4_BOXES_TABLE[a[2]]
            b[3] = SM4_BOXES_TABLE[a[3]]
            bb = get_uint32_be(b[0:4])
            # L transform for the round function: rotations by 2, 10, 18, 24.
            c = bb ^ (rotl(bb, 2)) ^ (rotl(bb, 10)) ^ (rotl(bb, 18)) ^ (rotl(bb, 24))
            return c
        return x0 ^ _sm4_l_t(x1 ^ x2 ^ x3 ^ rk)
    def set_key(self, key, mode):
        """Expand the 16-byte *key* into 32 round keys in self.sk.

        For SM4_DECRYPT the round keys are reversed, which turns the same
        round structure into the inverse cipher.
        """
        k = [0]*36
        MK = [0, 0, 0, 0]
        MK[0] = get_uint32_be(key[0:4])
        MK[1] = get_uint32_be(key[4:8])
        MK[2] = get_uint32_be(key[8:12])
        MK[3] = get_uint32_be(key[12:16])
        # Whiten the key words with the system parameter FK.
        k[0:4] = list(map(lambda x, y: x ^ y, MK[0:4], SM4_FK[0:4]))
        for i in range(32):
            k[i + 4] = k[i] ^ (
                self._round_key(k[i + 1] ^ k[i + 2] ^ k[i + 3] ^ SM4_CK[i]))
            self.sk[i] = k[i + 4]
        self.mode = mode
        if mode == SM4_DECRYPT:
            # Decryption uses the round keys in reverse order.
            for idx in range(16):
                t = self.sk[idx]
                self.sk[idx] = self.sk[31 - idx]
                self.sk[31 - idx] = t
    def one_round(self, sk, in_put):
        """Process one 16-byte block through all 32 rounds with keys *sk*."""
        out_put = []
        ulbuf = [0]*36
        ulbuf[0] = get_uint32_be(in_put[0:4])
        ulbuf[1] = get_uint32_be(in_put[4:8])
        ulbuf[2] = get_uint32_be(in_put[8:12])
        ulbuf[3] = get_uint32_be(in_put[12:16])
        for idx in range(32):
            ulbuf[idx + 4] = self._f(ulbuf[idx], ulbuf[idx + 1],
                                     ulbuf[idx + 2], ulbuf[idx + 3], sk[idx])
        # Final R transform: output the last four state words in reverse order.
        out_put += put_uint32_be(ulbuf[35])
        out_put += put_uint32_be(ulbuf[34])
        out_put += put_uint32_be(ulbuf[33])
        out_put += put_uint32_be(ulbuf[32])
        return out_put
    def crypt_ecb(self, input_data):
        ''' SM4-ECB block encryption/decryption '''
        # Each 16-byte block is processed independently.
        i = 0
        output_data = []
        length = len(input_data)
        while length > 0:
            output_data += self.one_round(self.sk, input_data[i:i+16])
            i += 16
            length -= 16
        return bytes(output_data)
    def crypt_cbc(self, iv, input_data):
        ''' SM4-CBC buffer encryption/decryption '''
        i = 0
        output_data = []
        tmp_input = [0]*16
        if self.mode == SM4_ENCRYPT:
            length = len(input_data)
            while length > 0:
                # XOR plaintext with the previous ciphertext block (or IV),
                # then encrypt; the result becomes the next chaining value.
                tmp_input[0:16] = list(map(lambda x, y: x ^ y, input_data[i:i+16], iv[0:16]))
                output_data += self.one_round(self.sk, tmp_input[0:16])
                iv = copy.deepcopy(output_data[i:i+16])
                i += 16
                length -= 16
            return bytes(output_data)
        else:
            length = len(input_data)
            while length > 0:
                # Decrypt the ciphertext block, then XOR with the previous
                # ciphertext block (or IV) to recover the plaintext.
                output_data += self.one_round(self.sk, input_data[i:i+16])
                output_data[i:i+16] = list(map(lambda x, y: x ^ y, output_data[i:i+16], iv[0:16]))
                iv = copy.deepcopy(input_data[i:i + 16])
                i += 16
                length -= 16
            return bytes(output_data)
| 39.890173
| 98
| 0.55876
|
4a0570715b6198e1fd2c3c8efb5bc882060c2d19
| 3,411
|
py
|
Python
|
alipay/aop/api/domain/SubAccountBalanceFreezeOrder.py
|
antopen/alipay-sdk-python-all
|
8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c
|
[
"Apache-2.0"
] | 213
|
2018-08-27T16:49:32.000Z
|
2021-12-29T04:34:12.000Z
|
alipay/aop/api/domain/SubAccountBalanceFreezeOrder.py
|
antopen/alipay-sdk-python-all
|
8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c
|
[
"Apache-2.0"
] | 29
|
2018-09-29T06:43:00.000Z
|
2021-09-02T03:27:32.000Z
|
alipay/aop/api/domain/SubAccountBalanceFreezeOrder.py
|
antopen/alipay-sdk-python-all
|
8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c
|
[
"Apache-2.0"
] | 59
|
2018-08-27T16:59:26.000Z
|
2022-03-25T10:08:15.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.MultiCurrencyMoney import MultiCurrencyMoney
from alipay.aop.api.domain.SubAccountBaseInfo import SubAccountBaseInfo
class SubAccountBalanceFreezeOrder(object):
    """Alipay domain model for a sub-account balance freeze order.

    Provides dict serialization (`to_alipay_dict`) and deserialization
    (`from_alipay_dict`) for the Alipay OpenAPI wire format.
    """

    # Field names handled by the (de)serialization helpers, in wire order.
    _FIELDS = ('freeze_amount', 'memo', 'out_biz_no', 'source', 'sub_account_base_info')

    def __init__(self):
        self._freeze_amount = None
        self._memo = None
        self._out_biz_no = None
        self._source = None
        self._sub_account_base_info = None

    @property
    def freeze_amount(self):
        return self._freeze_amount

    @freeze_amount.setter
    def freeze_amount(self, value):
        # Coerce plain dicts from the wire into the domain object.
        if isinstance(value, MultiCurrencyMoney):
            self._freeze_amount = value
        else:
            self._freeze_amount = MultiCurrencyMoney.from_alipay_dict(value)

    @property
    def memo(self):
        return self._memo

    @memo.setter
    def memo(self, value):
        self._memo = value

    @property
    def out_biz_no(self):
        return self._out_biz_no

    @out_biz_no.setter
    def out_biz_no(self, value):
        self._out_biz_no = value

    @property
    def source(self):
        return self._source

    @source.setter
    def source(self, value):
        self._source = value

    @property
    def sub_account_base_info(self):
        return self._sub_account_base_info

    @sub_account_base_info.setter
    def sub_account_base_info(self, value):
        # Coerce plain dicts from the wire into the domain object.
        if isinstance(value, SubAccountBaseInfo):
            self._sub_account_base_info = value
        else:
            self._sub_account_base_info = SubAccountBaseInfo.from_alipay_dict(value)

    def to_alipay_dict(self):
        """Serialize all populated (truthy) fields into a plain dict."""
        params = dict()
        for attr in self._FIELDS:
            value = getattr(self, attr)
            if not value:
                continue
            # Nested domain objects know how to serialize themselves.
            if hasattr(value, 'to_alipay_dict'):
                params[attr] = value.to_alipay_dict()
            else:
                params[attr] = value
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Build an instance from a response dict; returns None for empty input."""
        if not d:
            return None
        o = SubAccountBalanceFreezeOrder()
        for attr in SubAccountBalanceFreezeOrder._FIELDS:
            if attr in d:
                # Property setters perform any needed type coercion.
                setattr(o, attr, d[attr])
        return o
| 31.293578
| 93
| 0.616535
|
4a0570722fd90f96c01aebde5d40cc98dd44e427
| 2,795
|
py
|
Python
|
Example/Gutenkunst2007/Lee_2003/Nets.py
|
bcdaniels/SloppyCell
|
17e68127a6aba19056a5067748a2d18241cc4d76
|
[
"BSD-3-Clause"
] | 2
|
2020-05-26T19:29:39.000Z
|
2020-08-26T20:54:52.000Z
|
Example/Gutenkunst2007/Lee_2003/Nets.py
|
bcdaniels/SloppyCell
|
17e68127a6aba19056a5067748a2d18241cc4d76
|
[
"BSD-3-Clause"
] | 1
|
2020-05-26T16:50:49.000Z
|
2021-07-08T20:35:35.000Z
|
Example/Gutenkunst2007/Lee_2003/Nets.py
|
jurquiza/SloppyCellUrquiza2019
|
a9f64d9d4172c82735813f09e48f36777a714e9c
|
[
"BSD-3-Clause"
] | 3
|
2017-09-12T03:12:01.000Z
|
2018-10-19T11:08:09.000Z
|
import scipy
from SloppyCell.ReactionNetworks import *

import LeeNet
reload(LeeNet)  # Python-2 builtin reload: pick up interactive edits to LeeNet
from LeeNet import net

# Network that is at unstimulated fixed point:
# relax the W = 0 network to steady state and copy the result.
net.set_var_ic('W', 0)
traj1 = Dynamics.integrate(net, [0, 1e5], rtol=1e-12)
unstimulated = net.copy('unstimulated')
for var in unstimulated.dynamicVars.keys():
    # Only overwrite numeric initial conditions; string ICs are expressions.
    if not isinstance(net.get_var_ic(var), str):
        unstimulated.set_var_ic(var, traj1.get_var_val_index(var, -1))

# Fig 2a: no Axin synthesis/degradation fluxes (v12, v14 fixed to 0).
fig2a = unstimulated.copy('fig2a')
fig2a.set_var_ic('v12', 0)
fig2a.set_var_optimizable('v12', False)
fig2a.set_var_ic('v14', 0)
fig2a.set_var_optimizable('v14', False)

# Fig 2b: as 2a, plus an initial pulse of X12.
fig2b = unstimulated.copy('fig2b')
fig2b.set_var_ic('v12', 0)
fig2b.set_var_optimizable('v12', False)
fig2b.set_var_ic('v14', 0)
fig2b.set_var_optimizable('v14', False)
fig2b.set_var_ic('X12', 0.2)

# Fig 2c: as 2a, with k2 disabled and a large X2 pool.
fig2c = unstimulated.copy('fig2c')
fig2c.set_var_ic('v12', 0)
fig2c.set_var_optimizable('v12', False)
fig2c.set_var_ic('v14', 0)
fig2c.set_var_optimizable('v14', False)
fig2c.set_var_ic('k2', 0)
fig2c.set_var_optimizable('k2', False)
fig2c.set_var_ic('X2', 1000)
# We also need to make sure Dsh0 is updated.
fig2c.set_var_ic('Dsh0', 1100)

# Fig 2d: as 2a, with k4 and k9 disabled.
fig2d = unstimulated.copy('fig2d')
fig2d.set_var_ic('v12', 0)
fig2d.set_var_optimizable('v12', False)
fig2d.set_var_ic('v14', 0)
fig2d.set_var_optimizable('v14', False)
fig2d.set_var_ic('k4', 0)
fig2d.set_var_optimizable('k4', False)
fig2d.set_var_ic('k9', 0)
fig2d.set_var_optimizable('k9', False)

# Fig 2e: as 2a, with a large TCF pool.
fig2e = unstimulated.copy('fig2e')
fig2e.set_var_ic('v12', 0)
fig2e.set_var_optimizable('v12', False)
fig2e.set_var_ic('v14', 0)
fig2e.set_var_optimizable('v14', False)
fig2e.set_var_ic('TCF0', 1000)
# X11, X13, and X14 are assumed to be in equilibrium. Adding TCF changes this
# equilibrium and thus changes the initial concentration of X11 we need to
# use for this integration.
# This number results from constraining the total amount of non-active BCatenin
# to be equal to the steady state unstimulated value: (Denoted BcNA)
BcNA = unstimulated.get_var_ic('X11')
TCF0 = fig2e.get_var_ic('TCF0')
K16 = fig2e.get_var_ic('K16')
# Quadratic-equilibrium solution for free X11 given the TCF binding constant.
val = 0.5*(BcNA - K16 - TCF0 + scipy.sqrt(4*BcNA*K16 + (-BcNA+K16+TCF0)**2))
fig2e.set_var_ic('X11', val)

# Fig 6: transient Wnt stimulation W(t) = exp(-lam * t).
fig6a = unstimulated.copy('transient_a')
fig6a.add_parameter('lam', 1./20, is_optimizable=False)
fig6a.add_assignment_rule('W', 'exp(-lam*time)')

# Fig 6b/6c: scale v14 and k15 up/down five-fold relative to 6a.
fig6b = fig6a.copy('transient_b')
fig6b.set_var_ic('v14', fig6a.get_var_ic('v14')*5)
fig6b.set_var_ic('k15', fig6a.get_var_ic('k15')*5)

fig6c = fig6a.copy('transient_c')
fig6c.set_var_ic('v14', fig6a.get_var_ic('v14')/5)
fig6c.set_var_ic('k15', fig6a.get_var_ic('k15')/5)

# All networks, paired element-wise with their integration time windows (minutes).
networks = [fig2a, fig2b, fig2c, fig2d, fig2e, fig6a, fig6b, fig6c]
int_times = [(0, 3*60), (0, 3*60), (0, 3*60), (0, 3*60), (0, 3*60),
             (0, 16*60), (0, 16*60), (0, 16*60)]
| 33.27381
| 79
| 0.728444
|
4a0571f29629bdcfe8eb8e492ce5cd171e19c824
| 8,368
|
py
|
Python
|
test/functional/feature_proxy.py
|
GroinGuy/GroinCoin-GXG-1.2.0
|
d71c1b200683a77ccf797d8a500e468351da5ee0
|
[
"MIT"
] | 1
|
2019-06-15T00:27:02.000Z
|
2019-06-15T00:27:02.000Z
|
test/functional/feature_proxy.py
|
GroinGuy/GroinCoin-GXG
|
d71c1b200683a77ccf797d8a500e468351da5ee0
|
[
"MIT"
] | null | null | null |
test/functional/feature_proxy.py
|
GroinGuy/GroinCoin-GXG
|
d71c1b200683a77ccf797d8a500e468351da5ee0
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2015-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test groincoind with different proxy configuration.
Test plan:
- Start groincoind's with different proxy configurations
- Use addnode to initiate connections
- Verify that proxies are connected to, and the right connection command is given
- Proxy configurations to test on groincoind side:
- `-proxy` (proxy everything)
- `-onion` (proxy just onions)
- `-proxyrandomize` Circuit randomization
- Proxy configurations to test on proxy side,
- support no authentication (other proxy)
- support no authentication + user/pass authentication (Tor)
- proxy on IPv6
- Create various proxies (as threads)
- Create groincoinds that connect to them
- Manipulate the groincoinds using addnode (onetry) an observe effects
addnode connect to IPv4
addnode connect to IPv6
addnode connect to onion
addnode connect to generic DNS name
"""
import socket
import os
from test_framework.socks5 import Socks5Configuration, Socks5Command, Socks5Server, AddressType
from test_framework.test_framework import GroincoinTestFramework
from test_framework.util import (
PORT_MIN,
PORT_RANGE,
assert_equal,
)
from test_framework.netutil import test_ipv6_local
RANGE_BEGIN = PORT_MIN + 2 * PORT_RANGE  # Start after p2p and rpc ports

class ProxyTest(GroincoinTestFramework):
    """Functional test: node proxy configuration (-proxy / -onion / -proxyrandomize)
    against local SOCKS5 servers, on IPv4 and (when available) IPv6."""

    def set_test_params(self):
        # Four nodes, each started with a different proxy configuration in setup_nodes().
        self.num_nodes = 4

    def setup_nodes(self):
        self.have_ipv6 = test_ipv6_local()
        # Create two proxies on different ports
        # ... one unauthenticated
        self.conf1 = Socks5Configuration()
        # Port offset by PID so parallel test runs don't collide.
        self.conf1.addr = ('127.0.0.1', RANGE_BEGIN + (os.getpid() % 1000))
        self.conf1.unauth = True
        self.conf1.auth = False
        # ... one supporting authenticated and unauthenticated (Tor)
        self.conf2 = Socks5Configuration()
        self.conf2.addr = ('127.0.0.1', RANGE_BEGIN + 1000 + (os.getpid() % 1000))
        self.conf2.unauth = True
        self.conf2.auth = True
        if self.have_ipv6:
            # ... one on IPv6 with similar configuration
            self.conf3 = Socks5Configuration()
            self.conf3.af = socket.AF_INET6
            self.conf3.addr = ('::1', RANGE_BEGIN + 2000 + (os.getpid() % 1000))
            self.conf3.unauth = True
            self.conf3.auth = True
        else:
            self.log.warning("Testing without local IPv6 support")

        self.serv1 = Socks5Server(self.conf1)
        self.serv1.start()
        self.serv2 = Socks5Server(self.conf2)
        self.serv2.start()
        if self.have_ipv6:
            self.serv3 = Socks5Server(self.conf3)
            self.serv3.start()

        # Note: proxies are not used to connect to local nodes
        # this is because the proxy to use is based on CService.GetNetwork(), which return NET_UNROUTABLE for localhost
        args = [
            ['-listen', '-proxy=%s:%i' % (self.conf1.addr),'-proxyrandomize=1'],
            ['-listen', '-proxy=%s:%i' % (self.conf1.addr),'-onion=%s:%i' % (self.conf2.addr),'-proxyrandomize=0'],
            ['-listen', '-proxy=%s:%i' % (self.conf2.addr),'-proxyrandomize=1'],
            []
            ]
        if self.have_ipv6:
            args[3] = ['-listen', '-proxy=[%s]:%i' % (self.conf3.addr),'-proxyrandomize=0', '-noonion']
        self.add_nodes(self.num_nodes, extra_args=args)
        self.start_nodes()

    def node_test(self, node, proxies, auth, test_onion=True):
        """Drive `node` through addnode connections and assert each proxy in
        `proxies` (indexed ipv4, ipv6, onion, dns) saw the right SOCKS5 command.

        Returns the list of observed Socks5Command objects.
        """
        rv = []
        # Test: outgoing IPv4 connection through node
        node.addnode("15.61.23.23:1234", "onetry")
        cmd = proxies[0].queue.get()
        assert(isinstance(cmd, Socks5Command))
        # Note: groincoind's SOCKS5 implementation only sends atyp DOMAINNAME, even if connecting directly to IPv4/IPv6
        assert_equal(cmd.atyp, AddressType.DOMAINNAME)
        assert_equal(cmd.addr, b"15.61.23.23")
        assert_equal(cmd.port, 1234)
        if not auth:
            assert_equal(cmd.username, None)
            assert_equal(cmd.password, None)
        rv.append(cmd)

        if self.have_ipv6:
            # Test: outgoing IPv6 connection through node
            node.addnode("[1233:3432:2434:2343:3234:2345:6546:4534]:5443", "onetry")
            cmd = proxies[1].queue.get()
            assert(isinstance(cmd, Socks5Command))
            # Note: groincoind's SOCKS5 implementation only sends atyp DOMAINNAME, even if connecting directly to IPv4/IPv6
            assert_equal(cmd.atyp, AddressType.DOMAINNAME)
            assert_equal(cmd.addr, b"1233:3432:2434:2343:3234:2345:6546:4534")
            assert_equal(cmd.port, 5443)
            if not auth:
                assert_equal(cmd.username, None)
                assert_equal(cmd.password, None)
            rv.append(cmd)

        if test_onion:
            # Test: outgoing onion connection through node
            node.addnode("groincoinostk4e4re.onion:8333", "onetry")
            cmd = proxies[2].queue.get()
            assert(isinstance(cmd, Socks5Command))
            assert_equal(cmd.atyp, AddressType.DOMAINNAME)
            assert_equal(cmd.addr, b"groincoinostk4e4re.onion")
            assert_equal(cmd.port, 8333)
            if not auth:
                assert_equal(cmd.username, None)
                assert_equal(cmd.password, None)
            rv.append(cmd)

        # Test: outgoing DNS name connection through node
        node.addnode("node.noumenon:8333", "onetry")
        cmd = proxies[3].queue.get()
        assert(isinstance(cmd, Socks5Command))
        assert_equal(cmd.atyp, AddressType.DOMAINNAME)
        assert_equal(cmd.addr, b"node.noumenon")
        assert_equal(cmd.port, 8333)
        if not auth:
            assert_equal(cmd.username, None)
            assert_equal(cmd.password, None)
        rv.append(cmd)

        return rv

    def run_test(self):
        # basic -proxy
        self.node_test(self.nodes[0], [self.serv1, self.serv1, self.serv1, self.serv1], False)

        # -proxy plus -onion
        self.node_test(self.nodes[1], [self.serv1, self.serv1, self.serv2, self.serv1], False)

        # -proxy plus -onion, -proxyrandomize
        rv = self.node_test(self.nodes[2], [self.serv2, self.serv2, self.serv2, self.serv2], True)
        # Check that credentials as used for -proxyrandomize connections are unique
        credentials = set((x.username,x.password) for x in rv)
        assert_equal(len(credentials), len(rv))

        if self.have_ipv6:
            # proxy on IPv6 localhost
            self.node_test(self.nodes[3], [self.serv3, self.serv3, self.serv3, self.serv3], False, False)

        def networks_dict(d):
            # Re-key getnetworkinfo's 'networks' list by network name.
            r = {}
            for x in d['networks']:
                r[x['name']] = x
            return r

        # test RPC getnetworkinfo
        n0 = networks_dict(self.nodes[0].getnetworkinfo())
        for net in ['ipv4','ipv6','onion']:
            assert_equal(n0[net]['proxy'], '%s:%i' % (self.conf1.addr))
            assert_equal(n0[net]['proxy_randomize_credentials'], True)
        assert_equal(n0['onion']['reachable'], True)

        n1 = networks_dict(self.nodes[1].getnetworkinfo())
        for net in ['ipv4','ipv6']:
            assert_equal(n1[net]['proxy'], '%s:%i' % (self.conf1.addr))
            assert_equal(n1[net]['proxy_randomize_credentials'], False)
        assert_equal(n1['onion']['proxy'], '%s:%i' % (self.conf2.addr))
        assert_equal(n1['onion']['proxy_randomize_credentials'], False)
        assert_equal(n1['onion']['reachable'], True)

        n2 = networks_dict(self.nodes[2].getnetworkinfo())
        for net in ['ipv4','ipv6','onion']:
            assert_equal(n2[net]['proxy'], '%s:%i' % (self.conf2.addr))
            assert_equal(n2[net]['proxy_randomize_credentials'], True)
        assert_equal(n2['onion']['reachable'], True)

        if self.have_ipv6:
            n3 = networks_dict(self.nodes[3].getnetworkinfo())
            for net in ['ipv4','ipv6']:
                assert_equal(n3[net]['proxy'], '[%s]:%i' % (self.conf3.addr))
                assert_equal(n3[net]['proxy_randomize_credentials'], False)
            assert_equal(n3['onion']['reachable'], False)

if __name__ == '__main__':
    ProxyTest().main()
| 41.425743
| 123
| 0.625837
|
4a0571ff86c36789ca1d99f3836a354fb8bf0d90
| 740
|
py
|
Python
|
Tutorials/27fileRead.py
|
Aaryan-R-S/Python-Tutorials
|
89e5ec69e529511137263231d735410e92f0a65d
|
[
"MIT"
] | null | null | null |
Tutorials/27fileRead.py
|
Aaryan-R-S/Python-Tutorials
|
89e5ec69e529511137263231d735410e92f0a65d
|
[
"MIT"
] | null | null | null |
Tutorials/27fileRead.py
|
Aaryan-R-S/Python-Tutorials
|
89e5ec69e529511137263231d735410e92f0a65d
|
[
"MIT"
] | null | null | null |
# Read a text file and print its contents.
# 'rt' is read + text mode; use "rb" for read + binary mode.
# The "with" statement guarantees the file handle is closed even if an
# exception is raised while reading (no explicit close() needed).
with open('Tuts/27.txt', "rt") as openFile:
    # Print all lines one by one (don't read the file before it) --
    # for line in openFile:
    #     print(line)

    # READING --
    readFile = openFile.read()
    print(readFile)

    # OR read a single line per call:
    # readLine = openFile.readline()
    # print(readLine)
    # readLine = openFile.readline()
    # print(readLine)

    # OR read all lines into a list:
    # readLines = openFile.readlines()
    # print(readLines)

    # Print all characters on separate lines --
    # for char in readFile:
    #     print(char)

    # STEP BY STEP: read 10 characters at a time --
    # (the old comment said 5, but read(10) reads 10)
    # readFile = openFile.read(10)
    # print(readFile)
    # readFile = openFile.read(10)
    # print(readFile)
| 22.424242
| 60
| 0.664865
|
4a05726bff54b94cc7dc3865afcf698f5f8a146b
| 970
|
py
|
Python
|
aioftx/markets/markets/api.py
|
metta-team/aioftx
|
f5bd028e8bf40c55c1d4632802b792be113e0978
|
[
"MIT"
] | null | null | null |
aioftx/markets/markets/api.py
|
metta-team/aioftx
|
f5bd028e8bf40c55c1d4632802b792be113e0978
|
[
"MIT"
] | null | null | null |
aioftx/markets/markets/api.py
|
metta-team/aioftx
|
f5bd028e8bf40c55c1d4632802b792be113e0978
|
[
"MIT"
] | null | null | null |
from typing import Optional
from aioftx.session import FTXClientSession
from .schemas import (
GetMarketRequest,
GetMarketResponse,
GetMarketsRequest,
GetMarketsResponse,
Market,
)
async def get_markets(
    session: FTXClientSession,
    *,
    start_time: Optional[int] = None,
    end_time: Optional[int] = None
) -> list[Market]:
    """
    Get the list of all markets from the FTX API
    """
    request = GetMarketsRequest(start_time=start_time, end_time=end_time)
    async with session.get(request.url) as response:
        payload = await response.json()
    return GetMarketsResponse(**payload).data()
async def get_market(session: FTXClientSession, *, market_name: str) -> Market:
    """
    Get the details of a specific market from the FTX API
    """
    request = GetMarketRequest(market_name=market_name)
    async with session.get(request.url) as response:
        payload = await response.json()
    return GetMarketResponse(**payload).data()
| 26.216216
| 79
| 0.685567
|
4a057279ae8e370e548f6972d1d6326045c67aa3
| 988
|
py
|
Python
|
handy/network/upload.py
|
Al3xChen/handy
|
a4af40fad2e75876ecd1b5918f7ca5c325ae281c
|
[
"MIT"
] | 5
|
2018-04-03T10:24:04.000Z
|
2020-01-16T08:04:12.000Z
|
handy/network/upload.py
|
deep2cv/handy
|
a4af40fad2e75876ecd1b5918f7ca5c325ae281c
|
[
"MIT"
] | 1
|
2021-03-19T10:32:17.000Z
|
2021-03-19T10:32:17.000Z
|
handy/network/upload.py
|
Al3xChen/handy
|
a4af40fad2e75876ecd1b5918f7ca5c325ae281c
|
[
"MIT"
] | 1
|
2021-04-17T19:28:15.000Z
|
2021-04-17T19:28:15.000Z
|
# -*- coding: utf-8 -*-
"""
Created on Tue Jan 30 19:26:46 2018
@author: Frank
"""
from handy.crypto.encrypt import decryptfromfile
import os
def download_repo():
    """Prompt for a key and an encrypted repo file, decrypt the repo location,
    open a platform-specific terminal, then type a ``git clone`` command into
    it with pyautogui keystroke automation.
    """
    key = input('Type Key:\n')
    repo = input('Load Repo File:\n')
    # decryptfromfile returns the decrypted repo path/URL, falsy on failure.
    repo_path = decryptfromfile(repo,key)
    if not repo_path:
        print('decrypting failed')
        return
    from sys import platform
    if platform == "linux" or platform == "linux2":
        # Open a gnome terminal; refresh package lists, then keep the shell open.
        os.system("gnome-terminal -e 'bash -c \"sudo apt-get update; exec bash\"'")
    elif platform == "darwin":
        # macOS: no terminal automation implemented yet.
        pass
    elif platform == "win32":
        # Spawn a detached cmd.exe window.
        os.system("start /B start cmd.exe @cmd /k ")
    # Type the clone command into whichever terminal window now has focus.
    # NOTE(review): assumes the new terminal grabbed keyboard focus — timing dependent.
    import pyautogui
    pyautogui.typewrite('git clone %s'%repo_path,interval=0.01)
    pyautogui.press('enter')
def upload_repo():
    """Stage all changes, commit with a user-supplied message, and push to
    origin/master.

    The commit message is passed as a discrete argv element with no shell,
    so messages containing spaces or shell metacharacters work correctly —
    the previous ``os.system('git commit -m %s' % comment)`` broke on any
    multi-word message and allowed shell injection through the prompt.
    """
    import subprocess  # local import keeps module import side-effect free
    comment = input('Type Comment:\n')
    subprocess.run(['git', 'add', '.'])
    subprocess.run(['git', 'commit', '-m', comment])
    subprocess.run(['git', 'push', 'origin', 'master'])
def main():
    # Placeholder entry point; the module's functions are imported and
    # called directly by users instead.
    pass

if __name__ == '__main__':
    main()
| 25.333333
| 83
| 0.62247
|
4a0572bc026243d9eacb19f6ae262c917a140b3d
| 1,388
|
py
|
Python
|
tests/functional/trigger/alter/test_08.py
|
reevespaul/firebird-qa
|
98f16f425aa9ab8ee63b86172f959d63a2d76f21
|
[
"MIT"
] | null | null | null |
tests/functional/trigger/alter/test_08.py
|
reevespaul/firebird-qa
|
98f16f425aa9ab8ee63b86172f959d63a2d76f21
|
[
"MIT"
] | null | null | null |
tests/functional/trigger/alter/test_08.py
|
reevespaul/firebird-qa
|
98f16f425aa9ab8ee63b86172f959d63a2d76f21
|
[
"MIT"
] | null | null | null |
#coding:utf-8
#
# id: functional.trigger.alter.08
# title: ALTER TRIGGER - POSITION
# decription: ALTER TRIGGER - POSITION
#
# Dependencies:
# CREATE DATABASE
# CREATE TABLE
# CREATE TRIGGER
# SHOW TRIGGER
# tracker_id:
# min_versions: []
# versions: 1.0
# qmid: functional.trigger.alter.alter_trigger_08
import pytest
from firebird.qa import db_factory, isql_act, Action
# version: 1.0
# resources: None
# Regex substitutions applied to actual and expected output before comparison
# (strip '+'/'=' decorations and the "Trigger text" listing noise).
substitutions_1 = [('\\+.*', ''), ('\\=.*', ''), ('Trigger text.*', '')]

# DDL run once at database creation: a table plus a BEFORE UPDATE trigger
# whose sequence position the test will alter.
init_script_1 = """
CREATE TABLE test( id INTEGER NOT NULL CONSTRAINT unq UNIQUE, text VARCHAR(32));
SET TERM ^;
CREATE TRIGGER tg FOR test BEFORE UPDATE
AS
BEGIN
new.id=1;
END ^
SET TERM ;^
commit;
"""

db_1 = db_factory(sql_dialect=3, init=init_script_1)

# Test body: move trigger TG to sequence position 20, then show it.
test_script_1 = """
ALTER TRIGGER tg POSITION 20;
SHOW TRIGGER tg;
"""

act_1 = isql_act('db_1', test_script_1, substitutions=substitutions_1)

# Expected SHOW TRIGGER output (after substitutions): Sequence must read 20.
expected_stdout_1 = """
Triggers on Table TEST:
TG, Sequence: 20, Type: BEFORE UPDATE, Active
AS
BEGIN
new.id=1;
END
"""

@pytest.mark.version('>=1.0')
def test_1(act_1: Action):
    """Execute the ALTER TRIGGER script and compare cleaned output."""
    act_1.expected_stdout = expected_stdout_1
    act_1.execute()
    assert act_1.clean_expected_stdout == act_1.clean_stdout
| 22.754098
| 84
| 0.612392
|
4a057336fea0f10767024abe5c41e5dd15142f48
| 912
|
py
|
Python
|
sc2spreadsheet/configuration.py
|
krzosa/sc2spreadsheet
|
3c64b3a6cfb0c3431a18530f639f54d2f6ed1a63
|
[
"MIT"
] | null | null | null |
sc2spreadsheet/configuration.py
|
krzosa/sc2spreadsheet
|
3c64b3a6cfb0c3431a18530f639f54d2f6ed1a63
|
[
"MIT"
] | null | null | null |
sc2spreadsheet/configuration.py
|
krzosa/sc2spreadsheet
|
3c64b3a6cfb0c3431a18530f639f54d2f6ed1a63
|
[
"MIT"
] | null | null | null |
# Usernames of all accounts that you play on (ingame username not battle.net)
usernames = ["Sevnin", "Owen"]

# Paths to the directories with the replays.
# ex. C:\Users\sevni\Documents\StarCraft II\Accounts\102433852\2-S2-1-2701509\Replays\Multiplayer
# format: [r"pathToReplays1", r"pathToReplays2", r"pathToReplays3"] etc.
# ex.
# replaysDirectories = [r"C:\Users\sevni\Documents\StarCraft II\Accounts\102433852\2-S2-1-2701509\Replays\Multiplayer",
#                       r"C:\Users\sevni\Documents\StarCraft II\Accounts\461713565\2-S2-1-7737411\Replays\Multiplayer"]
replaysDirectories = [r"..\replays"]

# Google spreadsheet
# .json file that contains your google credentials
googleSpreadsheetCredentials = 'client_secret.json'

# Name of the spreadsheet in Google Sheets.
# Remember to add the mail from the Google API credentials to the
# spreadsheet's shared e-mails, or access will be denied!
sheetName = "Starcraft2Spreadsheet"
| 38
| 121
| 0.753289
|
4a057369f7a8e10463bb7e9d970096def6f9dc60
| 20,778
|
py
|
Python
|
src/epivizfileserver/measurements/measurementClass.py
|
jkanche/epivizFileParser
|
29caf7a0ee2dee6396cbd77ac2d50bb849607b17
|
[
"MIT"
] | 1
|
2021-02-11T09:46:07.000Z
|
2021-02-11T09:46:07.000Z
|
src/epivizfileserver/measurements/measurementClass.py
|
jkanche/epivizFileParser
|
29caf7a0ee2dee6396cbd77ac2d50bb849607b17
|
[
"MIT"
] | 3
|
2016-02-23T20:48:12.000Z
|
2021-09-22T00:36:19.000Z
|
src/epivizfileserver/measurements/measurementClass.py
|
jkanche/epivizFileParser
|
29caf7a0ee2dee6396cbd77ac2d50bb849607b17
|
[
"MIT"
] | 4
|
2020-07-29T14:54:50.000Z
|
2021-09-30T20:56:23.000Z
|
from ..handler import FileHandlerProcess
import parser
import pandas as pd
import requests
import numpy as np
from random import randrange
# import umsgpack
from aiocache import cached, Cache
from aiocache.serializers import JsonSerializer, PickleSerializer
# import logging
# logger = logging.getLogger(__name__)
from sanic.log import logger as logging
class Measurement(object):
    """
    Base class for managing measurements from files

    Args:
        mtype: Measurement type, either 'file' or 'db'
        mid: unique id to use for this measurement
        name: name of the measurement
        source: location of the measurement, if mtype is 'db' use table name, if file, file location
        datasource: is the database name if mtype is 'db' use database name, else 'files'
        annotation: annotation for this measurement, defaults to None
        metadata: metadata for this measurement, defaults to None
        isComputed: True if this measurement is Computed from other measurements, defaults to False
        isGenes: True if this measurement is an annotation (for example: reference genome hg19), defaults to False
        minValue: min value of all values, defaults to None
        maxValue: max value of all values, defaults to None
        columns: column names for the file
    """
    def __init__(self, mtype, mid, name, source, datasource, genome=None, annotation=None,
                 metadata=None, isComputed=False, isGenes=False, minValue=None, maxValue=None,
                 columns=None):
        self.mtype = mtype              # measurement_type (file/db)
        self.mid = mid                  # measurement_id (column name in db/file)
        self.name = name                # measurement_name
        self.source = source            # tbl name / file location
        self.datasource = datasource    # dbname / "files"
        self.annotation = annotation
        self.metadata = metadata
        self.isComputed = isComputed
        self.isGenes = isGenes
        self.minValue = minValue
        self.maxValue = maxValue
        self.columns = columns
        self.genome = genome
        if self.annotation is None:
            self.annotation = {}
        # Record the genome in the annotation dict so downstream consumers can read it.
        self.annotation["genome"] = genome

    def get_data(self, chr, start, end):
        """
        Get Data for this measurement

        Args:
            chr: chromosome
            start: genomic start
            end: genomic end
        """
        raise Exception("NotImplementedException")

    def get_status(self):
        """
        Get status of this measurement (most pertinent for files)
        """
        raise Exception("NotImplementedException")

    def get_measurement_name(self):
        """Get measurement name"""
        return self.name

    def get_measurement_id(self):
        """Get measurement id"""
        return self.mid

    def get_measurement_type(self):
        """Get measurement type"""
        return self.mtype

    def get_measurement_source(self):
        """Get source"""
        return self.source

    def get_measurement_annotation(self):
        """Get measurement annotation"""
        return self.annotation

    def get_measurement_genome(self):
        """Get measurement genome"""
        return self.genome

    def get_measurement_metadata(self):
        """Get measurement metadata"""
        return self.metadata

    def get_measurement_min(self):
        """Get measurement min value"""
        return self.minValue

    def get_measurement_max(self):
        """Get measurement max value"""
        return self.maxValue

    def is_file(self):
        """Is measurement a file ?"""
        if self.mtype == "db":
            return False
        return True

    def is_computed(self):
        """Is measurement computed ?"""
        return self.isComputed

    def is_gene(self):
        """is the file a genome annotation ?"""
        return self.isGenes

    def get_columns(self):
        """Return the metadata column names plus the measurement id column.

        BUGFIX: the previous implementation aliased ``self.metadata`` and
        appended ``self.mid`` directly into it, so every call permanently
        grew the measurement's metadata list. We copy before appending.
        """
        columns = []
        if self.metadata is not None:
            columns = list(self.metadata)
        columns.append(self.mid)
        return columns

    def bin_rows_legacy(self, data, chr, start, end, bins = 2000):
        """Bin genome by bin length and summarize the bin

        Args:
            data: DataFrame from the file
            chr: chromosome
            start: genomic start
            end: genomic end
            length: max rows to summarize the data frame into

        Returns:
            a binned data frame whose max rows is length
        """
        logging.debug("Measurement: %s\t%s\t%s" %(self.mid, self.name, "bin_rows"))
        freq = round((end-start)/bins)
        if end - start < bins:
            freq = 1
        # Index by (start, end) intervals so rows can be mapped onto the bin grid.
        data = data.set_index(['start', 'end'])
        data.index = pd.IntervalIndex.from_tuples(data.index)
        bins = pd.interval_range(start=start, end=end, freq=freq)
        bins_df = pd.DataFrame(index=bins)
        bins_df["chr"] = chr
        if self.metadata:
            for meta in self.metadata:
                bins_df[meta] = data[meta]
        for col in self.get_columns():
            bins_df[col] = None
        # map data to bins (last overlapping row wins per bin interval)
        for index, row in data.iterrows():
            for col in self.get_columns():
                bins_df.loc[index, col] = row[col]
        bins_df["start"] = bins_df.index.left
        bins_df["end"] = bins_df.index.right
        return pd.DataFrame(bins_df)

    def bin_rows(self, data, chr, start, end, columns=None, metadata=None, bins = 400):
        """Downsample `data` to at most `bins` rows by averaging value columns.

        Args:
            data: DataFrame with at least 'start', 'end' and `columns` columns
            chr: chromosome (unused here; kept for interface compatibility)
            start / end: genomic bounds (unused here; kept for interface compatibility)
            columns: value columns to average per bin (required when binning occurs)
            metadata: unused; kept for interface compatibility
            bins: maximum number of output rows

        Returns:
            (binned dataframe, None); small inputs are returned unchanged.
        """
        # Nothing to do if the data already fits (this also covers empty input).
        if len(data) <= bins:
            return data, None
        row_cut = pd.cut(data.index, bins=bins)
        rows = {}
        groups = data.groupby(row_cut)
        # Each bin spans from the first row's start to the last row's end.
        rows["start"] = groups["start"].first()
        rows["end"] = groups["end"].last()
        for col in columns:
            rows[col] = groups[col].mean()
        return pd.DataFrame.from_dict(rows), None

    def query(self, obj, query_params):
        """Query from db/source

        Args:
            obj: db obj
            query_params: query parameters to search
        """
        raise Exception("NotImplementedException")
class DbMeasurement(Measurement):
    """
    Class representing a database measurement

    In addition to params from the base measurement class -

    Args:
        dbConn: a database connection object

    Attributes:
        connection: a database connection object
    """
    def __init__(self, mtype, mid, name, source, datasource, dbConn, genome=None, annotation=None,
                 metadata=None, isComputed=False, isGenes=False, minValue=None, maxValue=None,
                 columns=None):
        super(DbMeasurement, self).__init__(mtype, mid, name, source, datasource, genome,
                                            annotation, metadata, isComputed, isGenes,
                                            minValue, maxValue, columns)
        # NOTE(review): queries are assembled with %-interpolation, not bound
        # parameters; inputs are expected to come from trusted route parsing,
        # not raw user input — confirm upstream validation.
        self.query_range = "select distinct %s from %s where chr=%s and end >= %s and start < %s order by chr, start"
        self.query_all = "select distinct %s from %s order by chr, start"
        self.connection = dbConn

    def query(self, obj, params):
        """Query from db/source

        Args:
            obj: the query string
            params: query parameters to interpolate into the template

        Returns:
            a dataframe of results from the database
        """
        query = obj % params
        df = pd.read_sql(query, con=self.connection)
        return df

    async def get_data(self, chr, start, end, bin=False):
        """Get data for a genomic region from database

        Args:
            chr (str): chromosome; None fetches all rows
            start (int): genomic start
            end (int): genomic end
            bin (bool): True to bin the results, defaults to False

        Returns:
            (dataframe, None) on success, ({}, error-string) on failure
        """
        query = None
        query_params = []
        # Columns to select: row id, coordinates, the value column, then metadata.
        query_ms = "id, chr, start, end, " + self.mid + " "
        if self.metadata is not None:
            metadata = ", ".join(self.metadata)
            query_ms = query_ms + ", " + metadata

        if self.isGenes:
            # Gene annotations additionally carry strand information.
            query_params = (
                str(query_ms) + ", strand",
                str(self.source),
                '"' + str(chr) + '"',
                int(start),
                int(end))
            query = self.query_range
        else:
            if chr is None:
                query_params = (
                    str(query_ms),
                    str(self.source))
                query = self.query_all
            else:
                query_params = (
                    str(query_ms),
                    str(self.source),
                    '"' + str(chr) + '"',
                    int(start),
                    int(end))
                query = self.query_range

        try:
            result = self.query(query, query_params)
            if bin:
                # BUGFIX: bin_rows returns a (dataframe, error) tuple and needs
                # the value columns; the old call passed neither, which crashed
                # for results longer than the bin count and returned a nested
                # tuple otherwise.
                result, _ = self.bin_rows(result, chr, start, end,
                                          columns=self.get_columns(),
                                          metadata=self.metadata)
            return result, None
        except Exception as e:
            return {}, str(e)
class FileMeasurement(Measurement):
    """
    Class for file based measurement

    In addition to params from the base `Measurement` class

    Args:
        fileHandler: an optional file handler object to process query requests (uses dask)
    """
    def __init__(self, mtype, mid, name, source, datasource="files", genome=None, annotation=None,
                 metadata=None, isComputed=False, isGenes=False, minValue=None, maxValue=None,
                 fileHandler=None, columns=None):
        super(FileMeasurement, self).__init__(mtype, mid, name, source, datasource, genome,
                                              annotation, metadata, isComputed, isGenes,
                                              minValue, maxValue, columns)
        self.fileHandler = fileHandler
        self.columns = columns

    def create_parser_object(self, type, name, columns=None):
        """Create appropriate File class based on file format

        Args:
            type (str): format of file
            name (str): location of file
            columns ([str]): list of columns from file

        Returns:
            An file object
        """
        from ..parser.utils import create_parser_object as cpo
        return cpo(type, name, columns)

    async def search_gene(self, query, maxResults):
        """Search gene annotations in this file for `query`.

        Args:
            query (str): search string
            maxResults (int): cap on the number of matches

        Returns:
            (matches, error-as-string). NOTE(review): the error is always
            stringified, so a no-error result carries the literal string
            "None" rather than None — callers appear to depend on this.
        """
        result = None
        err = None
        logging.debug("File Measurement: %s\t%s\t%s" %(self.mid, self.name, "file_gene_search"))
        try:
            if self.fileHandler is None:
                # No handler process available: parse the file directly.
                file = self.create_parser_object(self.mtype, self.source, self.columns)
                result, err = file.search_gene(query, maxResults)
            else:
                result, err = await self.fileHandler.handleSearch(self.source, self.mtype, query, maxResults)
            return result, str(err)
        except Exception as e:
            logging.error("File Measurement: %s\t%s\t%s" %(self.mid, self.name, "file_gene_search"), exc_info=True)
            return {}, str(e)

    async def get_data(self, chr, start, end, bins, bin=True):
        """Get data for a genomic region from file

        Args:
            chr (str): chromosome
            start (int): genomic start
            end (int): genomic end
            bins (int): number of bins to summarize into
            bin (bool): True to bin the results, defaults to True

        Returns:
            (dataframe, error-as-string) on success, ({}, error-string) on failure
        """
        result = None
        err = None
        logging.debug("File Measurement: %s\t%s\t%s" %(self.mid, self.name, "file_get_data"))
        try:
            if self.fileHandler is None:
                file = self.create_parser_object(self.mtype, self.source, self.columns)
                result, err = file.getRange(chr, start, end, bins=bins)
            else:
                result, err = await self.fileHandler.handleFile(self.source, self.mtype, chr, start, end, bins=bins)

            # rename columns from score to mid for BigWigs
            if self.mtype in ["BigWig", "bigwig", "bw", "bigWig"]:
                result = result.rename(columns={'score': self.mid})
            elif self.mtype in ['Tabix', 'tabix', 'tbx'] and not self.isGenes:
                # BUGFIX: list.extend() returns None, so the previous
                # `["chr","start","end"].extend(self.columns)` assigned None to
                # result.columns; concatenation builds the intended header list.
                result.columns = ["chr", "start", "end"] + self.columns
                cols = ["chr", "start", "end"]
                cols.append(self.mid)
                result = result[cols]
            elif self.mtype == "tiledb":
                cols = ["chr", "start", "end", self.mid]
                cols.extend(self.metadata)
                result = result[cols]
                result = result.fillna(0)
            elif self.mtype == "splicing":
                pass

            if bin and not self.isGenes and self.mtype not in ["tiledb", "interaction_bigbed"]:
                result, err = self.bin_rows(result, chr, start, end, bins=bins,
                                            columns=self.get_columns(), metadata=self.metadata)
            return result, str(err)
        except Exception as e:
            logging.error("File Measurement: %s\t%s\t%s" %(self.mid, self.name, "file_get_data"), exc_info=True)
            return {}, str(e)

    async def get_status(self):
        """Check whether the underlying file is readable; returns (status, error)."""
        result = 0
        err = None
        logging.debug("File Measurement: %s\t%s\t%s" %(self.mid, self.name, "file_get_status"))
        file = self.create_parser_object(self.mtype, self.source, self.columns)
        result, err = file.get_status()
        return result, err
class ComputedMeasurement(Measurement):
    """
    Class for representing computed measurements

    In addition to params on base `Measurement` class -

    Args:
        computeFunc: a `NumPy` function to apply on our dataframe
        source: defaults to 'computed'
        datasource: defaults to 'computed'
    """
    def __init__(self, mtype, mid, name, measurements, source="computed", computeFunc=None, datasource="computed", genome=None, annotation=None, metadata=None, isComputed=True, isGenes=False, fileHandler=None, columns=None, computeAxis=1):
        # BUG FIX: the old default `annotation={"group": "computed"}` was a
        # mutable default argument shared by every instance; use a None
        # sentinel and build a fresh dict per instance instead.
        if annotation is None:
            annotation = {"group": "computed"}
        super(ComputedMeasurement, self).__init__(mtype, mid, name, source, datasource, genome, annotation, metadata, isComputed, isGenes, columns=columns)
        self.measurements = measurements
        self.computeFunc = computeFunc
        self.fileHandler = fileHandler
        self.computeAxis = computeAxis

    def get_columns(self):
        # Column names are the mids of the underlying measurements.
        columns = []
        for m in self.measurements:
            columns.append(m.mid)
        return columns

    def computeWrapper(self, computeFunc, columns):
        """a wrapper for the 'computeFunc' function

        Args:
            computeFunc: a `NumPy` compute function
            columns: columns from file to apply

        Returns:
            a row-wise apply function (returns None if any input is None)
        """
        def computeApply(row):
            rowVals = []
            for k in row.keys():
                if k in columns:
                    rowVals.append(row[k])
            if None in rowVals:
                return None
            return computeFunc(rowVals)
        return computeApply

    # @cached(ttl=None, cache=Cache.MEMORY, serializer=PickleSerializer(), namespace="computedgetdata")
    async def get_data(self, chr, start, end, bins, dropna=True):
        """Get data for a genomic region from files and apply the `computeFunc` function

        Args:
            chr (str): chromosome
            start (int): genomic start
            end (int): genomic end
            bins (int): number of bins
            dropna (bool): True to dropna from a measurement since any computation is going to fail on this row

        Returns:
            tuple of (dataframe with results, error)
        """
        # CONSISTENCY FIX: this success-path trace was logged at ERROR level;
        # sibling classes log the same message at DEBUG.
        logging.debug("Computed Measurement: %s\t%s\t%s" %(self.mid, self.name, "file_get_data"))
        result = []
        err = None
        # With a single source measurement there is nothing to align, so
        # skip binning on the child query.
        tbin = True
        if len(self.measurements) == 1:
            tbin = False
        futures = []
        for measurement in self.measurements:
            future = measurement.get_data(chr, start, end, bins, bin=tbin)
            futures.append(future)
        for future in futures:
            mea_result, err = await future
            result.append(mea_result)
        result = pd.concat(result, axis=1)
        # Drop duplicated chr/start/end columns coming from each source.
        result = result.loc[:,~result.columns.duplicated()]
        if dropna:
            result = result.dropna()
        try:
            if self.computeFunc:
                columns = self.get_columns()
                # Explicit .copy() avoids pandas SettingWithCopyWarning when
                # coercing the measurement columns to float below.
                result_copy = result[columns].copy()
                for c in columns:
                    result_copy[c] = result[c].apply(float)
                result[self.mid] = result_copy.apply(self.computeFunc, self.computeAxis)
                result[self.mid] = result[self.mid].apply(float)
                # result[self.mid].astype('int64')
                # result[self.mid] = result.apply(self.computeWrapper(self.computeFunc, columns), axis=1)
            return result, str(err)
        except Exception as e:
            logging.error("Computed Measurement: %s\t%s\t%s" %(self.mid, self.name, "file_get_data"), exc_info=True)
            return {}, str(e)
class WebServerMeasurement(Measurement):
    """
    Class representing a web server measurement

    In addition to params from the base measurement class, source is now server API endpoint
    """
    def __init__(self, mtype, mid, name, source, datasource, datasourceGroup, annotation=None, metadata=None, isComputed=False, isGenes=False, minValue=None, maxValue=None):
        # NOTE(review): unlike FileMeasurement, no `genome` argument is passed
        # here, so `annotation` lands positionally where the base class may
        # expect `genome` -- confirm against Measurement.__init__.
        super(WebServerMeasurement, self).__init__(mtype, mid, name, source, datasource, annotation, metadata, isComputed, isGenes, minValue, maxValue)
        self.version = 5
        self.datasourceGroup = datasourceGroup

    def get_data(self, chr, start, end, bin=False, requestId=None):
        """Get data for a genomic region from the API

        Args:
            chr (str): chromosome
            start (int): genomic start
            end (int): genomic end
            bin (bool): True to bin the results, defaults to False
            requestId (int): optional request id; a fresh random id is
                generated per call when omitted

        Returns:
            tuple of (dataframe with results, error); ({}, error) on failure
        """
        # BUG FIX: the old default `requestId=randrange(1000)` was evaluated
        # once at function-definition time, so every call reused the same id.
        if requestId is None:
            requestId = randrange(1000)
        params = {
            'requestId': requestId,
            'version': self.version,
            'action': 'getData',
            'datasourceGroup': self.datasourceGroup,
            'datasource': self.datasource,
            'measurement': self.mid,
            'seqName': chr,
            'start': start,
            'end': end
        }
        try:
            # Peak tracks use the row-oriented endpoint instead of getData.
            if self.annotation["datatype"] == "peak":
                params["action"] = "getRows"
                del params["measurement"]
                params["datasource"] = self.mid
            result = requests.get(self.source, params=params)
            # res = umsgpack.unpackb(result.content)
            res = result.json()
            data = res['data']
            dataF = None
            if self.annotation["datatype"] == "peak":
                # start/end arrive delta-encoded; undo with a cumulative sum.
                start = np.cumsum(data['values']['start'])
                start = start.astype(int)
                end = np.cumsum(data['values']['end'])
                end = end.astype(int)
                chr = data['values']['chr']
                dataF = pd.DataFrame(list(zip(chr, start, end)), columns = ['chr', 'start', "end"])
            else:
                if data['rows']['useOffset']:
                    data['rows']['values']['start'] = np.cumsum(data['rows']['values']['start'])
                    data['rows']['values']['end'] = np.cumsum(data['rows']['values']['end'])
                # convert json to dataframe
                records = {}
                for key in data['rows']['values'].keys():
                    if key not in ["id", "strand", "metadata"]:
                        records[key] = data['rows']['values'][key]
                for key in data['rows']['values']['metadata'].keys():
                    records[key] = data['rows']['values']['metadata'][key]
                for key in data['values']['values'].keys():
                    records[key] = data['values']['values'][key]
                dataF = pd.DataFrame(records)
            return dataF, None
        except Exception as e:
            return {}, str(e)
| 35.27674
| 256
| 0.572384
|
4a05741363f9bec61539afbf7f70c12ef818ad24
| 6,671
|
py
|
Python
|
examples/echo/tests.py
|
goodboy/pulsar
|
e4b42d94b7e262a165782747d65f8b39fb8d3ba9
|
[
"BSD-3-Clause"
] | 1
|
2020-11-30T07:36:57.000Z
|
2020-11-30T07:36:57.000Z
|
examples/echo/tests.py
|
goodboy/pulsar
|
e4b42d94b7e262a165782747d65f8b39fb8d3ba9
|
[
"BSD-3-Clause"
] | null | null | null |
examples/echo/tests.py
|
goodboy/pulsar
|
e4b42d94b7e262a165782747d65f8b39fb8d3ba9
|
[
"BSD-3-Clause"
] | null | null | null |
import unittest
from asyncio import gather
from pulsar import (send, new_event_loop, get_application,
run_in_loop, get_event_loop)
from pulsar.apps.test import dont_run_with_thread
from examples.echo.manage import server, Echo, EchoServerProtocol
class TestEchoServerThread(unittest.TestCase):
    """Integration tests for the pulsar echo server (thread-based arbiter).

    A server is started once per class via the arbiter; `cls.client` is an
    Echo client bound to the server's first address.
    """
    concurrency = 'thread'
    server_cfg = None
    @classmethod
    async def setUpClass(cls):
        # Bind to port 0 so the OS picks a free port.
        s = server(name=cls.__name__.lower(), bind='127.0.0.1:0',
                   backlog=1024, concurrency=cls.concurrency)
        cls.server_cfg = await send('arbiter', 'run', s)
        cls.client = Echo(cls.server_cfg.addresses[0])
    @classmethod
    def tearDownClass(cls):
        # Only kill the actor if setUpClass actually started one.
        if cls.server_cfg:
            return send('arbiter', 'kill_actor', cls.server_cfg.name)
    # TEST THE SERVER APPLICATION
    async def test_server_on_arbiter(self):
        app = await get_application(self.__class__.__name__.lower())
        cfg = app.cfg
        self.assertTrue(cfg.addresses)
        self.assertTrue(cfg.address)
        # The bound address differs from the configured one (port 0 binding).
        self.assertNotEqual(cfg.addresses[0], cfg.address)
    def test_server(self):
        server = self.server_cfg.app()
        self.assertTrue(server)
        self.assertEqual(server.cfg.callable, EchoServerProtocol)
        self.assertTrue(server.cfg.addresses)
    # TEST CLIENT INTERACTION
    async def test_ping(self):
        result = await self.client(b'ciao luca')
        self.assertEqual(result, b'ciao luca')
    async def test_large(self):
        '''Echo an 8KB message (2**13 bytes).'''
        msg = b''.join((b'a' for x in range(2**13)))
        result = await self.client(msg)
        self.assertEqual(result, msg)
    async def test_multi(self):
        # Three concurrent echoes; order of completion is not guaranteed,
        # hence membership checks rather than positional asserts.
        result = await gather(self.client(b'ciao'),
                              self.client(b'pippo'),
                              self.client(b'foo'))
        self.assertEqual(len(result), 3)
        self.assertTrue(b'ciao' in result)
        self.assertTrue(b'pippo' in result)
        self.assertTrue(b'foo' in result)
    # TESTS FOR PROTOCOLS AND CONNECTIONS
    async def test_client(self):
        await self.test_multi()
        c = self.client
        self.assertTrue(c.pool.available)
    async def test_info(self):
        info = await send(self.server_cfg.name, 'info')
        self.assertIsInstance(info, dict)
        self.assertEqual(info['actor']['name'], self.server_cfg.name)
        self.assertEqual(info['actor']['concurrency'], self.concurrency)
    async def test_connection(self):
        client = Echo(self.server_cfg.addresses[0], full_response=True)
        response = await client(b'test connection')
        self.assertEqual(response.buffer, b'test connection')
        connection = response.connection
        self.assertTrue(str(connection))
    async def test_connection_pool(self):
        '''Test the connection pool. A very important test!'''
        client = Echo(self.server_cfg.addresses[0], pool_size=2)
        self.assertEqual(client._loop, get_event_loop())
        # Fresh client: nothing in use, nothing pooled, no sessions yet.
        self.assertEqual(client.pool.pool_size, 2)
        self.assertEqual(client.pool.in_use, 0)
        self.assertEqual(client.pool.available, 0)
        self.assertEqual(client.sessions, 0)
        self.assertEqual(client._requests_processed, 0)
        # First request creates one connection which returns to the pool.
        response = await client(b'test connection')
        self.assertEqual(response, b'test connection')
        self.assertEqual(client.pool.in_use, 0)
        self.assertEqual(client.pool.available, 1)
        self.assertEqual(client.sessions, 1)
        self.assertEqual(client._requests_processed, 1)
        # Second sequential request reuses the pooled connection.
        response = await client(b'test connection 2')
        self.assertEqual(response, b'test connection 2')
        self.assertEqual(client.pool.in_use, 0)
        self.assertEqual(client.pool.available, 1)
        self.assertEqual(client.sessions, 1)
        self.assertEqual(client._requests_processed, 2)
        # Three concurrent requests against a pool of 2: one extra session.
        result = await gather(client(b'ciao'),
                              client(b'pippo'),
                              client(b'foo'))
        self.assertEqual(len(result), 3)
        self.assertTrue(b'ciao' in result)
        self.assertTrue(b'pippo' in result)
        self.assertTrue(b'foo' in result)
        self.assertEqual(client.pool.in_use, 0)
        self.assertEqual(client.pool.available, 2)
        self.assertEqual(client.sessions, 2)
        self.assertEqual(client._requests_processed, 5)
        #
        # drop a connection
        await run_in_loop(client._loop, self._drop_conection, client)
        # The dropped connection forces one replacement session.
        result = await gather(client(b'ciao'),
                              client(b'pippo'),
                              client(b'foo'))
        self.assertEqual(len(result), 3)
        self.assertEqual(client.pool.in_use, 0)
        self.assertEqual(client.pool.available, 2)
        self.assertEqual(client.sessions, 3)
        self.assertEqual(client._requests_processed, 8)
        # Closing the pool empties it without creating new sessions.
        await run_in_loop(client._loop, client.pool.close)
        #
        self.assertEqual(client.pool.in_use, 0)
        self.assertEqual(client.pool.available, 0)
        self.assertEqual(client.sessions, 3)
        self.assertEqual(client._requests_processed, 8)
    def _drop_conection(self, client):
        # Close one pooled connection, then put both back so pool counters
        # still see two entries (one of them dead).
        conn1 = client.pool._queue.get_nowait()
        conn1.close()
        conn2 = client.pool._queue.get_nowait()
        client.pool._queue.put_nowait(conn1)
        client.pool._queue.put_nowait(conn2)
@dont_run_with_thread
class TestEchoServerProcess(TestEchoServerThread):
    """Same suite as TestEchoServerThread but with a process-based arbiter."""
    concurrency = 'process'
    def sync_client(self):
        # Synchronous Echo client driven by its own private event loop.
        return Echo(self.server_cfg.addresses[0], loop=new_event_loop())
    async def setUp(self):
        result = await self.client(b'ciao luca')
        self.assertEqual(result, b'ciao luca')
    # TEST SYNCHRONOUS CLIENT
    # NOTE: the double-underscore prefix name-mangles these methods, so the
    # test runner does not collect them -- they are effectively disabled.
    async def __test_sync_echo(self):
        loop = get_event_loop()
        await loop.run_in_executor(None, self._test_sync_echo)
    async def __test_sync_close(self):
        loop = get_event_loop()
        await loop.run_in_executor(None, self._test_sync_close)
    def _test_sync_echo(self):
        echo = self.sync_client()
        self.assertEqual(echo(b'ciao!'), b'ciao!')
        self.assertEqual(echo(b'fooooooooooooo!'), b'fooooooooooooo!')
    def _test_sync_close(self):
        # TODO: fix this. Issue #251
        echo = self.sync_client()
        self.assertEqual(echo(b'ciao!'), b'ciao!')
        self.assertEqual(echo.sessions, 1)
        self.assertEqual(echo(b'QUIT'), b'QUIT')
        self.assertEqual(echo.sessions, 1)
        # self.assertEqual(echo(b'ciao!'), b'ciao!')
        # self.assertEqual(echo.sessions, 2)
| 37.477528
| 72
| 0.638285
|
4a0574c7adec5bdef701d729da2ef25bf924d988
| 1,087
|
py
|
Python
|
Curso Em Video-python/PYTHON (MUNDO 1, MUNDO 2 E MUNDO 3)/pythonAulas/aula20-funcoes-parte1.py
|
AlamoVinicius/code-pratice
|
924a3ff782caf3695bbeeac39fa02fb23781cd75
|
[
"MIT"
] | null | null | null |
Curso Em Video-python/PYTHON (MUNDO 1, MUNDO 2 E MUNDO 3)/pythonAulas/aula20-funcoes-parte1.py
|
AlamoVinicius/code-pratice
|
924a3ff782caf3695bbeeac39fa02fb23781cd75
|
[
"MIT"
] | null | null | null |
Curso Em Video-python/PYTHON (MUNDO 1, MUNDO 2 E MUNDO 3)/pythonAulas/aula20-funcoes-parte1.py
|
AlamoVinicius/code-pratice
|
924a3ff782caf3695bbeeac39fa02fb23781cd75
|
[
"MIT"
] | null | null | null |
def soma(a, b):
    """Print both operands and their sum (demonstrates positional parameters)."""
    print(f'a = {a} e b = {b}')
    total = a + b
    print(f'A soma de A + B = {total}')
# main program: call soma with keyword and positional arguments
soma(b=4, a=5) # argument order can be changed with keyword parameters
soma(8, 9)
soma(2, 1)
'''é possivel passar vários parâmentros usando o "*", desta forma empacotamos tudo dentro da minha váriavel da função
empacotando todos os valores em uma tupla'''
def contador(*num):
    """Print how many positional values were packed into the *num tuple."""
    print(len(num))
contador(1, 2, 1, 4, 3, 2)  # prints 6 (number of packed arguments)
contador(2, 4)  # prints 2
print('=' * 30)
print(f'{"another way":^30}')
print('=' * 30)
# lists can also be passed to functions and mutated in place:
def dobra(lista):
    """Double every element of *lista* in place (mutates the caller's list)."""
    for idx in range(len(lista)):
        lista[idx] *= 2
valores = [1, 3, 4, 6, 5]
dobra(valores) # the function doubles every value of the list in place
print(valores)
# sum an arbitrary number of values using the * packing operator
def soma_varios(*numbers):
    """Print the packed values and their total."""
    total = sum(numbers)
    print(f'somando os valores {numbers} temos o total de {total}')
# demo: sum of five packed values (prints 15)
soma_varios(2, 3, 4, 5, 1)
| 20.903846
| 117
| 0.623735
|
4a0574e4a2f064c2473851ccfa93963e23ab4fba
| 1,650
|
py
|
Python
|
reversible2/gaussian.py
|
robintibor/reversible2
|
e6fea33ba41c7f76ee50295329b4ef27b879a7fa
|
[
"MIT"
] | null | null | null |
reversible2/gaussian.py
|
robintibor/reversible2
|
e6fea33ba41c7f76ee50295329b4ef27b879a7fa
|
[
"MIT"
] | null | null | null |
reversible2/gaussian.py
|
robintibor/reversible2
|
e6fea33ba41c7f76ee50295329b4ef27b879a7fa
|
[
"MIT"
] | null | null | null |
import torch as th
import numpy as np
# For truncated logic see:
# https://discuss.pytorch.org/t/implementing-truncated-normal-initializer/4778/12
# torch.fmod(torch.randn(size),2)
def get_gauss_samples(n_samples, mean, std, truncate_to=None):
    """Draw samples from a diagonal Gaussian N(mean, std**2).

    Samples standard normals (on GPU when `mean` is on GPU), optionally
    truncates them with fmod, then scales by `std` and shifts by `mean`.
    Returns a tensor of shape [n_samples, len(mean)].
    """
    dims = len(mean)
    if mean.is_cuda:
        base = th.cuda.FloatTensor(n_samples, dims).normal_(0, 1)
    else:
        base = th.FloatTensor(n_samples, dims).normal_(0, 1)
    if truncate_to is not None:
        base = th.fmod(base, truncate_to)
    # Kept for parity with the original (Variable is a no-op wrapper in
    # modern PyTorch).
    base = th.autograd.Variable(base)
    return base * std.unsqueeze(0) + mean.unsqueeze(0)
def get_gaussian_log_probs(mean, log_std, outs):
    """Log-density of `outs` under a diagonal Gaussian N(mean, exp(log_std)**2).

    Returns the per-sample log-probability, summed over dimensions.
    """
    std = th.exp(log_std)
    centered = outs - mean.unsqueeze(0)
    per_dim = (-(centered ** 2) / (2 * std ** 2)
               - np.log(float(np.sqrt(2 * np.pi)))
               - log_std)
    return th.sum(per_dim, dim=1)
def transform_gaussians_by_dirs(means, stds, directions):
    """Project diagonal Gaussians onto a set of direction vectors.

    Args:
        means: clusters x dims tensor of Gaussian means.
        stds: clusters x dims tensor of per-dimension std deviations.
        directions: directions x dims tensor of projection vectors.

    Returns:
        (projected means, projected stds), each clusters x directions.
    """
    # Project means: (clusters x dims) @ (dims x directions) -> transpose to
    # directions x clusters.
    proj_means = th.mm(means, directions.transpose(1, 0)).transpose(1, 0)
    # Variance of a projected diagonal Gaussian: sum_d dir_d^2 * std_d^2.
    stds_per_dir = stds.transpose(1, 0).unsqueeze(0)  # 1 x dims x clusters
    proj_vars = th.sum(
        (directions * directions).unsqueeze(2) * (stds_per_dir * stds_per_dir),
        dim=1)
    proj_stds = th.sqrt(proj_vars)
    # Both are directions x clusters here; return as clusters x directions.
    return proj_means.t(), proj_stds.t()
| 40.243902
| 81
| 0.698788
|
4a0574fd5e7ea05f28020b7f84b0b65ea2b6371e
| 230
|
py
|
Python
|
Projects/Investigations/Docker/hello.py
|
ptpro3/ptpro3.github.io
|
5084d42395d8af467336f0664257058a6b63e86b
|
[
"MIT"
] | 2
|
2019-05-31T01:04:14.000Z
|
2019-06-21T11:38:28.000Z
|
Projects/Investigations/Docker/hello.py
|
ptpro3/ptpro3.github.io
|
5084d42395d8af467336f0664257058a6b63e86b
|
[
"MIT"
] | null | null | null |
Projects/Investigations/Docker/hello.py
|
ptpro3/ptpro3.github.io
|
5084d42395d8af467336f0664257058a6b63e86b
|
[
"MIT"
] | 4
|
2017-10-24T02:11:17.000Z
|
2021-07-19T08:23:48.000Z
|
from flask import Flask
app = Flask(__name__)  # create the application instance :)
@app.route('/')
def hello_world():
    # Root endpoint: plain-text greeting used to verify the container runs.
    return 'Hello, World! - dockerized version'
if __name__ == '__main__':
    # host='0.0.0.0' binds all interfaces so the server is reachable from
    # outside the Docker container.
    app.run(debug=True,host='0.0.0.0')
| 25.555556
| 58
| 0.695652
|
4a057574b02bb2cea2779c0ff22f5aa7edee1f6c
| 3,567
|
py
|
Python
|
rankguru/tests.py
|
Dapro9706/rankguru
|
4a90ef26257dcf1f3624810edfc9019e503aa72e
|
[
"MIT"
] | 2
|
2021-09-15T13:03:18.000Z
|
2021-09-15T13:17:25.000Z
|
rankguru/tests.py
|
Dapro9706/rankguru
|
4a90ef26257dcf1f3624810edfc9019e503aa72e
|
[
"MIT"
] | null | null | null |
rankguru/tests.py
|
Dapro9706/rankguru
|
4a90ef26257dcf1f3624810edfc9019e503aa72e
|
[
"MIT"
] | 1
|
2021-09-15T12:58:51.000Z
|
2021-09-15T12:58:51.000Z
|
from .AbstractAPI import AbstractAPI
from .errors import QPidError, TBidError
from .utils import handle_response
from .globals import PLACE_HOLDER
class RG(AbstractAPI):
    """
    The main API interface for the module

    Creates new RG object, raises ``AuthError`` if header is invalid

    :type header: dict
    :param header: Auth header obtained from rankguru
    """
    def __init__(self, header: dict):
        # Registers the REST ("nova") and GraphQL endpoints with the base API.
        super().__init__(
            header,
            nova = f'https://rest.rankguru.com/tests?textbook={PLACE_HOLDER}',
            graphql = 'https://api.rankguru.com/graphql'
        )
    def get_ans(self, QUESTION_PAPER_ID:str):
        """
        This function return a sorted dictionary of answers for the given Question Paper

        :param QUESTION_PAPER_ID: The Question Paper Id of the exam
        :type QUESTION_PAPER_ID: str
        :raises QPidError: If QPid is invalid
        :raises AuthError: If the header is invalid
        :return: Returns a sorted dictionary of question number - answer pairs
        :rtype: dict
        """
        r = self.get_ans_raw (QUESTION_PAPER_ID)['data']['QuestionEvaluation']
        if not r:
            raise QPidError
        r = r['evaluatedData']
        ret = {}
        for i in r:
            # Decode each key char by shifting its code point up by 48;
            # note the comprehension variable shadows the outer loop's `i`.
            ret[int (i['questionNo'])] = " ".join ([chr (ord (i) + 48) for i in i['key']])
        keys = [*ret.keys()]
        keys.sort()
        # Rebuild the dict in ascending question-number order.
        return {i:ret[i] for i in keys}
    def get_ans_raw(self, QUESTION_PAPER_ID:str):
        """
        Returns raw json output from API retrieval

        :param QUESTION_PAPER_ID: The Question Paper Id of the exam
        :type QUESTION_PAPER_ID: str
        :raises AuthError: If the header is invalid
        :return: Raw json as python dict
        :rtype: dict
        """
        ANS_QUERY = '{ QuestionEvaluation(input: {questionPaperId: "'+QUESTION_PAPER_ID+'" }) \
            { evaluatedData { questionNo key } } }'
        r = self.handler.make ("graphql", data={'query':ANS_QUERY})
        handle_response(r.status_code)
        return r.json ()
    def get_tests_raw(self, TEXT_BOOK_ID:str):
        """
        Returns raw json output from API retrieval

        :param TEXT_BOOK_ID: The Textbook Id of the category
        :type TEXT_BOOK_ID: str
        :raises AuthError: If the header is invalid
        :return: Raw json as python dict
        :rtype: dict
        """
        # Substitutes the textbook id into the "nova" REST URL template.
        r = self.handler.make ("nova", replace={PLACE_HOLDER:str(TEXT_BOOK_ID)},mode="get")
        handle_response(r.status_code)
        return r.json ()
    def get_tests(self, TEXT_BOOK_ID:str, latest_first = True):
        """
        This function return a sorted dictionary of tests for the given Textbook

        :param TEXT_BOOK_ID: The Textbook Id of the exam
        :type TEXT_BOOK_ID: str
        :param latest_first: Toggle it to get earlier tests first, defaults to True
        :type latest_first: bool, optional
        :raises TBidError: If TBid is invalid
        :raises AuthError: If the header is invalid
        :return: Returns a dictionary of test name - QPid pairs
        :rtype: dict
        """
        r = self.get_tests_raw (TEXT_BOOK_ID)
        # The API signals an invalid textbook id with a 'STATUS' key.
        if 'STATUS' in r.keys ():
            raise TBidError
        keys = [*r.keys ()]
        if latest_first:
            keys.reverse ()
        ret = {}
        for i in keys:
            test = r[i]
            ret[test['testName']] = test['questionPaperId']
        return ret
| 28.766129
| 95
| 0.590132
|
4a0576020c296e835e288019e32c711586f6ab65
| 90
|
py
|
Python
|
info/modules/index/__init__.py
|
zxallen/Information
|
1c5f36bbed2f20c078c46760cc5ff11699bc9aa2
|
[
"MIT"
] | 2
|
2018-12-24T02:42:44.000Z
|
2018-12-24T11:43:13.000Z
|
info/modules/index/__init__.py
|
zxallen/Information
|
1c5f36bbed2f20c078c46760cc5ff11699bc9aa2
|
[
"MIT"
] | null | null | null |
info/modules/index/__init__.py
|
zxallen/Information
|
1c5f36bbed2f20c078c46760cc5ff11699bc9aa2
|
[
"MIT"
] | 1
|
2018-12-26T01:31:55.000Z
|
2018-12-26T01:31:55.000Z
|
from flask import Blueprint
# Blueprint for the "index" module; view functions register routes on it.
index_blu = Blueprint("index",__name__)
# Imported after the blueprint is defined so the views can reference
# index_blu at import time (avoids a circular import).
from .views import *
| 18
| 39
| 0.777778
|
4a05771d693f40e7f4705bdaa92319e0dddcf4d7
| 267
|
py
|
Python
|
app.py
|
lunixbr/devopslab
|
609e3687ec3feeddd9be5a8767b6d277f8d8bcec
|
[
"MIT"
] | null | null | null |
app.py
|
lunixbr/devopslab
|
609e3687ec3feeddd9be5a8767b6d277f8d8bcec
|
[
"MIT"
] | null | null | null |
app.py
|
lunixbr/devopslab
|
609e3687ec3feeddd9be5a8767b6d277f8d8bcec
|
[
"MIT"
] | null | null | null |
#print("Hello World")
from flask import Flask
from flask_wtf.csrf import CSRFProtect
# Application instance with CSRF protection enabled globally.
app = Flask(__name__)
csrf = CSRFProtect(app)
@app.route("/")
def pagina_inicial():
    # Root endpoint: plain-text banner for the DevOps pipeline lab.
    return "Laboratorio Pipeline DevOps"
if __name__ == '__main__':
    app.run(debug=True)
| 15.705882
| 40
| 0.719101
|
4a05774db9c2f75475e871722d80c2e7094dcbb3
| 1,794
|
py
|
Python
|
2018/finals/pwn-mojo/attachments/service.py
|
iicarus-bit/google-ctf
|
4eb8742bca58ff071ff8f6814d41d9ec7eb1db4b
|
[
"Apache-2.0"
] | 2,757
|
2018-04-28T21:41:36.000Z
|
2022-03-29T06:33:36.000Z
|
2018/finals/pwn-mojo/attachments/service.py
|
iicarus-bit/google-ctf
|
4eb8742bca58ff071ff8f6814d41d9ec7eb1db4b
|
[
"Apache-2.0"
] | 20
|
2019-07-23T15:29:32.000Z
|
2022-01-21T12:53:04.000Z
|
2018/finals/pwn-mojo/attachments/service.py
|
iicarus-bit/google-ctf
|
4eb8742bca58ff071ff8f6814d41d9ec7eb1db4b
|
[
"Apache-2.0"
] | 449
|
2018-05-09T05:54:05.000Z
|
2022-03-30T14:54:18.000Z
|
#! /usr/bin/python
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import subprocess
import sys
import time
def main():
    """Read a server name from stdin, load https://<server> in headless
    Chrome, and dump the process memory map plus Chrome's stdout/stderr.
    (Python 2 source -- note the print statements.)
    """
    server = sys.stdin.readline()
    server_len = 0
    # Sanitize: accept only '.', ':' and alphanumerics, up to 128 chars;
    # the scan normally stops at the trailing newline.
    # NOTE(review): if the line is shorter than 128 chars and consists only
    # of allowed characters (no trailing newline, e.g. at EOF), the index
    # runs past the end of the string -- confirm input always ends with '\n'.
    while server_len < 128 and (server[server_len] == '.' or server[server_len] == ':' or server[server_len].isalnum()):
        server_len += 1
    server = server[:server_len]
    args = [
        '/home/user/chrome/chrome',
        '--disable-gpu',
        '--headless', '--timeout=15000', '--dump-dom',
        'https://' + server,
    ]
    p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    # Brief pause so the Chrome process exists before reading its maps file.
    time.sleep(0.1)
    with open('/proc/{}/maps'.format(p.pid), 'r') as tmp:
        map = tmp.read().rstrip('\n')
    out, err = p.communicate()
    print '-- map ---------------------------------------------------------------'
    print map
    print '----------------------------------------------------------------------'
    print '-- out ---------------------------------------------------------------'
    print out
    print '----------------------------------------------------------------------'
    print '-- err ---------------------------------------------------------------'
    print err
    print '----------------------------------------------------------------------'
if __name__ == '__main__':
    main()
| 30.931034
| 118
| 0.520624
|
4a05779b562094a408160ac73a859f8c4a04e429
| 12,481
|
py
|
Python
|
lingvo/tasks/asr/decoder_metrics.py
|
lukasbindreiter/lingvo
|
d6f2e6901fadc8440a9e6222ac54b68a8b6faf02
|
[
"Apache-2.0"
] | 1
|
2021-04-18T18:17:02.000Z
|
2021-04-18T18:17:02.000Z
|
lingvo/tasks/asr/decoder_metrics.py
|
lukasbindreiter/lingvo
|
d6f2e6901fadc8440a9e6222ac54b68a8b6faf02
|
[
"Apache-2.0"
] | null | null | null |
lingvo/tasks/asr/decoder_metrics.py
|
lukasbindreiter/lingvo
|
d6f2e6901fadc8440a9e6222ac54b68a8b6faf02
|
[
"Apache-2.0"
] | null | null | null |
# Lint as: python3
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Speech model decoder metrics."""
import collections
import lingvo.compat as tf
from lingvo.core import base_layer
from lingvo.core import metrics
from lingvo.core import py_utils
from lingvo.tasks.asr import decoder_utils
# Container for top-K beam-search results; populated by
# BeamSearchDecodeOutputToDecoderTopK below. Field shapes:
# hyps: [num_beams, num_hyps_per_beam] of serialized Hypothesis protos.
# ids: [num_beams * num_hyps_per_beam, max_target_length].
# lens: [num_beams * num_hyps_per_beam].
# scores: [num_beams, num_hyps_per_beam].
# decoded: [num_beams, num_hyps_per_beam].
DecoderTopK = collections.namedtuple(
    'topk', ['hyps', 'ids', 'lens', 'scores', 'decoded'])  # pyformat: disable
def BeamSearchDecodeOutputToDecoderTopK(decoder_outs,
                                        *,
                                        ids_to_strings_fn,
                                        tag=''):
  """Converts a BeamSearchDecodeOutput into a DecoderTopK tuple.

  As a side-effect, also creates named TF nodes consumed by eval pipelines
  ("top_k_decoded" and "top_k_scores").

  Args:
    decoder_outs: a beam_search_helper.BeamSearchDecodeOutput instance.
    ids_to_strings_fn: a function of (ids, lens) -> strings, where ids has
      shape [batch, length], lens has shape [batch], and strings has shape
      [batch].
    tag: optional suffix appended to the tf.identity() node names.

  Returns:
    A DecoderTopK instance.
  """
  topk_hyps = decoder_outs.topk_hyps
  topk_ids = decoder_outs.topk_ids
  topk_lens = tf.identity(decoder_outs.topk_lens, name='TopKLabelLengths' + tag)
  topk_scores = decoder_outs.topk_scores
  topk_decoded = decoder_outs.topk_decoded
  if topk_ids is not None:
    # Decode label ids (minus the terminator) to strings, reshaped per beam.
    topk_ids = tf.identity(topk_ids, name='TopKLabelIds' + tag)
    topk_decoded = ids_to_strings_fn(topk_ids, topk_lens - 1)
    topk_decoded = tf.identity(topk_decoded, name='top_k_decoded%s' % tag)
    topk_decoded = tf.reshape(topk_decoded, tf.shape(topk_hyps))
  if topk_scores is not None and topk_hyps is not None:
    topk_scores = tf.identity(
        tf.reshape(topk_scores, tf.shape(topk_lens)),
        name='top_k_scores%s' % tag)
    topk_scores = tf.reshape(topk_scores, tf.shape(topk_hyps))
  return DecoderTopK(topk_hyps, topk_ids, topk_lens, topk_scores, topk_decoded)
class DecoderMetrics(base_layer.BaseLayer):
"""Speech model decoder metrics."""
  @classmethod
  def Params(cls):
    """Returns layer params extended with decoder-metric options."""
    p = super().Params()
    p.Define(
        'include_auxiliary_metrics', True,
        'In addition to simple WER, also computes oracle WER, SACC, TER, etc. '
        'Turning off this option will speed up the decoder job.')
    p.Define(
        'log_utf8', False,
        'If True, decodes reference and hypotheses bytes to UTF-8 for logging.')
    return p
def __init__(self, params):
if not params.name:
raise ValueError('params.name not set.')
super().__init__(params)
p = self.params
  def GetTopK(self, decoder_outs, ids_to_strings_fn, tag=''):
    # Thin wrapper over the module-level converter; also creates the
    # eval-pipeline TF nodes as a side effect.
    return BeamSearchDecodeOutputToDecoderTopK(
        decoder_outs, ids_to_strings_fn=ids_to_strings_fn, tag=tag)
def ComputeNormalizedWER(self, hyps, refs, num_hyps_per_beam):
# Filter out all '<epsilon>' tokens for norm_wer computation.
hyps_no_epsilon = tf.strings.regex_replace(hyps, '(<epsilon>)+', ' ')
# norm_wer is size [num_transcripts * hyps_per_beam, 2]
norm_wer = decoder_utils.ComputeWer(hyps_no_epsilon, refs)
# Split into two tensors of size [num_transcripts * hyps_per_beam, 1]
norm_wer_errors, norm_wer_words = tf.split(norm_wer, [1, 1], 1)
shape = [-1, num_hyps_per_beam]
norm_wer_errors = tf.reshape(norm_wer_errors, shape)
norm_wer_words = tf.reshape(norm_wer_words, shape)
return norm_wer_errors, norm_wer_words
  def AddAdditionalDecoderMetricsToGraph(self, topk_hyps, filtered_hyps,
                                         filtered_refs, input_batch,
                                         decoder_outs):
    """Returns a dict of metrics which should be computed from decoded hyps."""
    # The base class implementation returns an empty dictionary. Sub-classes can
    # provide their own implementation.
    return {}
  def ComputeMetrics(self, decoder_outs, input_batch, ids_to_strings_fn):
    """Computes metrics on output from decoder.

    Args:
      decoder_outs: A `BeamSearchDecodeOutput`, a namedtuple containing the
        decode results.
      input_batch: A `NestedMap` of tensors representing the source, target,
        and other components of the input batch.
      ids_to_strings_fn: a function of (ids, lens) -> strings, where ids has
        shape [batch, length], lens has shape [batch], and strings has shape
        [batch].

    Returns:
      A dict of Tensors containing decoder output and metrics.
    """
    topk = self.GetTopK(decoder_outs, ids_to_strings_fn=ids_to_strings_fn)
    tgt_batch = tf.shape(topk.scores)[0]
    num_hyps_per_beam = tf.shape(topk.scores)[1]
    tgt = input_batch.tgt
    # Target lengths from the padding mask; -1 drops the terminator token
    # when converting label ids to strings.
    tgt_lens = tf.cast(tf.round(tf.reduce_sum(1.0 - tgt.paddings, 1)), tf.int32)
    tgt_lens = py_utils.HasShape(tgt_lens, [tgt_batch])
    transcripts = ids_to_strings_fn(tgt.labels, tgt_lens - 1)
    # Filter out all isolated '<noise>' tokens.
    noise_pattern = ' <noise> |^<noise> | <noise>$|^<noise>$'
    filtered_refs = tf.strings.regex_replace(transcripts, noise_pattern, ' ')
    filtered_hyps = tf.strings.regex_replace(topk.decoded, noise_pattern, ' ')
    # Compute translation quality scores for all hyps.
    # Tile each reference across its beam's hypotheses, then flatten both.
    filtered_refs = tf.tile(
        tf.reshape(filtered_refs, [-1, 1]), [1, num_hyps_per_beam])
    filtered_hyps = tf.reshape(filtered_hyps, [-1])
    filtered_refs = tf.reshape(filtered_refs, [-1])
    tf.logging.info('filtered_refs=%s', filtered_refs)
    norm_wer_errors, norm_wer_words = self.ComputeNormalizedWER(
        filtered_hyps, filtered_refs, num_hyps_per_beam)
    ret_dict = {
        'target_ids': tgt.ids,
        'target_labels': tgt.labels,
        'target_weights': tgt.weights,
        'target_paddings': tgt.paddings,
        'transcripts': transcripts,
        'topk_decoded': topk.decoded,
        'topk_ids': topk.ids,
        'topk_lens': topk.lens,
        'topk_scores': topk.scores,
        'norm_wer_errors': norm_wer_errors,
        'norm_wer_words': norm_wer_words,
    }
    # Utterance ids are only available off-TPU and when the input provides them.
    if not py_utils.use_tpu() and 'sample_ids' in input_batch:
      ret_dict['utt_id'] = input_batch.sample_ids
    ret_dict.update(
        self.AddAdditionalDecoderMetricsToGraph(topk, filtered_hyps,
                                                filtered_refs, input_batch,
                                                decoder_outs))
    return ret_dict
def CreateMetrics(self):
base_metrics = {
'num_samples_in_batch': metrics.AverageMetric(),
'norm_wer': metrics.AverageMetric(), # Normalized word error rate.
'corpus_bleu': metrics.CorpusBleuMetric(),
}
if self.params.include_auxiliary_metrics:
base_metrics.update({
'wer': metrics.AverageMetric(), # Word error rate.
'sacc': metrics.AverageMetric(), # Sentence accuracy.
'ter': metrics.AverageMetric(), # Token error rate.
'oracle_norm_wer': metrics.AverageMetric(),
})
return base_metrics
  def PostProcess(self, dec_out_dict, dec_metrics_dict):
    """Accumulates one batch of decoder outputs into the metric objects.

    Args:
      dec_out_dict: dict of numpy values produced by the decode graph
        (see the keys asserted below; shapes assumed [batch, hyps] for the
        per-hypothesis arrays -- TODO confirm against the decode fn).
      dec_metrics_dict: dict of metric objects as built by CreateMetrics().

    Returns:
      An (empty) list of key/value pairs.
    """
    p = self.params
    assert 'topk_scores' in dec_out_dict, list(dec_out_dict.keys())
    topk_scores = dec_out_dict['topk_scores']
    topk_decoded = dec_out_dict['topk_decoded']
    transcripts = dec_out_dict['transcripts']
    if not py_utils.use_tpu():
      # utt_id is only exported off-TPU (see the decode graph construction).
      utt_id = dec_out_dict['utt_id']
      assert len(utt_id) == len(transcripts)
    norm_wer_errors = dec_out_dict['norm_wer_errors']
    norm_wer_words = dec_out_dict['norm_wer_words']
    target_labels = dec_out_dict['target_labels']
    target_paddings = dec_out_dict['target_paddings']
    topk_ids = dec_out_dict['topk_ids']
    topk_lens = dec_out_dict['topk_lens']
    # Sanity checks: all per-utterance arrays must agree on batch size.
    assert len(transcripts) == len(target_labels)
    assert len(transcripts) == len(target_paddings)
    assert len(transcripts) == len(topk_decoded)
    assert len(norm_wer_errors) == len(transcripts)
    assert len(norm_wer_words) == len(transcripts)
    num_samples_in_batch = len(transcripts)
    dec_metrics_dict['num_samples_in_batch'].Update(num_samples_in_batch)

    def GetRefIds(ref_ids, ref_paddinds):
      # Strips padded positions from the reference id sequence.
      assert len(ref_ids) == len(ref_paddinds)
      return_ids = []
      for i in range(len(ref_ids)):
        if ref_paddinds[i] == 0:
          return_ids.append(ref_ids[i])
      return return_ids

    # Normalized WER is computed from the top (index 0) hypothesis only.
    total_norm_wer_errs = norm_wer_errors[:, 0].sum()
    total_norm_wer_words = norm_wer_words[:, 0].sum()
    dec_metrics_dict['norm_wer'].Update(
        total_norm_wer_errs / total_norm_wer_words, total_norm_wer_words)

    for ref_str, hyps in zip(transcripts, topk_decoded):
      filtered_ref = decoder_utils.FilterNoise(ref_str)
      filtered_ref = decoder_utils.FilterEpsilon(filtered_ref)
      filtered_hyp = decoder_utils.FilterNoise(hyps[0])
      filtered_hyp = decoder_utils.FilterEpsilon(filtered_hyp)
      dec_metrics_dict['corpus_bleu'].Update(filtered_ref, filtered_hyp)

    total_errs = 0
    total_oracle_errs = 0
    total_ref_words = 0
    total_token_errs = 0
    total_ref_tokens = 0
    total_accurate_sentences = 0
    key_value_pairs = []
    if p.include_auxiliary_metrics:
      for i in range(len(transcripts)):
        ref_str = transcripts[i]
        if not py_utils.use_tpu():
          tf.logging.info('utt_id: %s', utt_id[i])
        if self.cluster.add_summary:
          tf.logging.info('  ref_str: %s',
                          ref_str.decode('utf-8') if p.log_utf8 else ref_str)
        hyps = topk_decoded[i]
        num_hyps_per_beam = len(hyps)
        ref_ids = GetRefIds(target_labels[i], target_paddings[i])
        # topk_ids/topk_lens are flattened over (utterance, hypothesis).
        hyp_index = i * num_hyps_per_beam
        top_hyp_ids = topk_ids[hyp_index][:topk_lens[hyp_index]]
        if self.cluster.add_summary:
          tf.logging.info('  ref_ids: %s', ref_ids)
          tf.logging.info('  top_hyp_ids: %s', top_hyp_ids)
        total_ref_tokens += len(ref_ids)
        _, _, _, token_errs = decoder_utils.EditDistanceInIds(
            ref_ids, top_hyp_ids)
        total_token_errs += token_errs

        filtered_ref = decoder_utils.FilterNoise(ref_str)
        filtered_ref = decoder_utils.FilterEpsilon(filtered_ref)
        # Oracle error: the best normalized WER over all beam hypotheses.
        oracle_errs = norm_wer_errors[i][0]
        for n, (score, hyp_str) in enumerate(zip(topk_scores[i], hyps)):
          if self.cluster.add_summary:
            tf.logging.info('  %f: %s', score,
                            hyp_str.decode('utf-8') if p.log_utf8 else hyp_str)
          filtered_hyp = decoder_utils.FilterNoise(hyp_str)
          filtered_hyp = decoder_utils.FilterEpsilon(filtered_hyp)
          ins, subs, dels, errs = decoder_utils.EditDistance(
              filtered_ref, filtered_hyp)
          # Note that these numbers are not consistent with what is used to
          # compute normalized WER. In particular, these numbers will be
          # inflated when the transcript contains punctuation.
          tf.logging.info('  ins: %d, subs: %d, del: %d, total: %d', ins, subs,
                          dels, errs)
          # Only aggregate scores of the top hypothesis.
          if n == 0:
            total_errs += errs
            total_ref_words += len(decoder_utils.Tokenize(filtered_ref))
            if norm_wer_errors[i, n] == 0:
              total_accurate_sentences += 1
          oracle_errs = min(oracle_errs, norm_wer_errors[i, n])
        total_oracle_errs += oracle_errs

      # max(1., ...) guards against division by zero on empty references.
      dec_metrics_dict['wer'].Update(total_errs / max(1., total_ref_words),
                                     total_ref_words)
      dec_metrics_dict['oracle_norm_wer'].Update(
          total_oracle_errs / max(1., total_ref_words), total_ref_words)
      dec_metrics_dict['sacc'].Update(
          total_accurate_sentences / len(transcripts), len(transcripts))
      dec_metrics_dict['ter'].Update(
          total_token_errs / max(1., total_ref_tokens), total_ref_tokens)
    return key_value_pairs
| 41.465116
| 80
| 0.673664
|
4a0577b8da76747574d6eaa73be469c5c7441d13
| 19,636
|
py
|
Python
|
pkgs/sdk-pkg/src/genie/libs/sdk/triggers/ha/reload/nxos/reload.py
|
miott/genielibs
|
6464642cdd67aa2367bdbb12561af4bb060e5e62
|
[
"Apache-2.0"
] | null | null | null |
pkgs/sdk-pkg/src/genie/libs/sdk/triggers/ha/reload/nxos/reload.py
|
miott/genielibs
|
6464642cdd67aa2367bdbb12561af4bb060e5e62
|
[
"Apache-2.0"
] | null | null | null |
pkgs/sdk-pkg/src/genie/libs/sdk/triggers/ha/reload/nxos/reload.py
|
miott/genielibs
|
6464642cdd67aa2367bdbb12561af4bb060e5e62
|
[
"Apache-2.0"
] | null | null | null |
'''NXOS implementation for Reload triggers'''
# import python
import logging
# import ats
from ats import aetest
from ats.utils.objects import R
# Genie Libs
from genie.libs.sdk.libs.utils.mapping import Mapping
from genie.libs.sdk.triggers.ha.ha import \
TriggerReload as CommonReload, \
TriggerReloadLc
# Module-level logger for this trigger module.
log = logging.getLogger(__name__)

# Trigger required data settings
# Which key to exclude for Platform Ops comparison
# (attributes that are expected to change across a reload, e.g. uptime and
# disk usage, must not be part of the before/after comparison).
platform_exclude = ['maker', 'disk_used_space','disk_total_space',
                    'rp_uptime', 'sn', 'disk_free_space',
                    'image', 'kickstart_image', 'main_mem']
class TriggerReload(CommonReload):
    """Reload the whole device."""

    __description__ = """Reload the whole device.

    trigger_datafile:
        Mandatory:
            timeout:
                max_time (`int`): Maximum wait time for the trigger,
                    in second. Default: 180
                interval (`int`): Wait time between iteration when looping is needed,
                    in second. Default: 15
        Optional:
            tgn_timeout (`int`): Maximum wait time for all traffic threads to be
                                 restored to the reference rate,
                                 in second. Default: 60
            tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,
                               in second. Default: 10

    steps:
        1. Learn Platform Ops object and store the "ok|active|ha-standby|standby" slot(s)
           if has any, otherwise, SKIP the trigger
        2. Do reload by command "reload"
        3. Learn Platform Ops again and verify the state of RP(s) is
           "active|ha-standby", verify every LC status is "ok",
           and verify left attributes from the ops are the same as the Ops in step 1
        4. Update platform PTS if feature pts is enabled,
           Update global/local veirifications if enabled

    """

    # Mapping of Information between Ops and Conf
    # Also permit to dictates which key to verify
    # requirements: pre-reload slot states to learn; verify_ops: states
    # expected after the reload; num_values: all matched RPs/LCs are used.
    mapping = Mapping(requirements={'ops.platform.platform.Platform':{
                                        'requirements': [\
                                            ['slot', 'rp', '(?P<rp>.*)',
                                             'state', '(?P<status>active|ha-standby)'],
                                            ['slot', 'lc', '(?P<lc>.*)',
                                             'state', '(?P<lc_status>ok|active|standby)']
                                          ],
                                        'all_keys': True,
                                        'exclude': platform_exclude}},
                      verify_ops={'ops.platform.platform.Platform':{
                                      'requirements': [\
                                          ['slot', 'rp', '(?P<rp>.*)',
                                           'state', '(active|ha-standby)'],
                                          ['slot', 'rp', '(?P<rp>.*)',
                                           'redundancy_state', '(active|ha-standby)'],
                                          ['slot', 'lc', '(?P<lc>.*)',
                                           'state', '(ok|active|standby)']],
                                    'exclude': platform_exclude}},
                      num_values={'rp': 'all', 'lc': 'all'})
class TriggerReloadActiveSystemController(TriggerReloadLc):
    """Reload active system controller module on device."""

    __description__ = """Reload active system controller module on device.

    trigger_datafile:
        Mandatory:
            timeout:
                max_time (`int`): Maximum wait time for the trigger,
                    in second. Default: 180
                interval (`int`): Wait time between iteration when looping is needed,
                    in second. Default: 15
            lcRole (`str`): The role of LC which is 'active'
        Optional:
            tgn_timeout (`int`): Maximum wait time for all traffic threads to be
                                 restored to the reference rate,
                                 in second. Default: 60
            tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,
                               in second. Default: 10

    steps:
        1. Learn Platform Ops object and store the "active" and "standby"
           system controller if has any, otherwise, SKIP the trigger
        2. Do reload by command "reload module <lc>"
        3. Learn Platform Ops again and verify the roles of
           "active" system controller and "standby" system controller are swapped,
           and verify left attributes from the ops are the same as the Ops in step 1
        4. Update platform PTS if feature pts is enabled,
           Update global/local veirifications if enabled

    """

    # Mapping of Information between Ops and Conf
    # Also permit to dictates which key to verify
    # After the reload, verify_ops expects the two matched system controllers
    # to have swapped roles (active <-> standby).
    mapping = Mapping(requirements={'ops.platform.platform.Platform':{
                                        'requirements': [\
                                            ['slot', 'oc', '(?P<oc>.*)',
                                             'state', 'active'],
                                            ['slot', 'oc', '(?P<oc>.*)',
                                             'name', 'System Controller'],
                                            ['slot', 'oc', '(?P<standby_sys_con>.*)',
                                             'state', 'standby'],
                                            ['slot', 'oc', '(?P<standby_sys_con>.*)',
                                             'name', 'System Controller'],
                                          ],
                                        'all_keys': True,
                                        'exclude': platform_exclude}},
                      verify_ops={'ops.platform.platform.Platform':{
                                      'requirements': [\
                                          ['slot', 'oc', '(?P<oc>.*)',
                                           'state', 'standby'],
                                          ['slot', 'oc', '(?P<standby_sys_con>.*)',
                                           'state', 'active']],
                                    'exclude': platform_exclude}},
                      num_values={'oc': 1,
                                  'standby_sys_con': 1})
class TriggerReloadStandbySystemController(TriggerReloadLc):
    """Reload standby system controller module on device."""

    __description__ = """Reload standby system controller module on device.

    trigger_datafile:
        Mandatory:
            timeout:
                max_time (`int`): Maximum wait time for the trigger,
                    in second. Default: 180
                interval (`int`): Wait time between iteration when looping is needed,
                    in second. Default: 15
            lcRole (`str`): The role of LC which is 'standby'
        Optional:
            tgn_timeout (`int`): Maximum wait time for all traffic threads to be
                                 restored to the reference rate,
                                 in second. Default: 60
            tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,
                               in second. Default: 10

    steps:
        1. Learn Platform Ops object and store the "active" and "standby"
           system controller if has any, otherwise, SKIP the trigger
        2. Do reload by command "reload module <lc>"
        3. Learn Platform Ops again and the ops are the same as the Ops in step 1
        4. Update platform PTS if feature pts is enabled,
           Update global/local veirifications if enabled

    """

    # NOTE: a stray duplicate one-line string ('''Reload standby system
    # controller''') used to sit here as a no-op statement; it has been
    # removed -- the class docstring above already documents the class.

    # Mapping of Information between Ops and Conf
    # Also permit to dictates which key to verify
    # The standby controller is expected to come back up still in 'standby'.
    mapping = Mapping(requirements={'ops.platform.platform.Platform':{
                                        'requirements': [\
                                            ['slot', 'oc', '(?P<oc>.*)',
                                             'state', 'standby'],
                                            ['slot', 'oc', '(?P<oc>.*)',
                                             'name', 'System Controller'],
                                          ],
                                        'all_keys': True,
                                        'exclude': platform_exclude}},
                      verify_ops={'ops.platform.platform.Platform':{
                                      'requirements': [\
                                          ['slot', 'oc', '(?P<oc>.*)',
                                           'state', 'standby']],
                                    'exclude': platform_exclude}},
                      num_values={'oc': 1})
class TriggerReloadFabricModule(TriggerReloadLc):
    """Reload fabric module on device."""

    __description__ = """Reload fabric module on device.

    trigger_datafile:
        Mandatory:
            timeout:
                max_time (`int`): Maximum wait time for the trigger,
                    in second. Default: 180
                interval (`int`): Wait time between iteration when looping is needed,
                    in second. Default: 15
        Optional:
            tgn_timeout (`int`): Maximum wait time for all traffic threads to be
                                 restored to the reference rate,
                                 in second. Default: 60
            tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,
                               in second. Default: 10

    steps:
        1. Learn Platform Ops object and store the "fabric" Lc(s)
           if has any, otherwise, SKIP the trigger
        2. Do reload by command "reload module <lc>"
        3. Learn Platform Ops again and the ops are the same as the Ops in step 1
        4. Update platform PTS if feature pts is enabled,
           Update global/local veirifications if enabled

    """

    # Mapping of Information between Ops and Conf
    # Also permit to dictates which key to verify
    # Matches every 'oc' slot whose name contains 'Fabric' and is 'ok';
    # after the reload all of them must be back to 'ok'.
    mapping = Mapping(requirements={'ops.platform.platform.Platform':{
                                        'requirements': [\
                                            ['slot', 'oc', '(?P<oc>.*)',
                                             'state', 'ok'],
                                            ['slot', 'oc', '(?P<oc>.*)',
                                             'name', '(?P<name>.*Fabric.*)'],
                                          ],
                                        'exclude': platform_exclude}},
                      verify_ops={'ops.platform.platform.Platform':{
                                      'requirements': [\
                                          ['slot', 'oc', '(?P<oc>.*)',
                                           'state', 'ok']],
                                    'exclude': platform_exclude}},
                      num_values={'oc': 'all'})
class TriggerReloadEthernetModule(TriggerReloadLc):
    """Reload Ethernet module on device."""

    __description__ = """Reload Ethernet module on device.

    trigger_datafile:
        Mandatory:
            timeout:
                max_time (`int`): Maximum wait time for the trigger,
                    in second. Default: 180
                interval (`int`): Wait time between iteration when looping is needed,
                    in second. Default: 15
        Optional:
            tgn_timeout (`int`): Maximum wait time for all traffic threads to be
                                 restored to the reference rate,
                                 in second. Default: 60
            tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,
                               in second. Default: 10

    steps:
        1. Learn Platform Ops object and store the "Ethernet" Lc(s)
           if has any, otherwise, SKIP the trigger
        2. Do reload by command "reload module <lc>"
        3. Learn Platform Ops again and the ops are the same as the Ops in step 1
        4. Update platform PTS if feature pts is enabled,
           Update global/local veirifications if enabled

    """

    # Mapping of Information between Ops and Conf
    # Also permit to dictates which key to verify
    # Matches every 'lc' slot whose name contains 'Ethernet' and is 'ok';
    # after the reload all of them must be back to 'ok'.
    mapping = Mapping(requirements={'ops.platform.platform.Platform':{
                                        'requirements': [\
                                            ['slot', 'lc', '(?P<lc>.*)',
                                             'state', 'ok'],
                                            ['slot', 'lc', '(?P<lc>.*)',
                                             'name', '(?P<name>.*Ethernet.*)'],
                                          ],
                                        'exclude': platform_exclude}},
                      verify_ops={'ops.platform.platform.Platform':{
                                      'requirements': [\
                                          ['slot', 'lc', '(?P<lc>.*)',
                                           'state', 'ok']],
                                    'exclude': platform_exclude}},
                      num_values={'lc': 'all'})
class TriggerReloadActiveRP(TriggerReloadLc):
    """Reload active supervisor module on device."""

    __description__ = """Reload active supervisor module on device.

    trigger_datafile:
        Mandatory:
            timeout:
                max_time (`int`): Maximum wait time for the trigger,
                    in second. Default: 180
                interval (`int`): Wait time between iteration when looping is needed,
                    in second. Default: 15
            lcRole (`str`): The role of LC which is 'active'
        Optional:
            tgn_timeout (`int`): Maximum wait time for all traffic threads to be
                                 restored to the reference rate,
                                 in second. Default: 60
            tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,
                               in second. Default: 10

    steps:
        1. Learn Platform Ops object and store the "active" RP and "standby" RP
           if has any, otherwise, SKIP the trigger
        2. Do reload by command "reload module <lc>"
        3. Learn Platform Ops again and verify the roles of
           "active" RP and "standby" RP are swapped,
           and verify left attributes from the ops are the same as the Ops in step 1
        4. Update platform PTS if feature pts is enabled,
           Update global/local veirifications if enabled

    """

    # Mapping of Information between Ops and Conf
    # Also permit to dictates which key to verify
    # verify_ops expects the active/standby supervisor roles to be swapped
    # after the reload, while every LC stays in a healthy state.
    mapping = Mapping(requirements={'ops.platform.platform.Platform':{
                                        'requirements': [\
                                            ['slot', 'rp', '(?P<active_rp>.*)',
                                             'redundancy_state', 'active'],
                                            ['slot', 'rp', '(?P<active_rp>.*)',
                                             'state', 'active'],
                                            ['slot', 'rp', '(?P<standby_rp>.*)',
                                             'redundancy_state', 'ha-standby'],
                                            ['slot', 'rp', '(?P<standby_rp>.*)',
                                             'state', 'ha-standby'],
                                            ['slot', 'lc', '(?P<lc>.*)',
                                             'state', '(?P<lc_status>ok|active|standby)']
                                          ],
                                        'all_keys': True,
                                        'exclude': platform_exclude}},
                      verify_ops={'ops.platform.platform.Platform':{
                                      'requirements': [\
                                          ['slot', 'rp', '(?P<active_rp>.*)',
                                           'redundancy_state', 'ha-standby'],
                                          ['slot', 'rp', '(?P<active_rp>.*)',
                                           'state', 'ha-standby'],
                                          ['slot', 'rp', '(?P<standby_rp>.*)',
                                           'redundancy_state', 'active'],
                                          ['slot', 'rp', '(?P<standby_rp>.*)',
                                           'state', 'active'],
                                          ['slot', 'lc', '(?P<lc>.*)',
                                           'state', '(ok|active|standby)']],
                                    'exclude': platform_exclude}},
                      num_values={'active_rp':1, 'standby_rp':1, 'lc':1})
class TriggerReloadStandbyRP(TriggerReloadLc):
    """Reload standby supervisor module on device."""

    __description__ = """Reload standby supervisor module on device.

    trigger_datafile:
        Mandatory:
            timeout:
                max_time (`int`): Maximum wait time for the trigger,
                    in second. Default: 180
                interval (`int`): Wait time between iteration when looping is needed,
                    in second. Default: 15
            lcRole (`str`): The role of LC which is 'standby'
        Optional:
            tgn_timeout (`int`): Maximum wait time for all traffic threads to be
                                 restored to the reference rate,
                                 in second. Default: 60
            tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,
                               in second. Default: 10

    steps:
        1. Learn Platform Ops object and store the "standby" RP
           if has any, otherwise, SKIP the trigger
        2. Do reload by command "reload module <lc>"
        3. Learn Platform Ops again and the ops are the same as the Ops in step 1
        4. Update platform PTS if feature pts is enabled,
           Update global/local veirifications if enabled

    """

    # Mapping of Information between Ops and Conf
    # Also permit to dictates which key to verify
    # The standby supervisor must return to 'ha-standby' after the reload.
    mapping = Mapping(requirements={'ops.platform.platform.Platform':{
                                        'requirements': [\
                                            ['slot', 'rp', '(?P<standby_rp>.*)',
                                             'redundancy_state', 'ha-standby'],
                                            ['slot', 'rp', '(?P<standby_rp>.*)',
                                             'state', 'ha-standby'],
                                          ],
                                        'all_keys': True,
                                        'exclude': platform_exclude}},
                      verify_ops={'ops.platform.platform.Platform':{
                                      'requirements': [\
                                          ['slot', 'rp', '(?P<standby_rp>.*)',
                                           'redundancy_state', 'ha-standby']],
                                    'exclude': platform_exclude}},
                      num_values={'standby_rp':1})
| 49.964377
| 91
| 0.446832
|
4a05783c3b34eae569da77680a99f338ff61bdda
| 118
|
py
|
Python
|
configuration/configuration.py
|
filipefcl/fs-webservice-auth
|
1fb5cfe446aaf06c650495b9e8c6862e493304a6
|
[
"MIT"
] | null | null | null |
configuration/configuration.py
|
filipefcl/fs-webservice-auth
|
1fb5cfe446aaf06c650495b9e8c6862e493304a6
|
[
"MIT"
] | null | null | null |
configuration/configuration.py
|
filipefcl/fs-webservice-auth
|
1fb5cfe446aaf06c650495b9e8c6862e493304a6
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
class Configuration:
    """Placeholder configuration holder; no settings are defined yet."""

    def __init__(self):
        # Intentionally empty -- nothing to initialize at this point.
        pass
'''
HIDDEN CODE
'''
| 13.111111
| 23
| 0.483051
|
4a05793e00d5e958efdc950d0cbcaa7c56ad2a96
| 3,456
|
py
|
Python
|
code/robotling/robotling_board.py
|
boeh-da/robotling
|
5925deb670803ece305baa0973914dfb8346524c
|
[
"MIT"
] | null | null | null |
code/robotling/robotling_board.py
|
boeh-da/robotling
|
5925deb670803ece305baa0973914dfb8346524c
|
[
"MIT"
] | null | null | null |
code/robotling/robotling_board.py
|
boeh-da/robotling
|
5925deb670803ece305baa0973914dfb8346524c
|
[
"MIT"
] | 1
|
2021-05-04T09:35:09.000Z
|
2021-05-04T09:35:09.000Z
|
# ----------------------------------------------------------------------------
# robotling_board.py
# Global definitions for robotling board.
#
# The MIT License (MIT)
# Copyright (c) 2018 Thomas Euler
# 2018-09-13, v1
# 2018-12-22, v1.1 - pins for M4 feather express added
# ----------------------------------------------------------------------------
from micropython import const
from robotling_board_version import BOARD_VER
from platform.platform import platform
__version__ = "0.1.1.0"

# Bus clock rates (Hz).
SPI_FRQ = const(4000000)
I2C_FRQ = const(400000)
# I2C devices, maximal clock frequencies:
# AMG88XX (Infrared Array Sensor “Grid-EYE”) <= 400 KHz
# VL6180X (Time of Flight distance sensor) <= 400 KHz
# CMPS12 (Compass) <= 400 KHz
# LSM303 (Compass) 100, 400 KHz
# LSM9DS0 (Compass) 100, 400 KHz

# ----------------------------------------------------------------------------
# Robotling board connections/pins
# Pin names are resolved per MCU platform and per board revision (BOARD_VER).
#
if platform.ID == platform.ENV_ESP32_UPY:
  # ESP32 (Huzzah32) running MicroPython.
  import platform.huzzah32.board as board
  SCK = board.SCK
  MOSI = board.MOSI
  MISO = board.MISO
  CS_ADC = board.D4
  SCL = board.SCL
  SDA = board.SDA
  # Motor driver enable/phase lines.
  A_ENAB = board.D26
  A_PHASE = board.D14
  B_ENAB = board.D21
  B_PHASE = board.D25
  ENAB_5V = board.D16
  RED_LED = board.LED
  ADC_BAT = board.BAT

  if BOARD_VER == 100:
    NEOPIX = board.D15 # Connect Neopixel to DIO #0
    DIO0 = board.D27
    DIO1 = board.LED
    DIO2 = board.D33
    DIO3 = board.D15
  elif BOARD_VER >= 110:
    NEOPIX = board.D15 # -> Neopixel connector
    DIO0 = board.D27
    DIO1 = board.LED
    DIO2 = board.D33
    DIO3 = board.D32

elif platform.ID == platform.ENV_CPY_SAM51:
  # SAMD51 (M4 Express feather) running CircuitPython.
  import board
  SCK = board.SCK
  MOSI = board.MOSI
  MISO = board.MISO
  CS_ADC = board.A5
  SCL = board.SCL
  SDA = board.SDA
  A_ENAB = board.A3
  # The M4 express feather does not allow PWM with pin A0, therefore to use
  # robotling boards <= v1.2 requires to solder a bridge between the pins A0
  # and A3.
  A_PHASE = board.D5
  B_ENAB = board.D4
  B_PHASE = board.A1
  ENAB_5V = board.RX
  RED_LED = board.D13
  ADC_BAT = board.VOLTAGE_MONITOR

  if BOARD_VER == 100:
    '''
    NEOPIX = board.D9 #D15 # Connect Neopixel to DIO #0
    '''
    NEOPIX = board.NEOPIXEL
    DIO0 = board.D11
    DIO1 = board.D13
    DIO2 = board.D10
    DIO3 = board.D9
  elif BOARD_VER >= 110:
    '''
    NEOPIX = board.D9 #D15 # -> Neopixel connector
    '''
    NEOPIX = board.NEOPIXEL
    DIO0 = board.D11
    DIO1 = board.D13
    DIO2 = board.D10
    DIO3 = board.D6

# ----------------------------------------------------------------------------
# The battery is connected to the pin via a voltage divider (1/2), and thus
# an effective voltage range of up to 7.8V (ATTN_11DB, 3.9V); the resolution
# is 12 bit (WITDH_12BIT, 4096):
# V = adc /4096 *2 *3.9 *0.901919 = 0.001717522
# (x2 because of voltage divider, x3.9 for selected range (ADC.ATTN_11DB)
# and x0.901919 as measured correction factor)
BAT_N_PER_V = 0.001717522

# ----------------------------------------------------------------------------
# Error codes
#
RBL_OK = const(0)
RBL_ERR_DEVICE_NOT_READY = const(-1)
RBL_ERR_SPI = const(-2)
# ...

# ----------------------------------------------------------------------------
| 27.212598
| 78
| 0.537616
|
4a057a4ac49445696fc96328e5ca5f6e5bdd3855
| 4,750
|
py
|
Python
|
project_euler/001-050/14.py
|
floppp/programming_challenges
|
42df1b72faf5ddf907296f90e9b14e014d2ea13b
|
[
"MIT"
] | null | null | null |
project_euler/001-050/14.py
|
floppp/programming_challenges
|
42df1b72faf5ddf907296f90e9b14e014d2ea13b
|
[
"MIT"
] | null | null | null |
project_euler/001-050/14.py
|
floppp/programming_challenges
|
42df1b72faf5ddf907296f90e9b14e014d2ea13b
|
[
"MIT"
] | null | null | null |
## VERSION RECURSIVA SIN MEJORAS
# Tiempo: 55.16645 segundos (con operacion binaria aprox lo mismo)
# Resultado 837.799
import time
def collatz(n, lista):
    """Recursively walk the Collatz chain starting at ``n``.

    Every visited value (including ``n`` itself) is appended to ``lista``.
    Returns the tuple ``(1, lista)`` once the chain reaches 1.
    """
    lista.append(n)
    if n == 1:
        return 1, lista
    # 3n+1 for odd values, halve for even values.
    siguiente = 3 * n + 1 if n % 2 else n // 2
    return collatz(siguiente, lista)
# Driver: find the starting number in [1, 27) with the longest Collatz chain.
start = time.time()
max_len = max_num = 0
lista = []
for i in range(1, 27):
    # collatz returns (1, chain); keep only the chain.
    lista = collatz(i, [])[1]
    length = len(lista)
    if length > max_len:
        max_len = length
        max_num = i
print("La cadena del numero {} tiene {} elementos".format(max_num, max_len))
print("Tiempo: {:.5f} segundos".format(time.time() - start))

# La cadena del numero 25 tiene 24 elementos
# Tiempo: 0.00043 segundos
## VERSION RECURSIVA - CONTADOR EN VEZ DE LISTA
# Tiempo: 46.91166 segundos; MEJOR que con listas
# Resultado 837.799
import time
def collatz(n, count):
count += 1
if n == 1:
return 1, count
elif n % 2:
return collatz(int(n*3 + 1), count)
else:
return collatz(n >> 1, count)
# Driver: find the starting number in [1, 10000) with the longest chain.
start = time.time()
max_len = max_num = 0
length = 0
for i in range(1, 10000):
    # collatz returns (1, chain_length); keep only the length.
    length = collatz(i, 0)[1]
    if length > max_len:
        max_len = length
        max_num = i
print("La cadena del numero {} tiene {} elementos".format(max_num, max_len))
print("Tiempo: {:.5f} segundos".format(time.time() - start))
# Fix: the two result lines below were pasted as bare text (SyntaxError);
# they are sample output and must be comments.
# La cadena del numero 6171 tiene 262 elementos
# Tiempo: 0.30881 segundos
## VERSION ITERATIVA SIN MEJORAS
# Tiempo: 25.73280 segundos; MEJOR que recursivas
# Resultado 837.799
import time
def collatz(n):
count = 1 # Para contar el propio 1
while n > 1:
if n % 2:
n = 3*n + 1
else:
n = n >> 1
count += 1
return n, count
# Driver: find the starting number in [1, 100000) with the longest chain.
start = time.time()
max_len = max_num = 0
for i in range(1, 100000):
    num, length = collatz(i)
    if length > max_len:
        max_len = length
        max_num = i
print("La cadena del numero {} tiene {} elementos".format(max_num, max_len))
print("Tiempo: {:.5f} segundos".format(time.time() - start))

# La cadena del numero 77031 tiene 351 elementos
# Tiempo: 2.09933 segundos
# %load_ext cythonmagic
# %%cython
## VERSION ITERATIVA CYTHON
# Tiempo: 15.87660 segundos; MEJOR que recursivas
# Resultado 837.799 - 525 elementos
# Con declaraciones -> 14.21716 segundos
import time
# import numpy as np
# cimport numpy as np
cimport cython
@cython.cfunc
@cython.cdivision(True) # Directive enabled to skip Python division checks
# @cython.locals(n=cython.int) ## cannot enable this; it breaks the function
@cython.locals(count=cython.int)
def collatz(n):
    """Iterative Collatz chain length, Cython-typed counter."""
    # count = cython.declare(cython.int, 1) # to count the 1 itself
    count = 1
    while n > 1:
        if n % 2:
            n = 3*n + 1
        else:
            n = n >> 1
        count += 1
    return n, count
@cython.cdivision(True)
def main():
    """Finds the number below 300000 with the longest Collatz chain."""
    # DECLARATIONS
    cdef int max_len, max_num, num, length
    start = cython.declare(cython.double, time.time())
    max_len = 0
    for i in range(1, 300000):
        num, length = collatz(i)
        if length > max_len:
            max_len = length
            max_num = i
    print("La cadena del numero {} tiene {} elementos".format(max_num, max_len))
    print("Tiempo: {:.5f} segundos".format(time.time() - start))

main()
# La cadena del numero 230631 tiene 443 elementos
# Tiempo: 3.88586 segundos
# %%cython
## VERSION ITERATIVA CYTHON - CON CACHE
# Tiempo: 1.50683 segundos; MEJORA enorme
# Resultado 837.799 - 525 elementos
import time
# import numpy as np
# cimport numpy as np
cimport cython
CACHE = {}
@cython.cfunc
@cython.cdivision(True) # Directive enabled to skip Python division checks
# @cython.locals(n=cython.int) ## cannot enable this; it breaks the function
@cython.locals(count=cython.int)
def collatz(n):
    """Collatz chain length with memoization via the module-level CACHE.

    CACHE maps a starting value to its (previously computed) chain length;
    presumably counts stored here include the cached tail -- TODO confirm
    correctness of the partial-chain accounting.
    """
    global CACHE
    # Seed the trivial chains on every call (cheap, idempotent).
    CACHE[1] = 1
    CACHE[2] = 2
    count, aux = 0, n
    # aux = n
    while n > 1:
        if n in CACHE:
            # Known suffix: add its cached length and memoize the full chain.
            count += CACHE[n]
            CACHE[aux] = count
            return n, count
        else:
            if n % 2:
                n = 3*n + 1
            else:
                n = n >> 1
            count += 1
    CACHE[aux] = count
    return n, count
@cython.cdivision(True)
def main2():
    """Finds the number below 1000000 with the longest Collatz chain (cached)."""
    # DECLARATIONS
    cdef int max_len, max_num, num, length
    start = cython.declare(cython.double, time.time())
    max_len = 0
    for i in range(2, 1000000):
        num, length = collatz(i)
        if length > max_len:
            max_len = length
            max_num = i
    print("La cadena del numero {} tiene {} elementos".format(max_num, max_len))
    print("Tiempo: {:.5f} segundos".format(time.time() - start))

main2()
# La cadena del numero 837799 tiene 525 elementos
# Tiempo: 1.50683 segundos
| 25.40107
| 80
| 0.615158
|
4a057c1d5df6182ed588075fe0af822725e37196
| 11,903
|
py
|
Python
|
lte/gateway/python/magma/pipelined/tests/test_redirect.py
|
rdefosse/magma
|
d12ac827d0cdb39f499ce202e9e1196cc50b68d7
|
[
"BSD-3-Clause"
] | 1
|
2021-11-03T21:37:26.000Z
|
2021-11-03T21:37:26.000Z
|
lte/gateway/python/magma/pipelined/tests/test_redirect.py
|
rdefosse/magma
|
d12ac827d0cdb39f499ce202e9e1196cc50b68d7
|
[
"BSD-3-Clause"
] | 143
|
2020-09-08T06:24:23.000Z
|
2022-03-29T05:56:53.000Z
|
lte/gateway/python/magma/pipelined/tests/test_redirect.py
|
rdefosse/magma
|
d12ac827d0cdb39f499ce202e9e1196cc50b68d7
|
[
"BSD-3-Clause"
] | 2
|
2021-05-27T18:15:16.000Z
|
2021-05-27T18:41:39.000Z
|
"""
Copyright 2020 The Magma Authors.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree.
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
import warnings
from concurrent.futures import Future
from unittest.mock import MagicMock
from lte.protos.mconfig.mconfigs_pb2 import PipelineD
from lte.protos.pipelined_pb2 import VersionedPolicy
from lte.protos.policydb_pb2 import (
FlowDescription,
FlowMatch,
PolicyRule,
RedirectInformation,
)
from magma.pipelined.app.enforcement import EnforcementController
from magma.pipelined.bridge_util import BridgeTools
from magma.pipelined.policy_converters import (
convert_ipv4_str_to_ip_proto,
flow_match_to_magma_match,
)
from magma.pipelined.tests.app.flow_query import RyuDirectFlowQuery as FlowQuery
from magma.pipelined.tests.app.packet_builder import TCPPacketBuilder
from magma.pipelined.tests.app.packet_injector import ScapyPacketInjector
from magma.pipelined.tests.app.start_pipelined import (
PipelinedController,
TestSetup,
)
from magma.pipelined.tests.app.subscriber import RyuDirectSubscriberContext
from magma.pipelined.tests.app.table_isolation import (
RyuDirectTableIsolator,
RyuForwardFlowArgsBuilder,
)
from magma.pipelined.tests.pipelined_test_util import (
FlowTest,
FlowVerifier,
assert_bridge_snapshot_match,
create_service_manager,
fake_controller_setup,
start_ryu_app_thread,
stop_ryu_app_thread,
wait_after_send,
)
class RedirectTest(unittest.TestCase):
BRIDGE = 'testing_br'
IFACE = 'testing_br'
MAC_DEST = "5e:cc:cc:b1:49:4b"
BRIDGE_IP_ADDRESS = '192.168.128.1'
# TODO test for multiple incoming requests (why we match on tcp ports)
    @classmethod
    def setUpClass(cls):
        """
        Starts the thread which launches ryu apps

        Create a testing bridge, add a port, setup the port interfaces. Then
        launch the ryu apps for testing pipelined. Gets the references
        to apps launched by using futures, mocks the redis policy_dictionary
        of enforcement_controller
        """
        super(RedirectTest, cls).setUpClass()
        warnings.simplefilter('ignore')
        cls.service_manager = create_service_manager([PipelineD.ENFORCEMENT])
        cls._tbl_num = cls.service_manager.get_table_num(
            EnforcementController.APP_NAME)

        # Futures resolved by the ryu thread once each app is up.
        enforcement_controller_reference = Future()
        testing_controller_reference = Future()
        test_setup = TestSetup(
            apps=[PipelinedController.Enforcement,
                  PipelinedController.Testing,
                  PipelinedController.StartupFlows],
            references={
                PipelinedController.Enforcement:
                    enforcement_controller_reference,
                PipelinedController.Testing:
                    testing_controller_reference,
                PipelinedController.StartupFlows:
                    Future(),
            },
            config={
                'bridge_name': cls.BRIDGE,
                'bridge_ip_address': cls.BRIDGE_IP_ADDRESS,
                'nat_iface': 'eth2',
                'enodeb_iface': 'eth1',
                'qos': {'enable': False},
                'clean_restart': True,
                'setup_type': 'LTE',
            },
            mconfig=PipelineD(),
            loop=None,
            service_manager=cls.service_manager,
            integ_test=False,
        )

        # The bridge must exist before the ryu apps try to attach to it.
        BridgeTools.create_bridge(cls.BRIDGE, cls.IFACE)

        cls.thread = start_ryu_app_thread(test_setup)

        cls.enforcement_controller = enforcement_controller_reference.result()
        cls.testing_controller = testing_controller_reference.result()

        # Mock out the redis persistence of redirect entries for the tests.
        cls.enforcement_controller._redirect_manager._save_redirect_entry =\
            MagicMock()
    @classmethod
    def tearDownClass(cls):
        """Stops the ryu app thread and removes the testing bridge."""
        stop_ryu_app_thread(cls.thread)
        BridgeTools.destroy_bridge(cls.BRIDGE)
    def test_url_redirect(self):
        """
        Partial redirection test, checks if flows were added properly for url
        based redirection.

        Assert:
            1 Packet is matched
            Packet bypass flows are added
            Flow learn action is triggered - another flow is added to the table
        """
        fake_controller_setup(self.enforcement_controller)
        # Pre-populate the DNS cache so the redirect target resolves without
        # an actual lookup.
        redirect_ips = ["185.128.101.5", "185.128.121.4"]
        self.enforcement_controller._redirect_manager._dns_cache.get(
            "about.sha.ddih.org", lambda: redirect_ips, max_age=42
        )
        imsi = 'IMSI010000000088888'
        sub_ip = '192.168.128.74'
        flow_list = [FlowDescription(match=FlowMatch())]
        # address_type=2 selects URL-based redirection (vs IP-based below).
        policy = VersionedPolicy(
            rule=PolicyRule(
                id='redir_test', priority=3, flow_list=flow_list,
                redirect=RedirectInformation(
                    support=1,
                    address_type=2,
                    server_address="http://about.sha.ddih.org/"
                )
            ),
            version=1,
        )

        # ============================ Subscriber ============================
        sub_context = RyuDirectSubscriberContext(
            imsi, sub_ip, self.enforcement_controller, self._tbl_num
        ).add_policy(policy)
        isolator = RyuDirectTableIsolator(
            RyuForwardFlowArgsBuilder.from_subscriber(sub_context.cfg)
                                     .build_requests(),
            self.testing_controller
        )
        # Craft a TCP SYN from the subscriber towards an external host.
        pkt_sender = ScapyPacketInjector(self.IFACE)
        packet = TCPPacketBuilder()\
            .set_tcp_layer(42132, 80, 321)\
            .set_tcp_flags("S")\
            .set_ip_layer('151.42.41.122', sub_ip)\
            .set_ether_layer(self.MAC_DEST, "00:00:00:00:00:00")\
            .build()

        # Check if these flows were added (queries should return flows)
        permit_outbound, permit_inbound = [], []
        for ip in redirect_ips:
            permit_outbound.append(FlowQuery(
                self._tbl_num, self.testing_controller,
                match=flow_match_to_magma_match(
                    FlowMatch(ip_dst=convert_ipv4_str_to_ip_proto(ip),
                              direction=FlowMatch.UPLINK))
            ))
            permit_inbound.append(FlowQuery(
                self._tbl_num, self.testing_controller,
                match=flow_match_to_magma_match(
                    FlowMatch(ip_src=convert_ipv4_str_to_ip_proto(ip),
                              direction=FlowMatch.DOWNLINK))
            ))

        # Flow added by the OVS learn action for the redirected TCP session.
        learn_action_flow = flow_match_to_magma_match(
            FlowMatch(
                ip_proto=6, direction=FlowMatch.DOWNLINK,
                ip_src=convert_ipv4_str_to_ip_proto(self.BRIDGE_IP_ADDRESS),
                ip_dst=convert_ipv4_str_to_ip_proto(sub_ip))
        )
        learn_action_query = FlowQuery(self._tbl_num, self.testing_controller,
                                       learn_action_flow)

        # =========================== Verification ===========================
        # 1 packet sent, permit rules installed, learn action installed. Since
        # the enforcement table is entered via the DPI table and the scratch
        # enforcement table, the number of packets handled by the table is 2.
        flow_verifier = FlowVerifier(
            [FlowTest(FlowQuery(self._tbl_num, self.testing_controller), 2),
             FlowTest(learn_action_query, 0, flow_count=1)] +
            [FlowTest(query, 0, flow_count=1) for query in permit_outbound] +
            [FlowTest(query, 0, flow_count=1) for query in permit_inbound],
            lambda: wait_after_send(self.testing_controller))

        with isolator, sub_context, flow_verifier:
            pkt_sender.send(packet)
            assert_bridge_snapshot_match(self, self.BRIDGE,
                                         self.service_manager)

        flow_verifier.verify()
def test_ipv4_redirect(self):
    """
    Partial redirection test, checks if flows were added properly for ipv4
    based redirection.
    Assert:
        1 Packet is matched
        Packet bypass flows are added
        Flow learn action is triggered - another flow is added to the table
    """
    fake_controller_setup(self.enforcement_controller)
    # Policy that redirects the subscriber's traffic to a single IP address.
    # NOTE(review): address_type=0 presumably selects IPv4 -- confirm against
    # the RedirectInformation proto definition.
    redirect_ip = "54.12.31.42"
    imsi = 'IMSI012000000088888'
    sub_ip = '192.168.128.74'
    flow_list = [FlowDescription(match=FlowMatch())]
    policy = VersionedPolicy(
        rule=PolicyRule(
            id='redir_ip_test', priority=3, flow_list=flow_list,
            redirect=RedirectInformation(
                support=1,
                address_type=0,
                server_address=redirect_ip
            )
        ),
        version=1,
    )
    # ============================ Subscriber ============================
    sub_context = RyuDirectSubscriberContext(
        imsi, sub_ip, self.enforcement_controller, self._tbl_num
    ).add_policy(policy)
    isolator = RyuDirectTableIsolator(
        RyuForwardFlowArgsBuilder.from_subscriber(sub_context.cfg)
        .build_requests(),
        self.testing_controller
    )
    pkt_sender = ScapyPacketInjector(self.IFACE)
    # TCP SYN from an arbitrary external host towards the subscriber.
    packet = TCPPacketBuilder()\
        .set_tcp_layer(42132, 80, 321)\
        .set_tcp_flags("S")\
        .set_ip_layer('151.42.41.122', sub_ip)\
        .set_ether_layer(self.MAC_DEST, "00:00:00:00:00:00")\
        .build()
    # Check if these flows were added (queries should return flows)
    permit_outbound = FlowQuery(
        self._tbl_num, self.testing_controller,
        match=flow_match_to_magma_match(
            FlowMatch(ip_dst=convert_ipv4_str_to_ip_proto(redirect_ip),
                      direction=FlowMatch.UPLINK))
    )
    permit_inbound = FlowQuery(
        self._tbl_num, self.testing_controller,
        match=flow_match_to_magma_match(
            FlowMatch(ip_src=convert_ipv4_str_to_ip_proto(redirect_ip),
                      direction=FlowMatch.DOWNLINK))
    )
    # Flow expected to be installed by the learn action:
    # bridge IP -> subscriber, TCP (ip_proto=6), downlink.
    learn_action_flow = flow_match_to_magma_match(
        FlowMatch(
            ip_proto=6, direction=FlowMatch.DOWNLINK,
            ip_src=convert_ipv4_str_to_ip_proto(self.BRIDGE_IP_ADDRESS),
            ip_dst=convert_ipv4_str_to_ip_proto(sub_ip))
    )
    learn_action_query = FlowQuery(self._tbl_num, self.testing_controller,
                                   learn_action_flow)
    # =========================== Verification ===========================
    # 1 packet sent, permit rules installed, learn action installed. Since
    # the enforcement table is entered via the DPI table and the scratch
    # enforcement table, the number of packets handled by the table is 2.
    flow_verifier = FlowVerifier([
        FlowTest(FlowQuery(self._tbl_num, self.testing_controller), 2),
        FlowTest(permit_outbound, 0, flow_count=1),
        FlowTest(permit_inbound, 0, flow_count=1),
        FlowTest(learn_action_query, 0, flow_count=1)
    ], lambda: wait_after_send(self.testing_controller))
    with isolator, sub_context, flow_verifier:
        pkt_sender.send(packet)
        assert_bridge_snapshot_match(self, self.BRIDGE,
                                     self.service_manager)
    flow_verifier.verify()
if __name__ == "__main__":
    # Run every test in this module when executed directly.
    unittest.main()
| 39.54485
| 80
| 0.620264
|
4a057d0f31f05d795c08f320983e999079b393be
| 1,859
|
py
|
Python
|
src/dataset/label_extractor.py
|
Digital-Pathology/CustomDataset
|
7b438b3151fcf766ef4853af03ea64b0616fc40a
|
[
"MIT"
] | null | null | null |
src/dataset/label_extractor.py
|
Digital-Pathology/CustomDataset
|
7b438b3151fcf766ef4853af03ea64b0616fc40a
|
[
"MIT"
] | null | null | null |
src/dataset/label_extractor.py
|
Digital-Pathology/CustomDataset
|
7b438b3151fcf766ef4853af03ea64b0616fc40a
|
[
"MIT"
] | null | null | null |
"""
An automatic (or overloaded) label inference class
"""
import abc
from collections import defaultdict
import csv
from email.policy import default
import json
import os
from . import util
class LabelExtractor(abc.ABC):  # strategy pattern
    """ Strategy Pattern --> extracts labels from path for dictionary-based lookup

    Concrete subclasses interpret ``path`` differently (JSON file, CSV file,
    directory tree, ...) but all return a dict-like object mapping keys to
    labels.
    """
    @staticmethod
    @abc.abstractmethod
    def extract_labels(path: str):
        """ extracts labels from path for dictionary-based lookup """
class LabelExtractorNoLabels(LabelExtractor):
    """Fallback extractor for datasets that carry no labels at all."""

    class DefaultDictWithGet(defaultdict):
        """A defaultdict whose ``get`` always yields the placeholder label."""

        def get(self, *unused_args, **unused_kwargs):
            # Every lookup resolves to the same placeholder string.
            return 'LabelExtractorNoLabels'

    @staticmethod
    def extract_labels(path: str):
        """ returns 'LabelExtractorNoLabels' for all labels """
        # ``path`` is intentionally ignored; the returned mapping answers the
        # placeholder for any key via .get().
        return LabelExtractorNoLabels.DefaultDictWithGet()
class LabelExtractorJSON(LabelExtractor):
    """Labels stored in a JSON file of shape ``{key: label, ...}``."""

    @staticmethod
    def extract_labels(path: str):
        """Parse the JSON object at ``path`` and return it as the label mapping."""
        with open(path, 'r', encoding='utf-8') as json_file:
            labels = json.load(json_file)
        return labels
class LabelExtractorCSV(LabelExtractor):
    """Labels stored in a CSV file, one ``<key><sep><label>`` row per line."""

    @staticmethod
    def extract_labels(path: str):
        """Read ``path`` as CSV and map column 0 (key) to column 1 (label)."""
        labels = {}
        with open(path, 'r', encoding='utf-8') as csv_file:
            for row in csv.reader(csv_file):
                # Extra columns, if any, are ignored; later duplicate keys win.
                labels[row[0]] = row[1]
        return labels
class LabelExtractorParentDir(LabelExtractor):
    """Labels encoded by each file's location relative to a root directory."""

    @staticmethod
    def extract_labels(path: str):
        """ labels are path relative to path arg (label_postprocessor recommended) """
        labels = {}
        for filepath in util.listdir_recursive(path):
            labels[filepath] = os.path.dirname(filepath)
        return labels
| 28.6
| 98
| 0.669177
|
4a057d2a2834807d76f7aee9d535c0e72001783f
| 6,323
|
py
|
Python
|
jwskate/jws/compact.py
|
guillp/jwskate
|
271e88d4eca929a4be85e765f3bdbf8b649a2b9a
|
[
"MIT"
] | null | null | null |
jwskate/jws/compact.py
|
guillp/jwskate
|
271e88d4eca929a4be85e765f3bdbf8b649a2b9a
|
[
"MIT"
] | null | null | null |
jwskate/jws/compact.py
|
guillp/jwskate
|
271e88d4eca929a4be85e765f3bdbf8b649a2b9a
|
[
"MIT"
] | null | null | null |
"""This module implements the JWS Compact format."""
from __future__ import annotations
from typing import TYPE_CHECKING, Any, Dict, Iterable, Optional, Tuple, Union
from binapy import BinaPy
from jwskate.jwk.base import Jwk
from jwskate.token import BaseCompactToken
from .signature import JwsSignature
if TYPE_CHECKING:
from .json import JwsJsonFlat, JwsJsonGeneral # pragma: no cover
class InvalidJws(ValueError):
    """Raised when an invalid Jws is parsed.

    Signals a malformed compact JWS: wrong number of dot-separated parts, or
    a part that is not valid Base64URL (or, for the header, not valid JSON).
    """
class JwsCompact(BaseCompactToken):
    """Represents a Json Web Signature (JWS), using compact serialization, as defined in RFC7515."""

    def __init__(self, value: Union[bytes, str]):
        """Initialize a Jws, from its compact representation.

        Args:
            value: the JWS token value

        Raises:
            InvalidJws: if any of the three dot-separated parts is malformed
        """
        super().__init__(value)
        header, payload, signature = self.split(self.value)
        # Chain the underlying decoding errors (``from exc``) so the root
        # cause stays visible in tracebacks instead of being swallowed.
        try:
            self.headers = BinaPy(header).decode_from("b64u").parse_from("json")
        except ValueError as exc:
            raise InvalidJws(
                "Invalid JWS header: it must be a Base64URL-encoded JSON object"
            ) from exc
        try:
            self.payload = BinaPy(payload).decode_from("b64u")
        except ValueError as exc:
            raise InvalidJws(
                "Invalid JWS payload: it must be a Base64URL-encoded binary data (bytes)"
            ) from exc
        try:
            self.signature = BinaPy(signature).decode_from("b64u")
        except ValueError as exc:
            raise InvalidJws(
                "Invalid JWS signature: it must be a Base64URL-encoded binary data (bytes)"
            ) from exc

    @classmethod
    def split(cls, value: bytes) -> Tuple[BinaPy, BinaPy, BinaPy]:
        """Splits a JWS token value into its (header, payload, signature) parts.

        Args:
            value: the JWS token value

        Returns:
            a (header, payload, signature)

        Raises:
            InvalidJws: if the provided value doesn't have 2 dots.
        """
        if value.count(b".") != 2:
            raise InvalidJws(
                "A JWS must contain a header, a payload and a signature, separated by dots"
            )
        header, payload, signature = value.split(b".")
        return BinaPy(header), BinaPy(payload), BinaPy(signature)

    @classmethod
    def sign(
        cls,
        payload: bytes,
        jwk: Union[Jwk, Dict[str, Any]],
        alg: Optional[str] = None,
        extra_headers: Optional[Dict[str, Any]] = None,
    ) -> "JwsCompact":
        """Sign a payload and returns the resulting JwsCompact.

        Args:
            payload: the payload to sign
            jwk: the jwk to use to sign this payload
            alg: the alg to use
            extra_headers: additional headers to add to the Jws Headers

        Returns:
            the resulting token
        """
        jwk = Jwk(jwk)
        headers = dict(extra_headers or {}, alg=alg)
        # Propagate the key id into the header so verifiers can select the key.
        kid = jwk.get("kid")
        if kid:
            headers["kid"] = kid
        signed_part = JwsSignature.assemble_signed_part(headers, payload)
        signature = jwk.sign(signed_part, alg=alg)
        return cls.from_parts(signed_part, signature)

    @classmethod
    def from_parts(
        cls, signed_part: Union[bytes, str], signature: Union[bytes, str]
    ) -> "JwsCompact":
        """Constructs a JWS token based on its signed part and signature values.

        Signed part is the concatenation of the header and payload, both encoded in Base64-Url, and joined by a dot.

        Args:
            signed_part: the signed part
            signature: the signature value

        Returns:
            the resulting token
        """
        if not isinstance(signed_part, bytes):
            signed_part = signed_part.encode("ascii")
        return cls(b".".join((signed_part, BinaPy(signature).to("b64u"))))

    @property
    def signed_part(self) -> bytes:
        """Returns the signed part (header + payload) from this JwsCompact.

        Returns:
            the signed part
        """
        return b".".join(self.value.split(b".", 2)[:2])

    @property
    def alg(self) -> str:
        """Get the signature algorithm (alg) from this token headers.

        Returns:
            the `alg` value

        Raises:
            AttributeError: if the `alg` header value is not a string
        """
        alg = self.get_header("alg")
        if alg is None or not isinstance(alg, str):
            raise AttributeError("This JWS doesn't have a valid 'alg' header")
        return alg

    def verify_signature(
        self,
        jwk: Union[Jwk, Dict[str, Any]],
        alg: Optional[str] = None,
        algs: Optional[Iterable[str]] = None,
    ) -> bool:
        """Verify the signature from this JwsCompact using a Jwk.

        Args:
            jwk: the Jwk to use to validate this signature
            alg: the alg to use, if there is only 1 allowed
            algs: the allowed algs, if here are several

        Returns:
            `True` if the signature matches, `False` otherwise
        """
        jwk = Jwk(jwk)
        return jwk.verify(self.signed_part, self.signature, alg, algs)

    def flat_json(self, unprotected_header: Any = None) -> JwsJsonFlat:
        """Create a JWS in JSON flat format based on this Compact JWS.

        Args:
            unprotected_header: optional unprotected header to include in the JWS JSON

        Returns:
            the resulting token
        """
        from .json import JwsJsonFlat

        protected, payload, signature = self.split(self.value)
        content = {
            "payload": payload.ascii(),
            "protected": protected.ascii(),
            "signature": signature.ascii(),
        }
        if unprotected_header is not None:
            content["header"] = unprotected_header
        return JwsJsonFlat(content)

    def general_json(self, unprotected_header: Any = None) -> JwsJsonGeneral:
        """Create a JWS in JSON General format based on this JWS Compact.

        The resulting token will have a single signature which is the one from this token.

        Args:
            unprotected_header: optional unprotected header to include in the JWS JSON

        Returns:
            the resulting token
        """
        jws = self.flat_json(unprotected_header)
        return jws.generalize()
| 31.147783
| 116
| 0.601613
|
4a057d52d5f7a75992fed04e23068cfcfd8340bb
| 60
|
py
|
Python
|
sample/helpers.py
|
wisemantis/sample_python
|
d444243d543bc6d99c41e68520e6ead34fc7b812
|
[
"BSD-2-Clause"
] | null | null | null |
sample/helpers.py
|
wisemantis/sample_python
|
d444243d543bc6d99c41e68520e6ead34fc7b812
|
[
"BSD-2-Clause"
] | null | null | null |
sample/helpers.py
|
wisemantis/sample_python
|
d444243d543bc6d99c41e68520e6ead34fc7b812
|
[
"BSD-2-Clause"
] | null | null | null |
def get_answer():
    """Get an answer (always ``True``)."""
    answer = True
    return answer
| 12
| 24
| 0.566667
|
4a057d6da7d499a9f6fbd7255dc6ca13f63ae7a4
| 19,347
|
py
|
Python
|
resnet/estimator_benchmark.py
|
biolins/frivolous_dnns
|
23d9a057ac517770cdfe9d8ac71543c328fcf76d
|
[
"MIT"
] | null | null | null |
resnet/estimator_benchmark.py
|
biolins/frivolous_dnns
|
23d9a057ac517770cdfe9d8ac71543c328fcf76d
|
[
"MIT"
] | null | null | null |
resnet/estimator_benchmark.py
|
biolins/frivolous_dnns
|
23d9a057ac517770cdfe9d8ac71543c328fcf76d
|
[
"MIT"
] | 2
|
2021-05-31T23:08:13.000Z
|
2021-12-28T19:11:08.000Z
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Executes Estimator benchmarks and accuracy tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import time
from absl import flags
from absl.testing import flagsaver
import tensorflow as tf # pylint: disable=g-bad-import-order
import cifar10_main as cifar_main
import imagenet_main
from official.utils.flags import core as flags_core
from official.utils.logs import hooks
IMAGENET_DATA_DIR_NAME = 'imagenet'
CIFAR_DATA_DIR_NAME = 'cifar-10-batches-bin'
FLAGS = flags.FLAGS
class EstimatorBenchmark(tf.test.Benchmark):
    """Base class to hold methods common to test classes in the module.

    Code under test for Estimator models (ResNet50 and 56) report mostly the
    same data and require the same FLAG setup.
    """
    # Snapshot of parsed absl flag values, taken on the first test run and
    # restored before each subsequent test so flags are only defined once.
    local_flags = None

    def __init__(self, output_dir=None, default_flags=None, flag_methods=None):
        # output_dir: where model dirs / event logs are written (default /tmp)
        # default_flags: dict of flag overrides applied before every test
        # flag_methods: callables that define the flags the model needs
        if not output_dir:
            output_dir = '/tmp'
        self.output_dir = output_dir
        self.default_flags = default_flags or {}
        self.flag_methods = flag_methods or {}

    def _get_model_dir(self, folder_name):
        """Returns directory to store info, e.g. saved model and event log."""
        return os.path.join(self.output_dir, folder_name)

    def _setup(self):
        """Sets up and resets flags before each test."""
        tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.DEBUG)
        if EstimatorBenchmark.local_flags is None:
            # First test in the process: define flags, parse defaults, apply
            # the class-level overrides, then snapshot the result for reuse.
            for flag_method in self.flag_methods:
                flag_method()
            # Loads flags to get defaults to then override. List cannot be empty.
            flags.FLAGS(['foo'])
            # Overrides flag values with defaults for the class of tests.
            for k, v in self.default_flags.items():
                setattr(FLAGS, k, v)
            saved_flag_values = flagsaver.save_flag_values()
            EstimatorBenchmark.local_flags = saved_flag_values
        else:
            # Subsequent tests: restore the snapshot taken above.
            flagsaver.restore_flag_values(EstimatorBenchmark.local_flags)

    def _report_benchmark(self,
                          stats,
                          wall_time_sec,
                          top_1_max=None,
                          top_1_min=None):
        """Report benchmark results by writing to local protobuf file.

        Args:
            stats: dict returned from estimator models with known entries.
            wall_time_sec: the duration of the benchmark execution in seconds
            top_1_max: highest passing level for top_1 accuracy.
            top_1_min: lowest passing level for top_1 accuracy.
        """
        # Find the throughput hook, if the run installed one.
        examples_per_sec_hook = None
        for hook in stats['train_hooks']:
            if isinstance(hook, hooks.ExamplesPerSecondHook):
                examples_per_sec_hook = hook
                break
        eval_results = stats['eval_results']
        metrics = []
        if 'accuracy' in eval_results:
            metrics.append({'name': 'accuracy_top_1',
                            'value': eval_results['accuracy'].item(),
                            'min_value': top_1_min,
                            'max_value': top_1_max})
        if 'accuracy_top_5' in eval_results:
            metrics.append({'name': 'accuracy_top_5',
                            'value': eval_results['accuracy_top_5'].item()})
        if examples_per_sec_hook:
            exp_per_second_list = examples_per_sec_hook.current_examples_per_sec_list
            # ExamplesPerSecondHook skips the first 10 steps.
            exp_per_sec = sum(exp_per_second_list) / (len(exp_per_second_list))
            metrics.append({'name': 'exp_per_second',
                            'value': exp_per_sec})
        flags_str = flags_core.get_nondefault_flags_as_str()
        self.report_benchmark(
            iters=eval_results.get('global_step', None),
            wall_time=wall_time_sec,
            metrics=metrics,
            extras={'flags': flags_str})
class Resnet50EstimatorAccuracy(EstimatorBenchmark):
    """Benchmark accuracy tests for ResNet50 w/ Estimator."""

    def __init__(self, output_dir=None, root_data_dir=None, **kwargs):
        """Benchmark accuracy tests for ResNet50 w/ Estimator.

        Args:
            output_dir: directory where to output e.g. log files
            root_data_dir: directory under which to look for dataset
            **kwargs: arbitrary named arguments. This is needed to make the
                constructor forward compatible in case PerfZero provides more
                named arguments before updating the constructor.
        """
        flag_methods = [imagenet_main.define_imagenet_flags]
        self.data_dir = os.path.join(root_data_dir, IMAGENET_DATA_DIR_NAME)
        super(Resnet50EstimatorAccuracy, self).__init__(
            output_dir=output_dir, flag_methods=flag_methods)

    def benchmark_graph_8_gpu(self):
        """Test 8 GPUs graph mode."""
        self._setup()
        FLAGS.num_gpus = 8
        FLAGS.data_dir = self.data_dir
        FLAGS.batch_size = 128 * 8  # 128 per GPU across 8 GPUs
        FLAGS.train_epochs = 90
        FLAGS.epochs_between_evals = 10
        FLAGS.model_dir = self._get_model_dir('benchmark_graph_8_gpu')
        FLAGS.dtype = 'fp32'
        FLAGS.hooks = ['ExamplesPerSecondHook']
        self._run_and_report_benchmark()

    def benchmark_graph_fp16_8_gpu(self):
        """Test FP16 8 GPUs graph mode."""
        self._setup()
        FLAGS.num_gpus = 8
        FLAGS.data_dir = self.data_dir
        FLAGS.batch_size = 256 * 8  # 256 per GPU across 8 GPUs
        FLAGS.train_epochs = 90
        FLAGS.epochs_between_evals = 10
        FLAGS.model_dir = self._get_model_dir('benchmark_graph_fp16_8_gpu')
        FLAGS.dtype = 'fp16'
        FLAGS.hooks = ['ExamplesPerSecondHook']
        self._run_and_report_benchmark()

    def benchmark_graph_fp16_graph_rewrite_8_gpu(self):
        """Test FP16 graph rewrite 8 GPUs graph mode."""
        self._setup()
        FLAGS.num_gpus = 8
        FLAGS.data_dir = self.data_dir
        FLAGS.batch_size = 256 * 8
        FLAGS.train_epochs = 90
        FLAGS.epochs_between_evals = 10
        FLAGS.model_dir = self._get_model_dir(
            'benchmark_graph_fp16_graph_rewrite_8_gpu')
        FLAGS.dtype = 'fp16'
        FLAGS.fp16_implementation = 'graph_rewrite'
        FLAGS.hooks = ['ExamplesPerSecondHook']
        self._run_and_report_benchmark()

    def _run_and_report_benchmark(self):
        # Runs full ImageNet training and requires top-1 accuracy to land in
        # the [0.762, 0.766] band.
        start_time_sec = time.time()
        stats = imagenet_main.run_imagenet(flags.FLAGS)
        wall_time_sec = time.time() - start_time_sec
        self._report_benchmark(stats,
                               wall_time_sec,
                               top_1_min=0.762,
                               top_1_max=0.766)
class Resnet50EstimatorBenchmarkBase(EstimatorBenchmark):
    """Base class for benchmarks for ResNet50 using Estimator."""
    # Per-class flag snapshot (see EstimatorBenchmark._setup).
    local_flags = None

    def __init__(self, output_dir=None, default_flags=None):
        flag_methods = [imagenet_main.define_imagenet_flags]
        super(Resnet50EstimatorBenchmarkBase, self).__init__(
            output_dir=output_dir,
            default_flags=default_flags,
            flag_methods=flag_methods)

    def _run_and_report_benchmark(self):
        """Run training and report throughput only (no accuracy gate)."""
        start_time_sec = time.time()
        stats = imagenet_main.run_imagenet(FLAGS)
        wall_time_sec = time.time() - start_time_sec
        print(stats)
        # Remove values to skip triggering accuracy check.
        stats['eval_results'].pop('accuracy', None)
        stats['eval_results'].pop('accuracy_top_5', None)
        self._report_benchmark(stats, wall_time_sec)
class Resnet50EstimatorBenchmark(Resnet50EstimatorBenchmarkBase):
    """Benchmarks for ResNet50 using Estimator with 1 worker.

    The "tweaked" variants additionally set tf_gpu_thread_mode and
    intra_op_parallelism_threads; presumably these are the known-good
    performance settings -- confirm against the TF performance guide.
    """

    def __init__(self, output_dir=None, default_flags=None):
        super(Resnet50EstimatorBenchmark, self).__init__(
            output_dir=output_dir,
            default_flags=default_flags)

    def benchmark_graph_fp16_1_gpu(self):
        """Benchmarks graph fp16 1 gpu."""
        self._setup()
        FLAGS.num_gpus = 1
        FLAGS.model_dir = self._get_model_dir('benchmark_graph_fp16_1_gpu')
        FLAGS.batch_size = 128
        FLAGS.dtype = 'fp16'
        FLAGS.hooks = ['ExamplesPerSecondHook']
        self._run_and_report_benchmark()

    def benchmark_graph_fp16_1_gpu_tweaked(self):
        """Benchmarks graph fp16 1 gpu tweaked."""
        self._setup()
        FLAGS.num_gpus = 1
        FLAGS.tf_gpu_thread_mode = 'gpu_private'
        FLAGS.intra_op_parallelism_threads = 1
        FLAGS.model_dir = self._get_model_dir('benchmark_graph_fp16_1_gpu_tweaked')
        FLAGS.batch_size = 256
        FLAGS.dtype = 'fp16'
        FLAGS.hooks = ['ExamplesPerSecondHook']
        self._run_and_report_benchmark()

    def benchmark_graph_fp16_graph_rewrite_1_gpu_tweaked(self):
        """Benchmarks graph fp16 graph rewrite 1 gpu tweaked."""
        self._setup()
        FLAGS.num_gpus = 1
        FLAGS.tf_gpu_thread_mode = 'gpu_private'
        FLAGS.intra_op_parallelism_threads = 1
        FLAGS.model_dir = self._get_model_dir(
            'benchmark_graph_fp16_graph_rewrite_1_gpu_tweaked')
        FLAGS.batch_size = 256
        FLAGS.dtype = 'fp16'
        FLAGS.fp16_implementation = 'graph_rewrite'
        FLAGS.hooks = ['ExamplesPerSecondHook']
        self._run_and_report_benchmark()

    def benchmark_graph_1_gpu(self):
        """Benchmarks graph 1 gpu."""
        self._setup()
        FLAGS.num_gpus = 1
        FLAGS.model_dir = self._get_model_dir('benchmark_graph_1_gpu')
        FLAGS.batch_size = 128
        FLAGS.dtype = 'fp32'
        FLAGS.hooks = ['ExamplesPerSecondHook']
        self._run_and_report_benchmark()

    def benchmark_graph_8_gpu(self):
        """Benchmarks graph 8 gpus."""
        self._setup()
        FLAGS.num_gpus = 8
        FLAGS.model_dir = self._get_model_dir('benchmark_graph_8_gpu')
        FLAGS.batch_size = 128 * 8
        FLAGS.dtype = 'fp32'
        FLAGS.hooks = ['ExamplesPerSecondHook']
        self._run_and_report_benchmark()

    def benchmark_graph_fp16_8_gpu(self):
        """Benchmarks graph fp16 8 gpus."""
        self._setup()
        FLAGS.num_gpus = 8
        FLAGS.model_dir = self._get_model_dir('benchmark_graph_fp16_8_gpu')
        FLAGS.batch_size = 256 * 8
        FLAGS.dtype = 'fp16'
        FLAGS.hooks = ['ExamplesPerSecondHook']
        self._run_and_report_benchmark()

    def benchmark_graph_fp16_8_gpu_tweaked(self):
        """Benchmarks graph fp16 8 gpus tweaked."""
        self._setup()
        FLAGS.num_gpus = 8
        FLAGS.tf_gpu_thread_mode = 'gpu_private'
        FLAGS.intra_op_parallelism_threads = 1
        FLAGS.model_dir = self._get_model_dir('benchmark_graph_fp16_8_gpu_tweaked')
        FLAGS.batch_size = 256 * 8
        FLAGS.dtype = 'fp16'
        FLAGS.hooks = ['ExamplesPerSecondHook']
        self._run_and_report_benchmark()

    def benchmark_graph_fp16_graph_rewrite_8_gpu_tweaked(self):
        """Benchmarks graph fp16 graph rewrite 8 gpus tweaked."""
        self._setup()
        FLAGS.num_gpus = 8
        FLAGS.tf_gpu_thread_mode = 'gpu_private'
        FLAGS.intra_op_parallelism_threads = 1
        FLAGS.model_dir = self._get_model_dir(
            'benchmark_graph_fp16_graph_rewrite_8_gpu_tweaked')
        FLAGS.batch_size = 256 * 8
        FLAGS.dtype = 'fp16'
        FLAGS.fp16_implementation = 'graph_rewrite'
        FLAGS.hooks = ['ExamplesPerSecondHook']
        self._run_and_report_benchmark()
class Resnet50EstimatorBenchmarkSynth(Resnet50EstimatorBenchmark):
    """Resnet50 synthetic benchmark tests."""

    def __init__(self, output_dir=None, root_data_dir=None, **kwargs):
        # Short (110-step, 1-epoch) run on synthetic data; root_data_dir is
        # accepted only for constructor compatibility and is unused.
        default_flags = {
            'use_synthetic_data': True,
            'max_train_steps': 110,
            'train_epochs': 1,
        }
        super(Resnet50EstimatorBenchmarkSynth, self).__init__(
            output_dir=output_dir, default_flags=default_flags)
class Resnet50EstimatorBenchmarkReal(Resnet50EstimatorBenchmark):
    """Resnet50 real data benchmark tests."""

    def __init__(self, output_dir=None, root_data_dir=None, **kwargs):
        # Short (110-step, 1-epoch) run against the ImageNet data found
        # under root_data_dir.
        default_flags = {
            'data_dir': os.path.join(root_data_dir, IMAGENET_DATA_DIR_NAME),
            'max_train_steps': 110,
            'train_epochs': 1,
        }
        super(Resnet50EstimatorBenchmarkReal, self).__init__(
            output_dir=output_dir, default_flags=default_flags)
class Resnet50MultiWorkerEstimatorBenchmark(Resnet50EstimatorBenchmarkBase):
    """Benchmarks for ResNet50 using Estimator with multiple workers.

    Both variants use MultiWorkerMirroredStrategy and differ only in the
    all-reduce collective implementation ('ring' vs 'nccl').
    """

    def __init__(self, output_dir=None, default_flags=None):
        super(Resnet50MultiWorkerEstimatorBenchmark, self).__init__(
            output_dir=output_dir,
            default_flags=default_flags)

    def benchmark_graph_fp16_8_gpu_ring_tweaked(self):
        """Benchmarks graph fp16 8 gpus with ring collective tweaked."""
        self._setup()
        FLAGS.num_gpus = 8
        FLAGS.distribution_strategy = 'multi_worker_mirrored'
        FLAGS.all_reduce_alg = 'ring'
        FLAGS.tf_gpu_thread_mode = 'gpu_private'
        FLAGS.intra_op_parallelism_threads = 1
        FLAGS.datasets_num_private_threads = 32
        FLAGS.model_dir = self._get_model_dir(
            folder_name='benchmark_graph_fp16_8_gpu_ring_tweaked')
        FLAGS.batch_size = 256 * 8
        FLAGS.dtype = 'fp16'
        FLAGS.hooks = ['ExamplesPerSecondHook']
        self._run_and_report_benchmark()

    def benchmark_graph_fp16_8_gpu_nccl_tweaked(self):
        """Benchmarks graph fp16 8 gpus with nccl collective tweaked."""
        self._setup()
        FLAGS.num_gpus = 8
        FLAGS.distribution_strategy = 'multi_worker_mirrored'
        FLAGS.all_reduce_alg = 'nccl'
        FLAGS.tf_gpu_thread_mode = 'gpu_private'
        FLAGS.intra_op_parallelism_threads = 1
        FLAGS.datasets_num_private_threads = 32
        FLAGS.model_dir = self._get_model_dir(
            folder_name='benchmark_graph_fp16_8_gpu_nccl_tweaked')
        FLAGS.batch_size = 256 * 8
        FLAGS.dtype = 'fp16'
        FLAGS.hooks = ['ExamplesPerSecondHook']
        self._run_and_report_benchmark()
class Resnet50MultiWorkerEstimatorBenchmarkSynth(
        Resnet50MultiWorkerEstimatorBenchmark):
    """ResNet50, multi-worker, Estimator, synthetic data."""

    def __init__(self, output_dir=None, root_data_dir=None, **kwargs):
        # Short (110-step, 1-epoch) synthetic-data run; root_data_dir is
        # accepted only for constructor compatibility and is unused.
        default_flags = {
            'use_synthetic_data': True,
            'max_train_steps': 110,
            'train_epochs': 1,
        }
        super(Resnet50MultiWorkerEstimatorBenchmarkSynth, self).__init__(
            output_dir=output_dir, default_flags=default_flags)
class Resnet56EstimatorAccuracy(EstimatorBenchmark):
    """Accuracy tests for Estimator ResNet56 on CIFAR-10."""
    # Per-class flag snapshot (see EstimatorBenchmark._setup).
    local_flags = None

    def __init__(self, output_dir=None, root_data_dir=None, **kwargs):
        """A benchmark class.

        Args:
            output_dir: directory where to output e.g. log files
            root_data_dir: directory under which to look for dataset
            **kwargs: arbitrary named arguments. This is needed to make the
                constructor forward compatible in case PerfZero provides more
                named arguments before updating the constructor.
        """
        flag_methods = [cifar_main.define_cifar_flags]
        self.data_dir = os.path.join(root_data_dir, CIFAR_DATA_DIR_NAME)
        super(Resnet56EstimatorAccuracy, self).__init__(
            output_dir=output_dir, flag_methods=flag_methods)

    # Consistency fix: use the module-level FLAGS alias (FLAGS = flags.FLAGS)
    # throughout, as the rest of this file does. Behaviour is unchanged since
    # both names refer to the same absl flag object.

    def benchmark_graph_1_gpu(self):
        """Test layers model with Estimator and distribution strategies."""
        self._setup()
        FLAGS.num_gpus = 1
        FLAGS.data_dir = self.data_dir
        FLAGS.batch_size = 128
        FLAGS.train_epochs = 182
        FLAGS.model_dir = self._get_model_dir('benchmark_graph_1_gpu')
        FLAGS.resnet_size = 56
        FLAGS.dtype = 'fp32'
        FLAGS.hooks = ['ExamplesPerSecondHook']
        self._run_and_report_benchmark()

    def benchmark_graph_fp16_1_gpu(self):
        """Test layers FP16 model with Estimator and distribution strategies."""
        self._setup()
        FLAGS.num_gpus = 1
        FLAGS.data_dir = self.data_dir
        FLAGS.batch_size = 128
        FLAGS.train_epochs = 182
        FLAGS.model_dir = self._get_model_dir('benchmark_graph_fp16_1_gpu')
        FLAGS.resnet_size = 56
        FLAGS.dtype = 'fp16'
        FLAGS.hooks = ['ExamplesPerSecondHook']
        self._run_and_report_benchmark()

    def benchmark_graph_2_gpu(self):
        """Test layers model with Estimator and dist_strat. 2 GPUs."""
        self._setup()
        FLAGS.num_gpus = 2
        FLAGS.data_dir = self.data_dir
        FLAGS.batch_size = 128
        FLAGS.train_epochs = 182
        FLAGS.model_dir = self._get_model_dir('benchmark_graph_2_gpu')
        FLAGS.resnet_size = 56
        FLAGS.dtype = 'fp32'
        FLAGS.hooks = ['ExamplesPerSecondHook']
        self._run_and_report_benchmark()

    def benchmark_graph_fp16_2_gpu(self):
        """Test layers FP16 model with Estimator and dist_strat. 2 GPUs."""
        self._setup()
        FLAGS.num_gpus = 2
        FLAGS.data_dir = self.data_dir
        FLAGS.batch_size = 128
        FLAGS.train_epochs = 182
        FLAGS.model_dir = self._get_model_dir('benchmark_graph_fp16_2_gpu')
        FLAGS.resnet_size = 56
        FLAGS.dtype = 'fp16'
        FLAGS.hooks = ['ExamplesPerSecondHook']
        self._run_and_report_benchmark()

    def unit_test(self):
        """A lightweight test that can finish quickly."""
        self._setup()
        FLAGS.num_gpus = 1
        FLAGS.data_dir = self.data_dir
        FLAGS.batch_size = 128
        FLAGS.train_epochs = 1
        FLAGS.model_dir = self._get_model_dir('unit_test')
        FLAGS.resnet_size = 8
        FLAGS.dtype = 'fp32'
        FLAGS.hooks = ['ExamplesPerSecondHook']
        self._run_and_report_benchmark()

    def _run_and_report_benchmark(self):
        """Executes benchmark and reports result."""
        start_time_sec = time.time()
        stats = cifar_main.run_cifar(FLAGS)
        wall_time_sec = time.time() - start_time_sec
        # ResNet56/CIFAR-10 top-1 accuracy must land in [0.926, 0.938].
        self._report_benchmark(stats,
                               wall_time_sec,
                               top_1_min=0.926,
                               top_1_max=0.938)
| 38.694
| 85
| 0.658655
|
4a057dc839b4ecf4c451e20faffe181aedc33917
| 1,378
|
py
|
Python
|
tests/pytest_extension/doc/test_doc_parametrize.py
|
broglep-work/python-pytest-cases
|
4976c0073a2fad5fbe5de34a5d1199efda0b7da9
|
[
"BSD-3-Clause"
] | 213
|
2018-07-05T21:21:21.000Z
|
2022-03-22T04:54:53.000Z
|
tests/pytest_extension/doc/test_doc_parametrize.py
|
broglep-work/python-pytest-cases
|
4976c0073a2fad5fbe5de34a5d1199efda0b7da9
|
[
"BSD-3-Clause"
] | 259
|
2018-06-22T16:46:33.000Z
|
2022-03-23T19:39:15.000Z
|
tests/pytest_extension/doc/test_doc_parametrize.py
|
broglep-work/python-pytest-cases
|
4976c0073a2fad5fbe5de34a5d1199efda0b7da9
|
[
"BSD-3-Clause"
] | 27
|
2019-03-26T12:46:49.000Z
|
2022-02-21T16:56:23.000Z
|
# Authors: Sylvain MARIE <sylvain.marie@se.com>
# + All contributors to <https://github.com/smarie/python-pytest-cases>
#
# License: 3-clause BSD, <https://github.com/smarie/python-pytest-cases/blob/master/LICENSE>
import pytest
from pytest_cases import parametrize, fixture, fixture_ref, lazy_value
@pytest.fixture
def world_str():
return 'world'
def whatfun():
return 'what'
@fixture
@parametrize('who', [world_str, 'you'])
def greetings(who):
return 'hello ' + who
@parametrize('main_msg', ['nothing',
fixture_ref(world_str),
lazy_value(whatfun),
"1",
fixture_ref(greetings)],
auto_refs=False)
@pytest.mark.parametrize('ending', ['?', '!'])
def test_prints(main_msg, ending):
print(main_msg + ending)
def test_synthesis(module_results_dct):
assert list(module_results_dct) == [
'test_prints[nothing-?]',
'test_prints[nothing-!]',
'test_prints[world_str-?]',
'test_prints[world_str-!]',
'test_prints[whatfun-?]',
'test_prints[whatfun-!]',
'test_prints[1-?]',
'test_prints[1-!]',
'test_prints[greetings-world_str-?]',
'test_prints[greetings-world_str-!]',
'test_prints[greetings-you-?]',
'test_prints[greetings-you-!]'
]
| 27.56
| 92
| 0.600145
|
4a057ecaf60283aadc9b57386fe0723aca024a76
| 5,809
|
py
|
Python
|
artemis/ml/tools/processors.py
|
wouterkool/artemis
|
5ac3143d22ba2e7addc93396d059110104096233
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
artemis/ml/tools/processors.py
|
wouterkool/artemis
|
5ac3143d22ba2e7addc93396d059110104096233
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
artemis/ml/tools/processors.py
|
wouterkool/artemis
|
5ac3143d22ba2e7addc93396d059110104096233
|
[
"BSD-2-Clause-FreeBSD"
] | 1
|
2018-11-25T12:48:03.000Z
|
2018-11-25T12:48:03.000Z
|
from abc import abstractmethod
import numpy as np
from artemis.general.mymath import recent_moving_average
from six.moves import xrange
__author__ = 'peter'
class OneHotEncoding(object):
    """Encode integer class labels as one-hot vectors.

    form='bin' produces 0/1 vectors (bool dtype by default); form='sign'
    produces -1/+1 vectors (int32 by default). When n_classes is None it is
    inferred from the first batch as max(label) + 1.
    """

    def __init__(self, n_classes = None, form = 'bin', dtype = None):
        assert form in ('bin', 'sign')
        if dtype is None:
            dtype = np.int32 if form == 'sign' else bool
        self._n_classes = n_classes
        self._dtype = dtype
        self.form = form

    def __call__(self, data):
        """Return a one-hot array of shape ``data.shape + (n_classes,)``."""
        if self._n_classes is None:
            # Infer the class count lazily from the largest label seen.
            self._n_classes = np.max(data) + 1
        flat_size = data.size
        encoded = np.zeros((flat_size, self._n_classes), dtype=self._dtype)
        if self.form == 'sign':
            encoded[:] = -1  # "off" entries are -1 instead of 0
        if flat_size > 0:  # Silly numpy: fancy-indexing guard for empty input
            encoded[np.arange(flat_size), data.flatten()] = 1
        return encoded.reshape(data.shape + (self._n_classes, ))

    def inverse(self, data):
        """Recover integer labels from one-hot rows (argmax over axis 1)."""
        return np.argmax(data, axis = 1)
class RunningAverage(object):
    """Online (cumulative) mean of a stream of scalars or arrays."""

    def __init__(self):
        self._n_samples_seen = 0  # number of samples folded into the mean
        self._average = 0         # current running mean

    def __call__(self, data):
        """Fold ``data`` into the running mean and return the updated mean."""
        self._n_samples_seen += 1
        frac = 1./self._n_samples_seen
        self._average = (1-frac)*self._average + frac*data
        return self._average

    @classmethod
    def batch(cls, x):
        """Return the running mean along axis 0 of the whole sequence ``x``.

        Equivalent to calling the averager once per row of ``x``.
        """
        # Fix: ``np.float`` was removed in NumPy 1.24; the builtin ``float``
        # gives the identical float64 cast.
        return np.cumsum(x, axis=0)/np.arange(1, len(x)+1).astype(float)[(slice(None), )+(None, )*(x.ndim-1)]
class RecentRunningAverage(object):
    """Running average that weights recent samples more heavily.

    The mixing fraction decays as 1/sqrt(t), so the estimate keeps tracking
    recent data instead of converging to the all-time mean.
    """

    def __init__(self):
        self._n_samples_seen = 0
        self._average = 0

    def __call__(self, data):
        """Fold ``data`` into the average and return the updated estimate."""
        self._n_samples_seen += 1
        mix = 1/self._n_samples_seen**.5
        self._average = (1 - mix) * self._average + mix * data
        return self._average

    @classmethod
    def batch(cls, x):
        # return recent_moving_average(x, axis=0)  # Works only for python 2.X, with weave
        averager = cls()
        return np.array([averager(sample) for sample in x])
class RunningAverageWithBurnin(object):
    """Pass values through unchanged for a burn-in period, then average.

    Useful when early samples are unrepresentative and should not be mixed
    into the long-run mean.
    """

    def __init__(self, burn_in_steps):
        self._burn_in_step_remaining = burn_in_steps
        self.averager = RunningAverage()

    def __call__(self, x):
        if self._burn_in_step_remaining > 0:
            # Still burning in: consume one step and return x untouched.
            self._burn_in_step_remaining -= 1
            return x
        return self.averager(x)
class IDifferentiableFunction(object):
    """Interface for a function that can backpropagate a delta through itself.

    NOTE(review): methods are marked @abstractmethod but the class does not
    inherit abc.ABC / use ABCMeta, so abstractness is not actually enforced
    at instantiation time -- confirm whether that is intentional.
    """

    @abstractmethod
    def __call__(self, *args):
        # Apply the function to a single sample.
        pass

    @abstractmethod
    def backprop_delta(self, delta_y):
        # Given d(cost)/d(output), return d(cost)/d(input).
        pass

    def batch_call(self, *args, **kwargs):
        # Apply __call__ over a batch via the module's single_to_batch helper.
        return single_to_batch(self, *args, **kwargs)

    def batch_backprop_delta(self, *args, **kwargs):
        # Apply backprop_delta over a batch via single_to_batch.
        return single_to_batch(self.backprop_delta, *args, **kwargs)
class NonNormalize(IDifferentiableFunction):
    """Identity normalizer: forward and backward passes are both no-ops."""

    def __call__(self, x):
        return x

    def backprop_delta(self, delta_y):
        return delta_y
class RunningCenter(IDifferentiableFunction):
    """Subtract an exponentially-decaying running mean from each input.

    ``half_life`` is the number of steps over which an old sample's weight
    in the mean halves.
    """
    def __init__(self, half_life):
        # decay_constant == 2**(-1/half_life), spelled via exp/log.
        self.decay_constant = np.exp(-np.log(2) / half_life)
        self.one_minus_decay_constant = 1 - self.decay_constant
        self.running_mean = None

    def __call__(self, x):
        if self.running_mean is None:
            # Allocate lazily so the buffer matches x's shape and dtype.
            self.running_mean = np.zeros_like(x)
        decayed = self.decay_constant * self.running_mean
        self.running_mean[:] = decayed + self.one_minus_decay_constant * x
        return x - self.running_mean

    def backprop_delta(self, delta_y):
        # out = x - (a*mean + (1-a)*x), so d(out)/d(x) collapses to a,
        # the decay constant (treating the old mean as constant).
        return self.decay_constant * delta_y
class RunningNormalize(IDifferentiableFunction):
    """Normalize inputs by an exponentially-decaying running mean and std."""

    def __init__(self, half_life, eps=1e-7, initial_std=1):
        self.decay_constant = np.exp(-np.log(2) / half_life)
        self.one_minus_decay_constant = 1 - self.decay_constant
        self.running_mean = None
        self.eps = eps
        self.initial_std = initial_std

    def __call__(self, x):
        if self.running_mean is None:
            # First sample: mean starts at zero, the second moment at
            # initial_std**2 so the very first std is not degenerate.
            self.running_mean = np.zeros_like(x)
            self.running_mean_sq = np.zeros_like(x) + self.initial_std**2
        keep = self.decay_constant
        blend = self.one_minus_decay_constant
        self.running_mean[:] = keep * self.running_mean + blend * x
        self.running_mean_sq[:] = keep * self.running_mean_sq + blend * x**2
        std = np.sqrt(self.running_mean_sq - self.running_mean**2)
        return (x - self.running_mean) / (std + self.eps)

    def backprop_delta(self, delta_y):
        """Approximate gradient: the running mean/std are treated as
        constants w.r.t. the current sample, which reduces dCost/dInput
        to delta_y / std.

        :param delta_y: The derivative of the cost wrt the output of this
            normalizer
        :return: The derivative of the cost wrt the input of this normalizer
        """
        # NOTE: eps is deliberately not added here, mirroring the original
        # (approximate) backward pass exactly.
        std = np.sqrt(self.running_mean_sq - self.running_mean**2)
        return delta_y / std
def single_to_batch(fcn, *batch_inputs, **batch_kwargs):
    """Apply a single-sample function over the leading (batch) axis.

    :param fcn: A function mapping single-sample arrays to an array.
    :param batch_inputs: Batch-form (n_samples, input_dims_i) arrays.
    :param batch_kwargs: Batch-form keyword arguments with the same leading
        length.
    :return: An (n_samples, output_dims...) array of per-sample outputs.
    """
    n_samples = len(batch_inputs[0])
    assert all(len(b) == n_samples for b in batch_inputs)
    first_out = fcn(*[b[0] for b in batch_inputs],
                    **{k: b[0] for k, b in batch_kwargs.items()})
    if n_samples == 1:
        return first_out[None]
    out = np.empty((n_samples, ) + first_out.shape)
    # Bug fix: the first slot previously received the sample COUNT
    # (out[0] = n_samples) instead of the first sample's output.
    out[0] = first_out
    # range, not the Python-2-only xrange, so this runs on Python 3.
    for i in range(1, n_samples):
        out[i] = fcn(*[b[i] for b in batch_inputs],
                     **{k: b[i] for k, b in batch_kwargs.items()})
    return out
| 32.093923
| 115
| 0.641763
|
4a057ee596d178bb7d3e667a3ddbfcea36c49685
| 83,296
|
py
|
Python
|
virtual/lib/python3.6/site-packages/sqlalchemy/engine/base.py
|
mzazakeith/flask-blog
|
2833404cc5e96ffdbfb767f35b9caf2bdcce7997
|
[
"MIT"
] | 207
|
2018-10-01T08:53:01.000Z
|
2022-03-14T12:15:54.000Z
|
virtual/lib/python3.6/site-packages/sqlalchemy/engine/base.py
|
mzazakeith/flask-blog
|
2833404cc5e96ffdbfb767f35b9caf2bdcce7997
|
[
"MIT"
] | 11
|
2019-12-26T17:21:03.000Z
|
2022-03-21T22:17:07.000Z
|
lib/python2.7/site-packages/sqlalchemy/engine/base.py
|
anish03/weather-dash
|
d517fa9da9028d1fc5d8fd71d77cee829ddee87b
|
[
"MIT"
] | 53
|
2019-03-12T16:50:21.000Z
|
2022-03-15T23:16:18.000Z
|
# engine/base.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from __future__ import with_statement
"""Defines :class:`.Connection` and :class:`.Engine`.
"""
import sys
from .. import exc, util, log, interfaces
from ..sql import util as sql_util
from ..sql import schema
from .interfaces import Connectable, ExceptionContext
from .util import _distill_params
import contextlib
class Connection(Connectable):
"""Provides high-level functionality for a wrapped DB-API connection.
Provides execution support for string-based SQL statements as well as
:class:`.ClauseElement`, :class:`.Compiled` and :class:`.DefaultGenerator`
objects. Provides a :meth:`begin` method to return :class:`.Transaction`
objects.
The Connection object is **not** thread-safe. While a Connection can be
shared among threads using properly synchronized access, it is still
possible that the underlying DBAPI connection may not support shared
access between threads. Check the DBAPI documentation for details.
The Connection object represents a single dbapi connection checked out
from the connection pool. In this state, the connection pool has no affect
upon the connection, including its expiration or timeout state. For the
connection pool to properly manage connections, connections should be
returned to the connection pool (i.e. ``connection.close()``) whenever the
connection is not in use.
.. index::
single: thread safety; Connection
"""
schema_for_object = schema._schema_getter(None)
"""Return the ".schema" attribute for an object.
Used for :class:`.Table`, :class:`.Sequence` and similar objects,
and takes into account
the :paramref:`.Connection.execution_options.schema_translate_map`
parameter.
.. versionadded:: 1.1
.. seealso::
:ref:`schema_translating`
"""
def __init__(self, engine, connection=None, close_with_result=False,
             _branch_from=None, _execution_options=None,
             _dispatch=None,
             _has_events=None):
    """Construct a new Connection.

    The constructor here is not public and is only called by an
    :class:`.Engine`. See :meth:`.Engine.connect` and
    :meth:`.Engine.contextual_connect` methods.

    """
    self.engine = engine
    self.dialect = engine.dialect
    # A "branch" shares the parent's DBAPI connection but has its own
    # (mostly no-op) close() semantics; see _branch().
    self.__branch_from = _branch_from
    self.__branch = _branch_from is not None
    if _branch_from:
        # Branched connection: inherit state from the parent rather than
        # checking out a new DBAPI connection.
        self.__connection = connection
        self._execution_options = _execution_options
        self._echo = _branch_from._echo
        self.should_close_with_result = False
        self.dispatch = _dispatch
        self._has_events = _branch_from._has_events
        self.schema_for_object = _branch_from.schema_for_object
    else:
        # Root connection: check out from the pool unless one was
        # passed in explicitly.
        self.__connection = connection \
            if connection is not None else engine.raw_connection()
        self.__transaction = None
        self.__savepoint_seq = 0
        self.should_close_with_result = close_with_result
        self.__invalid = False
        self.__can_reconnect = True
        self._echo = self.engine._should_log_info()
        if _has_events is None:
            # if _has_events is sent explicitly as False,
            # then don't join the dispatch of the engine; we don't
            # want to handle any of the engine's events in that case.
            self.dispatch = self.dispatch._join(engine.dispatch)
        self._has_events = _has_events or (
            _has_events is None and engine._has_events)
        assert not _execution_options
        self._execution_options = engine._execution_options
    if self._has_events or self.engine._has_events:
        self.dispatch.engine_connect(self, self.__branch)
def _branch(self):
    """Return a new Connection that shares this Connection's engine and
    DBAPI connection, but whose close() is effectively a no-op and which
    never enables close_with_result.

    Used very sparingly by the Core (retrieving INSERTed default values)
    and by connect()/contextual_connect() on an existing Connection.
    """
    # Always branch from the root so branches never stack.
    if self.__branch_from:
        return self.__branch_from._branch()
    return self.engine._connection_cls(
        self.engine,
        self.__connection,
        _branch_from=self,
        _execution_options=self._execution_options,
        _has_events=self._has_events,
        _dispatch=self.dispatch)
@property
def _root(self):
    """The outermost Connection this one was branched from, or ``self``
    when this connection is not a branch."""
    return self.__branch_from if self.__branch_from else self
def _clone(self):
    """Create a shallow copy of this Connection (shares the DBAPI
    connection and all other state via a copied __dict__)."""
    cls = self.__class__
    clone = cls.__new__(cls)
    clone.__dict__ = self.__dict__.copy()
    return clone
def __enter__(self):
    # Context-manager entry: the Connection itself is the managed resource.
    return self
def __exit__(self, type, value, traceback):
    # Context-manager exit: unconditionally close; returning None lets any
    # in-flight exception propagate to the caller.
    self.close()
def execution_options(self, **opt):
    r""" Set non-SQL options for the connection which take effect
    during execution.

    The method returns a copy of this :class:`.Connection` which references
    the same underlying DBAPI connection, but also defines the given
    execution options which will take effect for a call to
    :meth:`execute`. As the new :class:`.Connection` references the same
    underlying resource, it's usually a good idea to ensure that the copies
    will be discarded immediately, which is implicit if used as in::

        result = connection.execution_options(stream_results=True).\
            execute(stmt)

    Note that any key/value can be passed to
    :meth:`.Connection.execution_options`, and it will be stored in the
    ``_execution_options`` dictionary of the :class:`.Connection`.  It
    is suitable for usage by end-user schemes to communicate with
    event listeners, for example.

    The keywords that are currently recognized by SQLAlchemy itself
    include all those listed under :meth:`.Executable.execution_options`,
    as well as others that are specific to :class:`.Connection`.

    :param autocommit: Available on: Connection, statement.
      When True, a COMMIT will be invoked after execution
      when executed in 'autocommit' mode, i.e. when an explicit
      transaction is not begun on the connection.   Note that DBAPI
      connections by default are always in a transaction - SQLAlchemy uses
      rules applied to different kinds of statements to determine if
      COMMIT will be invoked in order to provide its "autocommit" feature.
      Typically, all INSERT/UPDATE/DELETE statements as well as
      CREATE/DROP statements have autocommit behavior enabled; SELECT
      constructs do not.  Use this option when invoking a SELECT or other
      specific SQL construct where COMMIT is desired (typically when
      calling stored procedures and such), and an explicit
      transaction is not in progress.

    :param compiled_cache: Available on: Connection.
      A dictionary where :class:`.Compiled` objects
      will be cached when the :class:`.Connection` compiles a clause
      expression into a :class:`.Compiled` object.
      It is the user's responsibility to
      manage the size of this dictionary, which will have keys
      corresponding to the dialect, clause element, the column
      names within the VALUES or SET clause of an INSERT or UPDATE,
      as well as the "batch" mode for an INSERT or UPDATE statement.
      The format of this dictionary is not guaranteed to stay the
      same in future releases.

      Note that the ORM makes use of its own "compiled" caches for
      some operations, including flush operations.  The caching
      used by the ORM internally supersedes a cache dictionary
      specified here.

    :param isolation_level: Available on: :class:`.Connection`.
      Set the transaction isolation level for
      the lifespan of this :class:`.Connection` object (*not* the
      underlying DBAPI connection, for which the level is reset
      to its original setting upon termination of this
      :class:`.Connection` object).

      Valid values include
      those string values accepted by the
      :paramref:`.create_engine.isolation_level`
      parameter passed to :func:`.create_engine`.  These levels are
      semi-database specific; see individual dialect documentation for
      valid levels.

      Note that this option necessarily affects the underlying
      DBAPI connection for the lifespan of the originating
      :class:`.Connection`, and is not per-execution. This
      setting is not removed until the underlying DBAPI connection
      is returned to the connection pool, i.e.
      the :meth:`.Connection.close` method is called.

      .. warning::  The ``isolation_level`` execution option should
         **not** be used when a transaction is already established, that
         is, the :meth:`.Connection.begin` method or similar has been
         called.  A database cannot change the isolation level on a
         transaction in progress, and different DBAPIs and/or
         SQLAlchemy dialects may implicitly roll back or commit
         the transaction, or not affect the connection at all.

      .. versionchanged:: 0.9.9 A warning is emitted when the
         ``isolation_level`` execution option is used after a
         transaction has been started with :meth:`.Connection.begin`
         or similar.

      .. note:: The ``isolation_level`` execution option is implicitly
         reset if the :class:`.Connection` is invalidated, e.g. via
         the :meth:`.Connection.invalidate` method, or if a
         disconnection error occurs.  The new connection produced after
         the invalidation will not have the isolation level re-applied
         to it automatically.

      .. seealso::

            :paramref:`.create_engine.isolation_level`
            - set per :class:`.Engine` isolation level

            :meth:`.Connection.get_isolation_level` - view current level

            :ref:`SQLite Transaction Isolation <sqlite_isolation_level>`

            :ref:`PostgreSQL Transaction Isolation <postgresql_isolation_level>`

            :ref:`MySQL Transaction Isolation <mysql_isolation_level>`

            :ref:`SQL Server Transaction Isolation <mssql_isolation_level>`

            :ref:`session_transaction_isolation` - for the ORM

    :param no_parameters: When ``True``, if the final parameter
      list or dictionary is totally empty, will invoke the
      statement on the cursor as ``cursor.execute(statement)``,
      not passing the parameter collection at all.
      Some DBAPIs such as psycopg2 and mysql-python consider
      percent signs as significant only when parameters are
      present; this option allows code to generate SQL
      containing percent signs (and possibly other characters)
      that is neutral regarding whether it's executed by the DBAPI
      or piped into a script that's later invoked by
      command line tools.

      .. versionadded:: 0.7.6

    :param stream_results: Available on: Connection, statement.
      Indicate to the dialect that results should be
      "streamed" and not pre-buffered, if possible.  This is a limitation
      of many DBAPIs.  The flag is currently understood only by the
      psycopg2, mysqldb and pymysql dialects.

    :param schema_translate_map: Available on: Connection, Engine.
      A dictionary mapping schema names to schema names, that will be
      applied to the :paramref:`.Table.schema` element of each
      :class:`.Table` encountered when SQL or DDL expression elements
      are compiled into strings; the resulting schema name will be
      converted based on presence in the map of the original name.

      .. versionadded:: 1.1

      .. seealso::

            :ref:`schema_translating`

    """
    # Work on a shallow clone: it shares the DBAPI connection and all
    # other state; only the options mapping differs.
    c = self._clone()
    # _execution_options is an immutable mapping; union() returns a new
    # mapping merged with the caller's options.
    c._execution_options = c._execution_options.union(opt)
    if self._has_events or self.engine._has_events:
        self.dispatch.set_connection_execution_options(c, opt)
    # Let the dialect act on options it understands (e.g. isolation_level).
    self.dialect.set_connection_execution_options(c, opt)
    return c
@property
def closed(self):
    """Return True if this connection is closed."""
    # close() deletes the (name-mangled) __connection attribute; once
    # reconnection is also disallowed, the Connection is closed for good.
    has_connection = '_Connection__connection' in self.__dict__
    return not has_connection and not self.__can_reconnect
@property
def invalidated(self):
    """Return True if this connection was invalidated."""
    # Invalidation state lives on the root connection only.
    root = self._root
    return root.__invalid
@property
def connection(self):
    """The underlying DB-API connection managed by this Connection.

    .. seealso::

        :ref:`dbapi_connections`

    """
    try:
        return self.__connection
    except AttributeError:
        # escape "except AttributeError" before revalidating
        # to prevent misleading stacktraces in Py3K
        pass
    try:
        # Attribute was absent: either invalidated or closed; try to
        # procure a fresh DBAPI connection (raises if truly closed).
        return self._revalidate_connection()
    except BaseException as e:
        # NOTE(review): _handle_dbapi_exception presumably always
        # re-raises (possibly wrapped), so there is no fall-through here.
        self._handle_dbapi_exception(e, None, None, None, None)
def get_isolation_level(self):
    """Return the current isolation level assigned to this
    :class:`.Connection`.

    This will typically be the default isolation level as determined
    by the dialect, unless if the
    :paramref:`.Connection.execution_options.isolation_level`
    feature has been used to alter the isolation level on a
    per-:class:`.Connection` basis.

    This attribute will typically perform a live SQL operation in order
    to procure the current isolation level, so the value returned is the
    actual level on the underlying DBAPI connection regardless of how
    this state was set.  Compare to the
    :attr:`.Connection.default_isolation_level` accessor
    which returns the dialect-level setting without performing a SQL
    query.

    .. versionadded:: 0.9.9

    .. seealso::

        :attr:`.Connection.default_isolation_level` - view default level

        :paramref:`.create_engine.isolation_level`
        - set per :class:`.Engine` isolation level

        :paramref:`.Connection.execution_options.isolation_level`
        - set per :class:`.Connection` isolation level

    """
    try:
        # Delegates to the dialect, which typically issues a query
        # against the live DBAPI connection.
        return self.dialect.get_isolation_level(self.connection)
    except BaseException as e:
        # NOTE(review): _handle_dbapi_exception presumably re-raises.
        self._handle_dbapi_exception(e, None, None, None, None)
@property
def default_isolation_level(self):
    """The default isolation level assigned to this :class:`.Connection`.

    This is the isolation level setting that the :class:`.Connection`
    has when first procured via the :meth:`.Engine.connect` method.
    This level stays in place until the
    :paramref:`.Connection.execution_options.isolation_level` is used
    to change the setting on a per-:class:`.Connection` basis.

    Unlike :meth:`.Connection.get_isolation_level`, this attribute is set
    ahead of time from the first connection procured by the dialect,
    so SQL query is not invoked when this accessor is called.

    .. versionadded:: 0.9.9

    .. seealso::

        :meth:`.Connection.get_isolation_level` - view current level

        :paramref:`.create_engine.isolation_level`
        - set per :class:`.Engine` isolation level

        :paramref:`.Connection.execution_options.isolation_level`
        - set per :class:`.Connection` isolation level

    """
    # Cached on the dialect at first-connect time; no SQL is emitted here.
    return self.dialect.default_isolation_level
def _revalidate_connection(self):
    """Procure a new DBAPI connection after an invalidation.

    Raises if a transaction is still pending rollback or if this
    Connection has been closed outright.
    """
    # Branches never own the DBAPI connection; defer to the root.
    if self.__branch_from:
        return self.__branch_from._revalidate_connection()
    if self.__can_reconnect and self.__invalid:
        if self.__transaction is not None:
            # The invalidated transaction must be explicitly rolled back
            # before a reconnect is allowed.
            raise exc.InvalidRequestError(
                "Can't reconnect until invalid "
                "transaction is rolled back")
        self.__connection = self.engine.raw_connection(_connection=self)
        self.__invalid = False
        return self.__connection
    raise exc.ResourceClosedError("This Connection is closed")
@property
def _connection_is_valid(self):
    # use getattr() for is_valid to support exceptions raised in
    # dialect initializer, where the connection is not wrapped in
    # _ConnectionFairy and therefore has no .is_valid attribute at all
    return getattr(self.__connection, 'is_valid', False)
@property
def _still_open_and_connection_is_valid(self):
    """True when this Connection is open, not invalidated, and its
    underlying DBAPI connection is still usable."""
    # Short-circuit order matters: .closed and .invalidated are cheap
    # attribute checks; getattr tolerates an unwrapped raw connection.
    return (not self.closed
            and not self.invalidated
            and getattr(self.__connection, 'is_valid', False))
@property
def info(self):
    """Info dictionary associated with the underlying DBAPI connection
    referred to by this :class:`.Connection`, allowing user-defined
    data to be associated with the connection.

    The data here will follow along with the DBAPI connection including
    after it is returned to the connection pool and used again
    in subsequent instances of :class:`.Connection`.

    """
    # Delegate to the (possibly revalidated) DBAPI connection wrapper.
    dbapi_connection = self.connection
    return dbapi_connection.info
def connect(self):
    """Returns a branched version of this :class:`.Connection`.

    The branch shares the same underlying DBAPI connection; calling
    :meth:`.Connection.close` on the branch leaves this
    :class:`.Connection` open.  Provides usage symmetry with
    :meth:`.Engine.connect`, including for use with context managers.

    """
    return self._branch()
def contextual_connect(self, **kwargs):
    """Returns a branched version of this :class:`.Connection`.

    Closing the returned branch leaves this :class:`.Connection` open.
    Provides usage symmetry with :meth:`.Engine.contextual_connect`,
    including for use with context managers; keyword arguments are
    accepted for interface compatibility and not used here.

    """
    return self._branch()
def invalidate(self, exception=None):
    """Invalidate the underlying DBAPI connection associated with
    this :class:`.Connection`.

    The underlying DBAPI connection is literally closed (if
    possible), and is discarded.  Its source connection pool will
    typically lazily create a new connection to replace it.

    Upon the next use (where "use" typically means using the
    :meth:`.Connection.execute` method or similar),
    this :class:`.Connection` will attempt to
    procure a new DBAPI connection using the services of the
    :class:`.Pool` as a source of connectivity (e.g. a "reconnection").

    If a transaction was in progress (e.g. the
    :meth:`.Connection.begin` method has been called) when
    :meth:`.Connection.invalidate` method is called, at the DBAPI
    level all state associated with this transaction is lost, as
    the DBAPI connection is closed.  The :class:`.Connection`
    will not allow a reconnection to proceed until the
    :class:`.Transaction` object is ended, by calling the
    :meth:`.Transaction.rollback` method; until that point, any attempt at
    continuing to use the :class:`.Connection` will raise an
    :class:`~sqlalchemy.exc.InvalidRequestError`.
    This is to prevent applications from accidentally
    continuing an ongoing transactional operations despite the
    fact that the transaction has been lost due to an
    invalidation.

    The :meth:`.Connection.invalidate` method, just like auto-invalidation,
    will at the connection pool level invoke the
    :meth:`.PoolEvents.invalidate` event.

    .. seealso::

        :ref:`pool_connection_invalidation`

    """
    # Idempotent: a second invalidate is a no-op.
    if self.invalidated:
        return
    if self.closed:
        raise exc.ResourceClosedError("This Connection is closed")
    # All invalidation state lives on the root connection.
    if self._root._connection_is_valid:
        self._root.__connection.invalidate(exception)
    del self._root.__connection
    self._root.__invalid = True
def detach(self):
    """Detach the underlying DB-API connection from its connection pool.

    E.g.::

        with engine.connect() as conn:
            conn.detach()
            conn.execute("SET search_path TO schema1, schema2")

            # work with connection

        # connection is fully closed (since we used "with:", can
        # also call .close())

    This :class:`.Connection` instance will remain usable.  When closed
    (or exited from a context manager context as above),
    the DB-API connection will be literally closed and not
    returned to its originating pool.

    This method can be used to insulate the rest of an application
    from a modified state on a connection (such as a transaction
    isolation level or similar).

    """
    # Delegates to the pooled-connection wrapper's detach().
    self.__connection.detach()
def begin(self):
    """Begin a transaction and return a transaction handle.

    The returned object is an instance of :class:`.Transaction`.
    This object represents the "scope" of the transaction,
    which completes when either the :meth:`.Transaction.rollback`
    or :meth:`.Transaction.commit` method is called.

    Nested calls to :meth:`.begin` on the same :class:`.Connection`
    will return new :class:`.Transaction` objects that represent
    an emulated transaction within the scope of the enclosing
    transaction, that is::

        trans = conn.begin()   # outermost transaction
        trans2 = conn.begin()  # "nested"
        trans2.commit()        # does nothing
        trans.commit()         # actually commits

    Calls to :meth:`.Transaction.commit` only have an effect
    when invoked via the outermost :class:`.Transaction` object, though the
    :meth:`.Transaction.rollback` method of any of the
    :class:`.Transaction` objects will roll back the
    transaction.

    See also:

    :meth:`.Connection.begin_nested` - use a SAVEPOINT

    :meth:`.Connection.begin_twophase` - use a two phase /XID transaction

    :meth:`.Engine.begin` - context manager available from
    :class:`.Engine`.

    """
    # Branches never own transactional state; delegate to the root.
    if self.__branch_from:
        return self.__branch_from.begin()
    if self.__transaction is None:
        # Outermost begin: create the real transaction.
        self.__transaction = RootTransaction(self)
        return self.__transaction
    else:
        # Inner begin: return an emulated transaction wrapping the
        # currently outstanding one.
        return Transaction(self, self.__transaction)
def begin_nested(self):
    """Begin a nested transaction and return a transaction handle.

    The returned object is an instance of :class:`.NestedTransaction`.

    Nested transactions require SAVEPOINT support in the
    underlying database.  Any transaction in the hierarchy may
    ``commit`` and ``rollback``, however the outermost transaction
    still controls the overall ``commit`` or ``rollback`` of the
    transaction of a whole.

    See also :meth:`.Connection.begin`,
    :meth:`.Connection.begin_twophase`.

    """
    # Branches never own transactional state; delegate to the root.
    if self.__branch_from:
        return self.__branch_from.begin_nested()
    if self.__transaction is None:
        # No transaction yet: the "nested" begin starts the root one.
        self.__transaction = RootTransaction(self)
    else:
        # Otherwise establish a SAVEPOINT-backed nested transaction.
        self.__transaction = NestedTransaction(self, self.__transaction)
    return self.__transaction
def begin_twophase(self, xid=None):
    """Begin a two-phase or XA transaction and return a transaction
    handle.

    The returned object is an instance of :class:`.TwoPhaseTransaction`,
    which in addition to the methods provided by
    :class:`.Transaction`, also provides a
    :meth:`~.TwoPhaseTransaction.prepare` method.

    :param xid: the two phase transaction id.  If not supplied, a
      random id will be generated.

    See also :meth:`.Connection.begin`,
    :meth:`.Connection.begin_twophase`.

    """
    # Branches never own transactional state; delegate to the root.
    if self.__branch_from:
        return self.__branch_from.begin_twophase(xid=xid)
    # Unlike begin(), two-phase transactions cannot nest inside an
    # existing transaction.
    if self.__transaction is not None:
        raise exc.InvalidRequestError(
            "Cannot start a two phase transaction when a transaction "
            "is already in progress.")
    if xid is None:
        xid = self.engine.dialect.create_xid()
    self.__transaction = TwoPhaseTransaction(self, xid)
    return self.__transaction
def recover_twophase(self):
    """Delegate two-phase transaction recovery to this engine's dialect."""
    dialect = self.engine.dialect
    return dialect.do_recover_twophase(self)
def rollback_prepared(self, xid, recover=False):
    """Roll back a prepared two-phase transaction via the dialect."""
    dialect = self.engine.dialect
    dialect.do_rollback_twophase(self, xid, recover=recover)
def commit_prepared(self, xid, recover=False):
    """Commit a prepared two-phase transaction via the dialect."""
    dialect = self.engine.dialect
    dialect.do_commit_twophase(self, xid, recover=recover)
def in_transaction(self):
    """Return True if a transaction is in progress."""
    # Transactional state lives on the root connection.
    root = self._root
    return root.__transaction is not None
def _begin_impl(self, transaction):
    """Emit the dialect-level BEGIN for *transaction* (root connections
    only; branches delegate before reaching the _impl methods)."""
    assert not self.__branch_from
    if self._echo:
        self.engine.logger.info("BEGIN (implicit)")
    if self._has_events or self.engine._has_events:
        self.dispatch.begin(self)
    try:
        self.engine.dialect.do_begin(self.connection)
        if self.connection._reset_agent is None:
            # Register this transaction as the agent the pool will use
            # to reset (roll back) the connection on checkin.
            self.connection._reset_agent = transaction
    except BaseException as e:
        self._handle_dbapi_exception(e, None, None, None, None)
def _rollback_impl(self):
    """Emit the dialect-level ROLLBACK and clear transactional state
    (root connections only)."""
    assert not self.__branch_from
    if self._has_events or self.engine._has_events:
        self.dispatch.rollback(self)
    if self._still_open_and_connection_is_valid:
        if self._echo:
            self.engine.logger.info("ROLLBACK")
        try:
            self.engine.dialect.do_rollback(self.connection)
        except BaseException as e:
            self._handle_dbapi_exception(e, None, None, None, None)
        finally:
            # Detach ourselves as the pool's reset agent and drop the
            # transaction even if the ROLLBACK itself failed.
            if not self.__invalid and \
                    self.connection._reset_agent is self.__transaction:
                self.connection._reset_agent = None
            self.__transaction = None
    else:
        # Connection already closed/invalid: nothing to emit, just
        # clear the bookkeeping.
        self.__transaction = None
def _commit_impl(self, autocommit=False):
    """Emit the dialect-level COMMIT and clear transactional state
    (root connections only)."""
    assert not self.__branch_from
    if self._has_events or self.engine._has_events:
        self.dispatch.commit(self)
    if self._echo:
        self.engine.logger.info("COMMIT")
    try:
        self.engine.dialect.do_commit(self.connection)
    except BaseException as e:
        self._handle_dbapi_exception(e, None, None, None, None)
    finally:
        # Detach ourselves as the pool's reset agent and drop the
        # transaction even if the COMMIT itself failed.
        if not self.__invalid and \
                self.connection._reset_agent is self.__transaction:
            self.connection._reset_agent = None
        self.__transaction = None
def _savepoint_impl(self, name=None):
    """Emit a SAVEPOINT, generating a sequential name when none is
    given, and return the name used (root connections only)."""
    assert not self.__branch_from
    if self._has_events or self.engine._has_events:
        self.dispatch.savepoint(self, name)
    if name is None:
        # Auto-generate a per-connection unique savepoint name.
        self.__savepoint_seq += 1
        name = 'sa_savepoint_%s' % self.__savepoint_seq
    if self._still_open_and_connection_is_valid:
        self.engine.dialect.do_savepoint(self, name)
        return name
def _rollback_to_savepoint_impl(self, name, context):
    """Emit ROLLBACK TO SAVEPOINT and restore *context* as the current
    transaction (root connections only)."""
    assert not self.__branch_from
    if self._has_events or self.engine._has_events:
        self.dispatch.rollback_savepoint(self, name, context)
    if self._still_open_and_connection_is_valid:
        self.engine.dialect.do_rollback_to_savepoint(self, name)
    # The enclosing transaction becomes current again regardless.
    self.__transaction = context
def _release_savepoint_impl(self, name, context):
    """Emit RELEASE SAVEPOINT and restore *context* as the current
    transaction (root connections only)."""
    assert not self.__branch_from
    if self._has_events or self.engine._has_events:
        self.dispatch.release_savepoint(self, name, context)
    if self._still_open_and_connection_is_valid:
        self.engine.dialect.do_release_savepoint(self, name)
    # The enclosing transaction becomes current again regardless.
    self.__transaction = context
def _begin_twophase_impl(self, transaction):
    """Emit the dialect-level two-phase BEGIN for *transaction*
    (root connections only)."""
    assert not self.__branch_from
    if self._echo:
        self.engine.logger.info("BEGIN TWOPHASE (implicit)")
    if self._has_events or self.engine._has_events:
        self.dispatch.begin_twophase(self, transaction.xid)
    if self._still_open_and_connection_is_valid:
        self.engine.dialect.do_begin_twophase(self, transaction.xid)
        if self.connection._reset_agent is None:
            # Register this transaction as the agent the pool will use
            # to reset the connection on checkin.
            self.connection._reset_agent = transaction
def _prepare_twophase_impl(self, xid):
    """Emit the PREPARE phase of a two-phase transaction
    (root connections only)."""
    assert not self.__branch_from
    if self._has_events or self.engine._has_events:
        self.dispatch.prepare_twophase(self, xid)
    if self._still_open_and_connection_is_valid:
        # Only a TwoPhaseTransaction can be prepared.
        assert isinstance(self.__transaction, TwoPhaseTransaction)
        self.engine.dialect.do_prepare_twophase(self, xid)
def _rollback_twophase_impl(self, xid, is_prepared):
    """Roll back a two-phase transaction, prepared or not, and clear
    transactional state (root connections only)."""
    assert not self.__branch_from
    if self._has_events or self.engine._has_events:
        self.dispatch.rollback_twophase(self, xid, is_prepared)
    if self._still_open_and_connection_is_valid:
        assert isinstance(self.__transaction, TwoPhaseTransaction)
        try:
            self.engine.dialect.do_rollback_twophase(
                self, xid, is_prepared)
        finally:
            # Always detach the pool reset agent and drop the
            # transaction, even if the rollback raised.
            if self.connection._reset_agent is self.__transaction:
                self.connection._reset_agent = None
            self.__transaction = None
    else:
        self.__transaction = None
def _commit_twophase_impl(self, xid, is_prepared):
    """Commit a two-phase transaction, prepared or not, and clear
    transactional state (root connections only)."""
    assert not self.__branch_from
    if self._has_events or self.engine._has_events:
        self.dispatch.commit_twophase(self, xid, is_prepared)
    if self._still_open_and_connection_is_valid:
        assert isinstance(self.__transaction, TwoPhaseTransaction)
        try:
            self.engine.dialect.do_commit_twophase(self, xid, is_prepared)
        finally:
            # Always detach the pool reset agent and drop the
            # transaction, even if the commit raised.
            if self.connection._reset_agent is self.__transaction:
                self.connection._reset_agent = None
            self.__transaction = None
    else:
        self.__transaction = None
def _autorollback(self):
    # In "autocommit" mode a failed statement outside an explicit
    # transaction leaves the DBAPI transaction dangling; roll it back.
    root = self._root
    if not root.in_transaction():
        root._rollback_impl()
def close(self):
    """Close this :class:`.Connection`.

    This results in a release of the underlying database
    resources, that is, the DBAPI connection referenced
    internally. The DBAPI connection is typically restored
    back to the connection-holding :class:`.Pool` referenced
    by the :class:`.Engine` that produced this
    :class:`.Connection`. Any transactional state present on
    the DBAPI connection is also unconditionally released via
    the DBAPI connection's ``rollback()`` method, regardless
    of any :class:`.Transaction` object that may be
    outstanding with regards to this :class:`.Connection`.

    After :meth:`~.Connection.close` is called, the
    :class:`.Connection` is permanently in a closed state,
    and will allow no further operations.

    """
    if self.__branch_from:
        # Branch close: drop our reference only; the root keeps the
        # DBAPI connection alive.
        try:
            del self.__connection
        except AttributeError:
            pass
        finally:
            self.__can_reconnect = False
            return
    try:
        conn = self.__connection
    except AttributeError:
        # Already closed/invalidated: fall through to final bookkeeping.
        pass
    else:
        conn.close()
        if conn._reset_agent is self.__transaction:
            conn._reset_agent = None
        # the close() process can end up invalidating us,
        # as the pool will call our transaction as the "reset_agent"
        # for rollback(), which can then cause an invalidation
        if not self.__invalid:
            del self.__connection
    self.__can_reconnect = False
    self.__transaction = None
def scalar(self, object, *multiparams, **params):
    """Executes and returns the first column of the first row.

    The underlying result/cursor is closed after execution.
    """
    result = self.execute(object, *multiparams, **params)
    return result.scalar()
def execute(self, object, *multiparams, **params):
    r"""Executes a SQL statement construct and returns a
    :class:`.ResultProxy`.

    :param object: The statement to be executed.  May be
     one of:

     * a plain string
     * any :class:`.ClauseElement` construct that is also
       a subclass of :class:`.Executable`, such as a
       :func:`~.expression.select` construct
     * a :class:`.FunctionElement`, such as that generated
       by :data:`.func`, will be automatically wrapped in
       a SELECT statement, which is then executed.
     * a :class:`.DDLElement` object
     * a :class:`.DefaultGenerator` object
     * a :class:`.Compiled` object

    :param \*multiparams/\**params: represent bound parameter
     values to be used in the execution.   Typically,
     the format is either a collection of one or more
     dictionaries passed to \*multiparams::

         conn.execute(
             table.insert(),
             {"id":1, "value":"v1"},
             {"id":2, "value":"v2"}
         )

     ...or individual key/values interpreted by \**params::

         conn.execute(
             table.insert(), id=1, value="v1"
         )

     In the case that a plain SQL string is passed, and the underlying
     DBAPI accepts positional bind parameters, a collection of tuples
     or individual values in \*multiparams may be passed::

         conn.execute(
             "INSERT INTO table (id, value) VALUES (?, ?)",
             (1, "v1"), (2, "v2")
         )

         conn.execute(
             "INSERT INTO table (id, value) VALUES (?, ?)",
             1, "v1"
         )

     Note above, the usage of a question mark "?" or other
     symbol is contingent upon the "paramstyle" accepted by the DBAPI
     in use, which may be any of "qmark", "named", "pyformat", "format",
     "numeric".   See `pep-249 <http://www.python.org/dev/peps/pep-0249/>`_
     for details on paramstyle.

    To execute a textual SQL statement which uses bound parameters in a
    DBAPI-agnostic way, use the :func:`~.expression.text` construct.

    """
    # Plain strings go straight to textual execution.
    if isinstance(object, util.string_types[0]):
        return self._execute_text(object, multiparams, params)
    try:
        # Executable constructs polymorphically provide their own
        # execution entry point.
        meth = object._execute_on_connection
    except AttributeError:
        raise exc.ObjectNotExecutableError(object)
    else:
        return meth(self, multiparams, params)
def _execute_function(self, func, multiparams, params):
"""Execute a sql.FunctionElement object."""
return self._execute_clauseelement(func.select(),
multiparams, params)
    def _execute_default(self, default, multiparams, params):
        """Execute a schema.ColumnDefault object.

        Builds a "default" execution context against the raw DBAPI
        connection and runs the default generator through it, honoring
        before_execute / after_execute events and the
        "close connection with result" flag.
        """
        # Allow listeners to replace the default and/or parameters.
        if self._has_events or self.engine._has_events:
            for fn in self.dispatch.before_execute:
                default, multiparams, params = \
                    fn(self, default, multiparams, params)
        try:
            try:
                conn = self.__connection
            except AttributeError:
                # escape "except AttributeError" before revalidating
                # to prevent misleading stacktraces in Py3K
                conn = None
            if conn is None:
                conn = self._revalidate_connection()
            dialect = self.dialect
            ctx = dialect.execution_ctx_cls._init_default(
                dialect, self, conn)
        except BaseException as e:
            # _handle_dbapi_exception always re-raises.
            self._handle_dbapi_exception(e, None, None, None, None)
        ret = ctx._exec_default(None, default, None)
        # "connectionless" execution: this Connection is single-use.
        if self.should_close_with_result:
            self.close()
        if self._has_events or self.engine._has_events:
            self.dispatch.after_execute(self,
                                        default, multiparams, params, ret)
        return ret
    def _execute_ddl(self, ddl, multiparams, params):
        """Execute a schema.DDL object.

        Compiles the DDL element against the current dialect (applying
        any schema translate map in effect) and runs it through the
        common _execute_context() path.
        """
        if self._has_events or self.engine._has_events:
            for fn in self.dispatch.before_execute:
                ddl, multiparams, params = \
                    fn(self, ddl, multiparams, params)
        dialect = self.dialect
        # Only pass a schema_translate_map when one is actually set.
        compiled = ddl.compile(
            dialect=dialect,
            schema_translate_map=self.schema_for_object
            if not self.schema_for_object.is_default else None)
        ret = self._execute_context(
            dialect,
            dialect.execution_ctx_cls._init_ddl,
            compiled,
            None,
            compiled
        )
        if self._has_events or self.engine._has_events:
            self.dispatch.after_execute(self,
                                        ddl, multiparams, params, ret)
        return ret
    def _execute_clauseelement(self, elem, multiparams, params):
        """Execute a sql.ClauseElement object.

        Distills the parameters, compiles the element (consulting the
        'compiled_cache' execution option when present), and hands the
        result to _execute_context().
        """
        if self._has_events or self.engine._has_events:
            for fn in self.dispatch.before_execute:
                elem, multiparams, params = \
                    fn(self, elem, multiparams, params)
        distilled_params = _distill_params(multiparams, params)
        if distilled_params:
            # ensure we don't retain a link to the view object for keys()
            # which links to the values, which we don't want to cache
            keys = list(distilled_params[0].keys())
        else:
            keys = []
        dialect = self.dialect
        if 'compiled_cache' in self._execution_options:
            # Cache key covers everything that affects the compiled form:
            # dialect, element, bound-parameter keys, schema translation,
            # and whether this is an executemany ("inline") compile.
            key = (
                dialect, elem, tuple(sorted(keys)),
                self.schema_for_object.hash_key,
                len(distilled_params) > 1
            )
            compiled_sql = self._execution_options['compiled_cache'].get(key)
            if compiled_sql is None:
                compiled_sql = elem.compile(
                    dialect=dialect, column_keys=keys,
                    inline=len(distilled_params) > 1,
                    schema_translate_map=self.schema_for_object
                    if not self.schema_for_object.is_default else None
                )
                self._execution_options['compiled_cache'][key] = compiled_sql
        else:
            compiled_sql = elem.compile(
                dialect=dialect, column_keys=keys,
                inline=len(distilled_params) > 1,
                schema_translate_map=self.schema_for_object
                if not self.schema_for_object.is_default else None)
        ret = self._execute_context(
            dialect,
            dialect.execution_ctx_cls._init_compiled,
            compiled_sql,
            distilled_params,
            compiled_sql, distilled_params
        )
        if self._has_events or self.engine._has_events:
            self.dispatch.after_execute(self,
                                        elem, multiparams, params, ret)
        return ret
    def _execute_compiled(self, compiled, multiparams, params):
        """Execute a sql.Compiled object.

        The element is already compiled; distill the parameters and run
        it through _execute_context() directly.
        """
        if self._has_events or self.engine._has_events:
            for fn in self.dispatch.before_execute:
                compiled, multiparams, params = \
                    fn(self, compiled, multiparams, params)
        dialect = self.dialect
        parameters = _distill_params(multiparams, params)
        ret = self._execute_context(
            dialect,
            dialect.execution_ctx_cls._init_compiled,
            compiled,
            parameters,
            compiled, parameters
        )
        if self._has_events or self.engine._has_events:
            self.dispatch.after_execute(self,
                                        compiled, multiparams, params, ret)
        return ret
    def _execute_text(self, statement, multiparams, params):
        """Execute a string SQL statement.

        Textual statements skip compilation entirely and use the
        "_init_statement" execution context.
        """
        if self._has_events or self.engine._has_events:
            for fn in self.dispatch.before_execute:
                statement, multiparams, params = \
                    fn(self, statement, multiparams, params)
        dialect = self.dialect
        parameters = _distill_params(multiparams, params)
        ret = self._execute_context(
            dialect,
            dialect.execution_ctx_cls._init_statement,
            statement,
            parameters,
            statement, parameters
        )
        if self._has_events or self.engine._has_events:
            self.dispatch.after_execute(self,
                                        statement, multiparams, params, ret)
        return ret
    def _execute_context(self, dialect, constructor,
                         statement, parameters,
                         *args):
        """Create an :class:`.ExecutionContext` and execute, returning
        a :class:`.ResultProxy`.

        This is the common terminus for all of the _execute_* methods:
        it builds the execution context via *constructor*, fires cursor
        events, dispatches to the dialect's do_execute* hooks, and wraps
        any error through _handle_dbapi_exception() (which re-raises).
        """
        try:
            try:
                conn = self.__connection
            except AttributeError:
                # escape "except AttributeError" before revalidating
                # to prevent misleading stacktraces in Py3K
                conn = None
            if conn is None:
                conn = self._revalidate_connection()
            context = constructor(dialect, self, conn, *args)
        except BaseException as e:
            # re-raises; execution never proceeds past this point on error
            self._handle_dbapi_exception(
                e,
                util.text_type(statement), parameters,
                None, None)
        if context.compiled:
            context.pre_exec()
        # The context may have rewritten the statement/parameters
        # (e.g. dialect-level processing); use its versions from here on.
        cursor, statement, parameters = context.cursor, \
            context.statement, \
            context.parameters
        if not context.executemany:
            parameters = parameters[0]
        if self._has_events or self.engine._has_events:
            for fn in self.dispatch.before_cursor_execute:
                statement, parameters = \
                    fn(self, cursor, statement, parameters,
                       context, context.executemany)
        if self._echo:
            self.engine.logger.info(statement)
            self.engine.logger.info(
                "%r",
                sql_util._repr_params(parameters, batches=10)
            )
        evt_handled = False
        try:
            # Three execution styles: executemany, no-parameters, and
            # single-parameter-set; each consults dialect-level
            # do_execute* events first, falling back to the dialect.
            if context.executemany:
                if self.dialect._has_events:
                    for fn in self.dialect.dispatch.do_executemany:
                        if fn(cursor, statement, parameters, context):
                            evt_handled = True
                            break
                if not evt_handled:
                    self.dialect.do_executemany(
                        cursor,
                        statement,
                        parameters,
                        context)
            elif not parameters and context.no_parameters:
                if self.dialect._has_events:
                    for fn in self.dialect.dispatch.do_execute_no_params:
                        if fn(cursor, statement, context):
                            evt_handled = True
                            break
                if not evt_handled:
                    self.dialect.do_execute_no_params(
                        cursor,
                        statement,
                        context)
            else:
                if self.dialect._has_events:
                    for fn in self.dialect.dispatch.do_execute:
                        if fn(cursor, statement, parameters, context):
                            evt_handled = True
                            break
                if not evt_handled:
                    self.dialect.do_execute(
                        cursor,
                        statement,
                        parameters,
                        context)
        except BaseException as e:
            # re-raises after cursor cleanup / rollback / invalidation
            self._handle_dbapi_exception(
                e,
                statement,
                parameters,
                cursor,
                context)
        if self._has_events or self.engine._has_events:
            self.dispatch.after_cursor_execute(self, cursor,
                                               statement,
                                               parameters,
                                               context,
                                               context.executemany)
        if context.compiled:
            context.post_exec()
        if context.is_crud or context.is_text:
            result = context._setup_crud_result_proxy()
        else:
            result = context.get_result_proxy()
            if result._metadata is None:
                result._soft_close()
        if context.should_autocommit and self._root.__transaction is None:
            self._root._commit_impl(autocommit=True)
        # for "connectionless" execution, we have to close this
        # Connection after the statement is complete.
        if self.should_close_with_result:
            # ResultProxy already exhausted rows / has no rows.
            # close us now
            if result._soft_closed:
                self.close()
            else:
                # ResultProxy will close this Connection when no more
                # rows to fetch.
                result._autoclose_connection = True
        return result
    def _cursor_execute(self, cursor, statement, parameters, context=None):
        """Execute a statement + params on the given cursor.

        Adds appropriate logging and exception handling.

        This method is used by DefaultDialect for special-case
        executions, such as for sequences and column defaults.
        The path of statement execution in the majority of cases
        terminates at _execute_context().

        """
        if self._has_events or self.engine._has_events:
            for fn in self.dispatch.before_cursor_execute:
                statement, parameters = \
                    fn(self, cursor, statement, parameters,
                       context,
                       False)
        if self._echo:
            self.engine.logger.info(statement)
            self.engine.logger.info("%r", parameters)
        try:
            # for/else: dialect-level do_execute events get first shot;
            # if none of them handles it (no break), fall back to the
            # dialect's own do_execute.
            for fn in () if not self.dialect._has_events \
                    else self.dialect.dispatch.do_execute:
                if fn(cursor, statement, parameters, context):
                    break
            else:
                self.dialect.do_execute(
                    cursor,
                    statement,
                    parameters,
                    context)
        except BaseException as e:
            # re-raises after running the full error-handling machinery
            self._handle_dbapi_exception(
                e,
                statement,
                parameters,
                cursor,
                context)
        if self._has_events or self.engine._has_events:
            self.dispatch.after_cursor_execute(self, cursor,
                                               statement,
                                               parameters,
                                               context,
                                               False)
def _safe_close_cursor(self, cursor):
"""Close the given cursor, catching exceptions
and turning into log warnings.
"""
try:
cursor.close()
except Exception:
# log the error through the connection pool's logger.
self.engine.pool.logger.error(
"Error closing cursor", exc_info=True)
_reentrant_error = False
_is_disconnect = False
    def _handle_dbapi_exception(self,
                                e,
                                statement,
                                parameters,
                                cursor,
                                context):
        """Process exception *e* raised during statement execution.

        Performs disconnect detection, wraps DBAPI errors in
        :class:`.DBAPIError`, fires the dbapi_error / handle_error
        events, closes the cursor and rolls back as needed, then
        ALWAYS re-raises (possibly a handler-substituted exception).
        """
        exc_info = sys.exc_info()
        if context and context.exception is None:
            context.exception = e
        # Exit-style exceptions (KeyboardInterrupt, SystemExit, ...)
        # are not instances of Exception.
        is_exit_exception = not isinstance(e, Exception)
        if not self._is_disconnect:
            self._is_disconnect = (
                isinstance(e, self.dialect.dbapi.Error) and
                not self.closed and
                self.dialect.is_disconnect(
                    e,
                    self.__connection if not self.invalidated else None,
                    cursor)
            ) or (
                is_exit_exception and not self.closed
            )
            if context:
                context.is_disconnect = self._is_disconnect
        invalidate_pool_on_disconnect = not is_exit_exception
        # If error handling itself errored, wrap and bail immediately
        # rather than recursing.
        if self._reentrant_error:
            util.raise_from_cause(
                exc.DBAPIError.instance(statement,
                                        parameters,
                                        e,
                                        self.dialect.dbapi.Error,
                                        dialect=self.dialect),
                exc_info
            )
        self._reentrant_error = True
        try:
            # non-DBAPI error - if we already got a context,
            # or there's no string statement, don't wrap it
            should_wrap = isinstance(e, self.dialect.dbapi.Error) or \
                (statement is not None
                 and context is None and not is_exit_exception)
            if should_wrap:
                sqlalchemy_exception = exc.DBAPIError.instance(
                    statement,
                    parameters,
                    e,
                    self.dialect.dbapi.Error,
                    connection_invalidated=self._is_disconnect,
                    dialect=self.dialect)
            else:
                sqlalchemy_exception = None
            newraise = None
            if (self._has_events or self.engine._has_events) and \
                    not self._execution_options.get(
                        'skip_user_error_events', False):
                # legacy dbapi_error event
                if should_wrap and context:
                    self.dispatch.dbapi_error(self,
                                              cursor,
                                              statement,
                                              parameters,
                                              context,
                                              e)
                # new handle_error event
                ctx = ExceptionContextImpl(
                    e, sqlalchemy_exception, self.engine,
                    self, cursor, statement,
                    parameters, context, self._is_disconnect,
                    invalidate_pool_on_disconnect)
                for fn in self.dispatch.handle_error:
                    try:
                        # handler returns an exception;
                        # call next handler in a chain
                        per_fn = fn(ctx)
                        if per_fn is not None:
                            ctx.chained_exception = newraise = per_fn
                    except Exception as _raised:
                        # handler raises an exception - stop processing
                        newraise = _raised
                        break
                # handlers may flip the disconnect determination
                if self._is_disconnect != ctx.is_disconnect:
                    self._is_disconnect = ctx.is_disconnect
                    if sqlalchemy_exception:
                        sqlalchemy_exception.connection_invalidated = \
                            ctx.is_disconnect
                # set up potentially user-defined value for
                # invalidate pool.
                invalidate_pool_on_disconnect = \
                    ctx.invalidate_pool_on_disconnect
            if should_wrap and context:
                context.handle_dbapi_exception(e)
            if not self._is_disconnect:
                if cursor:
                    self._safe_close_cursor(cursor)
                with util.safe_reraise(warn_only=True):
                    self._autorollback()
            if newraise:
                util.raise_from_cause(newraise, exc_info)
            elif should_wrap:
                util.raise_from_cause(
                    sqlalchemy_exception,
                    exc_info
                )
            else:
                util.reraise(*exc_info)
        finally:
            del self._reentrant_error
            if self._is_disconnect:
                del self._is_disconnect
                if not self.invalidated:
                    dbapi_conn_wrapper = self.__connection
                    if invalidate_pool_on_disconnect:
                        self.engine.pool._invalidate(dbapi_conn_wrapper, e)
                    self.invalidate(e)
            if self.should_close_with_result:
                self.close()
    @classmethod
    def _handle_dbapi_exception_noconnection(cls, e, dialect, engine):
        """Variant of _handle_dbapi_exception used when no
        :class:`.Connection` exists yet (e.g. failure during pool
        connect).  Wraps DBAPI errors, fires engine-level handle_error
        events, and ALWAYS re-raises.
        """
        exc_info = sys.exc_info()
        is_disconnect = dialect.is_disconnect(e, None, None)
        should_wrap = isinstance(e, dialect.dbapi.Error)
        if should_wrap:
            sqlalchemy_exception = exc.DBAPIError.instance(
                None,
                None,
                e,
                dialect.dbapi.Error,
                connection_invalidated=is_disconnect)
        else:
            sqlalchemy_exception = None
        newraise = None
        if engine._has_events:
            # connection/cursor/statement are all None here; only the
            # engine-scoped fields of the context are populated.
            ctx = ExceptionContextImpl(
                e, sqlalchemy_exception, engine, None, None, None,
                None, None, is_disconnect, True)
            for fn in engine.dispatch.handle_error:
                try:
                    # handler returns an exception;
                    # call next handler in a chain
                    per_fn = fn(ctx)
                    if per_fn is not None:
                        ctx.chained_exception = newraise = per_fn
                except Exception as _raised:
                    # handler raises an exception - stop processing
                    newraise = _raised
                    break
            if sqlalchemy_exception and \
                    is_disconnect != ctx.is_disconnect:
                sqlalchemy_exception.connection_invalidated = \
                    is_disconnect = ctx.is_disconnect
        if newraise:
            util.raise_from_cause(newraise, exc_info)
        elif should_wrap:
            util.raise_from_cause(
                sqlalchemy_exception,
                exc_info
            )
        else:
            util.reraise(*exc_info)
def transaction(self, callable_, *args, **kwargs):
r"""Execute the given function within a transaction boundary.
The function is passed this :class:`.Connection`
as the first argument, followed by the given \*args and \**kwargs,
e.g.::
def do_something(conn, x, y):
conn.execute("some statement", {'x':x, 'y':y})
conn.transaction(do_something, 5, 10)
The operations inside the function are all invoked within the
context of a single :class:`.Transaction`.
Upon success, the transaction is committed. If an
exception is raised, the transaction is rolled back
before propagating the exception.
.. note::
The :meth:`.transaction` method is superseded by
the usage of the Python ``with:`` statement, which can
be used with :meth:`.Connection.begin`::
with conn.begin():
conn.execute("some statement", {'x':5, 'y':10})
As well as with :meth:`.Engine.begin`::
with engine.begin() as conn:
conn.execute("some statement", {'x':5, 'y':10})
See also:
:meth:`.Engine.begin` - engine-level transactional
context
:meth:`.Engine.transaction` - engine-level version of
:meth:`.Connection.transaction`
"""
trans = self.begin()
try:
ret = self.run_callable(callable_, *args, **kwargs)
trans.commit()
return ret
except:
with util.safe_reraise():
trans.rollback()
def run_callable(self, callable_, *args, **kwargs):
r"""Given a callable object or function, execute it, passing
a :class:`.Connection` as the first argument.
The given \*args and \**kwargs are passed subsequent
to the :class:`.Connection` argument.
This function, along with :meth:`.Engine.run_callable`,
allows a function to be run with a :class:`.Connection`
or :class:`.Engine` object without the need to know
which one is being dealt with.
"""
return callable_(self, *args, **kwargs)
def _run_visitor(self, visitorcallable, element, **kwargs):
visitorcallable(self.dialect, self,
**kwargs).traverse_single(element)
class ExceptionContextImpl(ExceptionContext):
    """Concrete :class:`.ExceptionContext`, carrying the state of one
    error-handling pass through the ``handle_error`` event chain."""

    def __init__(self, exception, sqlalchemy_exception,
                 engine, connection, cursor, statement, parameters,
                 context, is_disconnect, invalidate_pool_on_disconnect):
        # The raw exception and its SQLAlchemy wrapper (if one was built).
        self.original_exception = exception
        self.sqlalchemy_exception = sqlalchemy_exception
        # Where the failure occurred (the cursor argument is accepted but
        # not retained on the context).
        self.engine = engine
        self.connection = connection
        self.execution_context = context
        # The statement and parameters in flight when the error rose.
        self.statement = statement
        self.parameters = parameters
        # Disconnect-handling state; handle_error listeners may mutate.
        self.is_disconnect = is_disconnect
        self.invalidate_pool_on_disconnect = invalidate_pool_on_disconnect
class Transaction(object):
    """Represent a database transaction in progress.

    Procured from the :meth:`~.Connection.begin` method of
    :class:`.Connection`::

        from sqlalchemy import create_engine
        engine = create_engine("postgresql://scott:tiger@localhost/test")
        connection = engine.connect()
        trans = connection.begin()
        connection.execute("insert into x (a, b) values (1, 2)")
        trans.commit()

    Transaction boundaries are controlled via :meth:`.rollback` and
    :meth:`.commit`; the object is also a context manager, so the
    Python ``with`` statement works with :meth:`.Connection.begin`::

        with connection.begin():
            connection.execute("insert into x (a, b) values (1, 2)")

    The Transaction object is **not** threadsafe.

    See also: :meth:`.Connection.begin`,
    :meth:`.Connection.begin_twophase`, :meth:`.Connection.begin_nested`.

    .. index::
      single: thread safety; Transaction
    """

    def __init__(self, connection, parent):
        self.connection = connection
        self._actual_parent = parent
        self.is_active = True

    @property
    def _parent(self):
        # A root transaction is its own parent.
        return self._actual_parent if self._actual_parent is not None \
            else self

    def close(self):
        """Close this :class:`.Transaction`.

        A base (outermost) transaction is rolled back; a nested one is
        simply abandoned, leaving the enclosing transaction untouched.
        """
        if self._parent.is_active and self._parent is self:
            self.rollback()

    def rollback(self):
        """Roll back this :class:`.Transaction`."""
        if self._parent.is_active:
            self._do_rollback()
            self.is_active = False

    def _do_rollback(self):
        # Subclasses override; the base delegates upward.
        self._parent.rollback()

    def commit(self):
        """Commit this :class:`.Transaction`."""
        if not self._parent.is_active:
            raise exc.InvalidRequestError("This transaction is inactive")
        self._do_commit()
        self.is_active = False

    def _do_commit(self):
        # Subclasses override; the base commit is a no-op.
        pass

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        if type is not None or not self.is_active:
            self.rollback()
        else:
            try:
                self.commit()
            except:
                with util.safe_reraise():
                    self.rollback()
class RootTransaction(Transaction):
    """The outermost transaction on a :class:`.Connection`."""

    def __init__(self, connection):
        # A root transaction has no parent.
        super(RootTransaction, self).__init__(connection, None)
        self.connection._begin_impl(self)

    def _do_rollback(self):
        if not self.is_active:
            return
        self.connection._rollback_impl()

    def _do_commit(self):
        if not self.is_active:
            return
        self.connection._commit_impl()
class NestedTransaction(Transaction):
    """A 'nested', or SAVEPOINT, transaction.

    Procured via the :meth:`.Connection.begin_nested` method; shares the
    :class:`.Transaction` interface.
    """

    def __init__(self, connection, parent):
        super(NestedTransaction, self).__init__(connection, parent)
        # Establish the savepoint up front; its identifier is needed for
        # both release and rollback.
        self._savepoint = self.connection._savepoint_impl()

    def _do_rollback(self):
        if not self.is_active:
            return
        self.connection._rollback_to_savepoint_impl(
            self._savepoint, self._parent)

    def _do_commit(self):
        if not self.is_active:
            return
        self.connection._release_savepoint_impl(
            self._savepoint, self._parent)
class TwoPhaseTransaction(Transaction):
    """A two-phase-commit transaction.

    Procured via the :meth:`.Connection.begin_twophase` method; extends
    the :class:`.Transaction` interface with :meth:`prepare`.
    """

    def __init__(self, connection, xid):
        super(TwoPhaseTransaction, self).__init__(connection, None)
        self.xid = xid
        self._is_prepared = False
        self.connection._begin_twophase_impl(self)

    def prepare(self):
        """Issue PREPARE for this :class:`.TwoPhaseTransaction`.

        After a PREPARE, the transaction can be committed.
        """
        if not self._parent.is_active:
            raise exc.InvalidRequestError("This transaction is inactive")
        self.connection._prepare_twophase_impl(self.xid)
        self._is_prepared = True

    def _do_rollback(self):
        self.connection._rollback_twophase_impl(self.xid, self._is_prepared)

    def _do_commit(self):
        self.connection._commit_twophase_impl(self.xid, self._is_prepared)
class Engine(Connectable, log.Identified):
"""
Connects a :class:`~sqlalchemy.pool.Pool` and
:class:`~sqlalchemy.engine.interfaces.Dialect` together to provide a
source of database connectivity and behavior.
An :class:`.Engine` object is instantiated publicly using the
:func:`~sqlalchemy.create_engine` function.
See also:
:doc:`/core/engines`
:ref:`connections_toplevel`
"""
_execution_options = util.immutabledict()
_has_events = False
_connection_cls = Connection
schema_for_object = schema._schema_getter(None)
"""Return the ".schema" attribute for an object.
Used for :class:`.Table`, :class:`.Sequence` and similar objects,
and takes into account
the :paramref:`.Connection.execution_options.schema_translate_map`
parameter.
.. versionadded:: 1.1
.. seealso::
:ref:`schema_translating`
"""
def __init__(self, pool, dialect, url,
logging_name=None, echo=None, proxy=None,
execution_options=None
):
self.pool = pool
self.url = url
self.dialect = dialect
if logging_name:
self.logging_name = logging_name
self.echo = echo
self.engine = self
log.instance_logger(self, echoflag=echo)
if proxy:
interfaces.ConnectionProxy._adapt_listener(self, proxy)
if execution_options:
self.update_execution_options(**execution_options)
def update_execution_options(self, **opt):
r"""Update the default execution_options dictionary
of this :class:`.Engine`.
The given keys/values in \**opt are added to the
default execution options that will be used for
all connections. The initial contents of this dictionary
can be sent via the ``execution_options`` parameter
to :func:`.create_engine`.
.. seealso::
:meth:`.Connection.execution_options`
:meth:`.Engine.execution_options`
"""
self._execution_options = \
self._execution_options.union(opt)
self.dispatch.set_engine_execution_options(self, opt)
self.dialect.set_engine_execution_options(self, opt)
def execution_options(self, **opt):
"""Return a new :class:`.Engine` that will provide
:class:`.Connection` objects with the given execution options.
The returned :class:`.Engine` remains related to the original
:class:`.Engine` in that it shares the same connection pool and
other state:
* The :class:`.Pool` used by the new :class:`.Engine` is the
same instance. The :meth:`.Engine.dispose` method will replace
the connection pool instance for the parent engine as well
as this one.
* Event listeners are "cascaded" - meaning, the new :class:`.Engine`
inherits the events of the parent, and new events can be associated
with the new :class:`.Engine` individually.
* The logging configuration and logging_name is copied from the parent
:class:`.Engine`.
The intent of the :meth:`.Engine.execution_options` method is
to implement "sharding" schemes where multiple :class:`.Engine`
objects refer to the same connection pool, but are differentiated
by options that would be consumed by a custom event::
primary_engine = create_engine("mysql://")
shard1 = primary_engine.execution_options(shard_id="shard1")
shard2 = primary_engine.execution_options(shard_id="shard2")
Above, the ``shard1`` engine serves as a factory for
:class:`.Connection` objects that will contain the execution option
``shard_id=shard1``, and ``shard2`` will produce :class:`.Connection`
objects that contain the execution option ``shard_id=shard2``.
An event handler can consume the above execution option to perform
a schema switch or other operation, given a connection. Below
we emit a MySQL ``use`` statement to switch databases, at the same
time keeping track of which database we've established using the
:attr:`.Connection.info` dictionary, which gives us a persistent
storage space that follows the DBAPI connection::
from sqlalchemy import event
from sqlalchemy.engine import Engine
shards = {"default": "base", shard_1: "db1", "shard_2": "db2"}
@event.listens_for(Engine, "before_cursor_execute")
def _switch_shard(conn, cursor, stmt,
params, context, executemany):
shard_id = conn._execution_options.get('shard_id', "default")
current_shard = conn.info.get("current_shard", None)
if current_shard != shard_id:
cursor.execute("use %s" % shards[shard_id])
conn.info["current_shard"] = shard_id
.. versionadded:: 0.8
.. seealso::
:meth:`.Connection.execution_options` - update execution options
on a :class:`.Connection` object.
:meth:`.Engine.update_execution_options` - update the execution
options for a given :class:`.Engine` in place.
"""
return OptionEngine(self, opt)
@property
def name(self):
"""String name of the :class:`~sqlalchemy.engine.interfaces.Dialect`
in use by this :class:`Engine`."""
return self.dialect.name
@property
def driver(self):
"""Driver name of the :class:`~sqlalchemy.engine.interfaces.Dialect`
in use by this :class:`Engine`."""
return self.dialect.driver
echo = log.echo_property()
def __repr__(self):
return 'Engine(%r)' % self.url
def dispose(self):
"""Dispose of the connection pool used by this :class:`.Engine`.
This has the effect of fully closing all **currently checked in**
database connections. Connections that are still checked out
will **not** be closed, however they will no longer be associated
with this :class:`.Engine`, so when they are closed individually,
eventually the :class:`.Pool` which they are associated with will
be garbage collected and they will be closed out fully, if
not already closed on checkin.
A new connection pool is created immediately after the old one has
been disposed. This new pool, like all SQLAlchemy connection pools,
does not make any actual connections to the database until one is
first requested, so as long as the :class:`.Engine` isn't used again,
no new connections will be made.
.. seealso::
:ref:`engine_disposal`
"""
self.pool.dispose()
self.pool = self.pool.recreate()
self.dispatch.engine_disposed(self)
def _execute_default(self, default):
with self.contextual_connect() as conn:
return conn._execute_default(default, (), {})
@contextlib.contextmanager
def _optional_conn_ctx_manager(self, connection=None):
if connection is None:
with self.contextual_connect() as conn:
yield conn
else:
yield connection
def _run_visitor(self, visitorcallable, element,
connection=None, **kwargs):
with self._optional_conn_ctx_manager(connection) as conn:
conn._run_visitor(visitorcallable, element, **kwargs)
class _trans_ctx(object):
def __init__(self, conn, transaction, close_with_result):
self.conn = conn
self.transaction = transaction
self.close_with_result = close_with_result
def __enter__(self):
return self.conn
def __exit__(self, type, value, traceback):
if type is not None:
self.transaction.rollback()
else:
self.transaction.commit()
if not self.close_with_result:
self.conn.close()
def begin(self, close_with_result=False):
"""Return a context manager delivering a :class:`.Connection`
with a :class:`.Transaction` established.
E.g.::
with engine.begin() as conn:
conn.execute("insert into table (x, y, z) values (1, 2, 3)")
conn.execute("my_special_procedure(5)")
Upon successful operation, the :class:`.Transaction`
is committed. If an error is raised, the :class:`.Transaction`
is rolled back.
The ``close_with_result`` flag is normally ``False``, and indicates
that the :class:`.Connection` will be closed when the operation
is complete. When set to ``True``, it indicates the
:class:`.Connection` is in "single use" mode, where the
:class:`.ResultProxy` returned by the first call to
:meth:`.Connection.execute` will close the :class:`.Connection` when
that :class:`.ResultProxy` has exhausted all result rows.
.. versionadded:: 0.7.6
See also:
:meth:`.Engine.connect` - procure a :class:`.Connection` from
an :class:`.Engine`.
:meth:`.Connection.begin` - start a :class:`.Transaction`
for a particular :class:`.Connection`.
"""
conn = self.contextual_connect(close_with_result=close_with_result)
try:
trans = conn.begin()
except:
with util.safe_reraise():
conn.close()
return Engine._trans_ctx(conn, trans, close_with_result)
def transaction(self, callable_, *args, **kwargs):
r"""Execute the given function within a transaction boundary.
The function is passed a :class:`.Connection` newly procured
from :meth:`.Engine.contextual_connect` as the first argument,
followed by the given \*args and \**kwargs.
e.g.::
def do_something(conn, x, y):
conn.execute("some statement", {'x':x, 'y':y})
engine.transaction(do_something, 5, 10)
The operations inside the function are all invoked within the
context of a single :class:`.Transaction`.
Upon success, the transaction is committed. If an
exception is raised, the transaction is rolled back
before propagating the exception.
.. note::
The :meth:`.transaction` method is superseded by
the usage of the Python ``with:`` statement, which can
be used with :meth:`.Engine.begin`::
with engine.begin() as conn:
conn.execute("some statement", {'x':5, 'y':10})
See also:
:meth:`.Engine.begin` - engine-level transactional
context
:meth:`.Connection.transaction` - connection-level version of
:meth:`.Engine.transaction`
"""
with self.contextual_connect() as conn:
return conn.transaction(callable_, *args, **kwargs)
def run_callable(self, callable_, *args, **kwargs):
r"""Given a callable object or function, execute it, passing
a :class:`.Connection` as the first argument.
The given \*args and \**kwargs are passed subsequent
to the :class:`.Connection` argument.
This function, along with :meth:`.Connection.run_callable`,
allows a function to be run with a :class:`.Connection`
or :class:`.Engine` object without the need to know
which one is being dealt with.
"""
with self.contextual_connect() as conn:
return conn.run_callable(callable_, *args, **kwargs)
def execute(self, statement, *multiparams, **params):
"""Executes the given construct and returns a :class:`.ResultProxy`.
The arguments are the same as those used by
:meth:`.Connection.execute`.
Here, a :class:`.Connection` is acquired using the
:meth:`~.Engine.contextual_connect` method, and the statement executed
with that connection. The returned :class:`.ResultProxy` is flagged
such that when the :class:`.ResultProxy` is exhausted and its
underlying cursor is closed, the :class:`.Connection` created here
will also be closed, which allows its associated DBAPI connection
resource to be returned to the connection pool.
"""
connection = self.contextual_connect(close_with_result=True)
return connection.execute(statement, *multiparams, **params)
def scalar(self, statement, *multiparams, **params):
return self.execute(statement, *multiparams, **params).scalar()
def _execute_clauseelement(self, elem, multiparams=None, params=None):
connection = self.contextual_connect(close_with_result=True)
return connection._execute_clauseelement(elem, multiparams, params)
def _execute_compiled(self, compiled, multiparams, params):
connection = self.contextual_connect(close_with_result=True)
return connection._execute_compiled(compiled, multiparams, params)
def connect(self, **kwargs):
"""Return a new :class:`.Connection` object.
The :class:`.Connection` object is a facade that uses a DBAPI
connection internally in order to communicate with the database. This
connection is procured from the connection-holding :class:`.Pool`
referenced by this :class:`.Engine`. When the
:meth:`~.Connection.close` method of the :class:`.Connection` object
is called, the underlying DBAPI connection is then returned to the
connection pool, where it may be used again in a subsequent call to
:meth:`~.Engine.connect`.
"""
return self._connection_cls(self, **kwargs)
def contextual_connect(self, close_with_result=False, **kwargs):
"""Return a :class:`.Connection` object which may be part of some
ongoing context.
By default, this method does the same thing as :meth:`.Engine.connect`.
Subclasses of :class:`.Engine` may override this method
to provide contextual behavior.
:param close_with_result: When True, the first :class:`.ResultProxy`
created by the :class:`.Connection` will call the
:meth:`.Connection.close` method of that connection as soon as any
pending result rows are exhausted. This is used to supply the
"connectionless execution" behavior provided by the
:meth:`.Engine.execute` method.
"""
return self._connection_cls(
self,
self._wrap_pool_connect(self.pool.connect, None),
close_with_result=close_with_result,
**kwargs)
def table_names(self, schema=None, connection=None):
"""Return a list of all table names available in the database.
:param schema: Optional, retrieve names from a non-default schema.
:param connection: Optional, use a specified connection. Default is
the ``contextual_connect`` for this ``Engine``.
"""
with self._optional_conn_ctx_manager(connection) as conn:
if not schema:
schema = self.dialect.default_schema_name
return self.dialect.get_table_names(conn, schema)
    def has_table(self, table_name, schema=None):
        """Return True if the given backend has a table of the given name.

        :param table_name: name of the table to look for.
        :param schema: optional schema name to search within.

        .. seealso::

            :ref:`metadata_reflection_inspector` - detailed schema inspection
            using the :class:`.Inspector` interface.

            :class:`.quoted_name` - used to pass quoting information along
            with a schema identifier.

        """
        # the actual existence check is delegated to the dialect
        return self.run_callable(self.dialect.has_table, table_name, schema)
    def _wrap_pool_connect(self, fn, connection):
        """Invoke the pool-checkout callable ``fn``, translating DBAPI errors.

        :param fn: zero-argument callable that checks out a DBAPI connection
          (e.g. ``self.pool.connect`` or ``self.pool.unique_connection``).
        :param connection: an existing :class:`.Connection`, or ``None`` when
          no Connection object exists yet; controls error handling below.
        """
        dialect = self.dialect
        try:
            return fn()
        except dialect.dbapi.Error as e:
            if connection is None:
                # no Connection exists to own the error; run the
                # connection-less DBAPI exception handler
                Connection._handle_dbapi_exception_noconnection(
                    e, dialect, self)
            else:
                # an owning Connection exists; re-raise with the original
                # traceback and let it handle the error
                util.reraise(*sys.exc_info())
    def raw_connection(self, _connection=None):
        """Return a "raw" DBAPI connection from the connection pool.

        The returned object is a proxied version of the DBAPI
        connection object used by the underlying driver in use.
        The object will have all the same behavior as the real DBAPI
        connection, except that its ``close()`` method will result in the
        connection being returned to the pool, rather than being closed
        for real.

        This method provides direct DBAPI connection access for
        special situations when the API provided by :class:`.Connection`
        is not needed. When a :class:`.Connection` object is already
        present, the DBAPI connection is available using
        the :attr:`.Connection.connection` accessor.

        .. seealso::

            :ref:`dbapi_connections`

        """
        # route through _wrap_pool_connect so connect-time DBAPI errors get
        # dialect-level exception handling
        return self._wrap_pool_connect(
            self.pool.unique_connection, _connection)
class OptionEngine(Engine):
    """An :class:`.Engine` facade that shares the pool and dialect of a
    parent ("proxied") engine while carrying its own execution options."""

    # disallow class-level events on OptionEngine itself; it shares the
    # parent engine's dispatch (see note in __init__)
    _sa_propagate_class_events = False

    def __init__(self, proxied, execution_options):
        # mirror the identifying state of the proxied engine
        self._proxied = proxied
        self.url = proxied.url
        self.dialect = proxied.dialect
        self.logging_name = proxied.logging_name
        self.echo = proxied.echo
        log.instance_logger(self, echoflag=self.echo)

        # note: this will propagate events that are assigned to the parent
        # engine after this OptionEngine is created.   Since we share
        # the events of the parent we also disallow class-level events
        # to apply to the OptionEngine class directly.
        #
        # the other way this can work would be to transfer existing
        # events only, using:
        # self.dispatch._update(proxied.dispatch)
        #
        # that might be more appropriate however it would be a behavioral
        # change for logic that assigns events to the parent engine and
        # would like it to take effect for the already-created sub-engine.
        self.dispatch = self.dispatch._join(proxied.dispatch)

        self._execution_options = proxied._execution_options
        self.update_execution_options(**execution_options)

    def _get_pool(self):
        # pool is always that of the proxied engine (shared)
        return self._proxied.pool

    def _set_pool(self, pool):
        self._proxied.pool = pool

    pool = property(_get_pool, _set_pool)

    def _get_has_events(self):
        # events may come from the parent engine or be set locally
        return self._proxied._has_events or \
            self.__dict__.get('_has_events', False)

    def _set_has_events(self, value):
        self.__dict__['_has_events'] = value

    _has_events = property(_get_has_events, _set_has_events)
| 37.268904
| 84
| 0.609153
|
4a057fe627e825da97b4bfb5ece5d87b327b55f2
| 72,966
|
py
|
Python
|
O365/calendar.py
|
qsollet/python-o365
|
e138c12e44ff2f6788407b9305c7a84d5cd3047d
|
[
"Apache-2.0"
] | null | null | null |
O365/calendar.py
|
qsollet/python-o365
|
e138c12e44ff2f6788407b9305c7a84d5cd3047d
|
[
"Apache-2.0"
] | null | null | null |
O365/calendar.py
|
qsollet/python-o365
|
e138c12e44ff2f6788407b9305c7a84d5cd3047d
|
[
"Apache-2.0"
] | null | null | null |
import calendar
import datetime as dt
import logging
import pytz
# noinspection PyPep8Naming
from bs4 import BeautifulSoup as bs
from dateutil.parser import parse
from .utils import CaseEnum
from .utils import HandleRecipientsMixin
from .utils import AttachableMixin, ImportanceLevel, TrackerSet
from .utils import BaseAttachments, BaseAttachment
from .utils import Pagination, NEXT_LINK_KEYWORD, ApiComponent
from .utils.windows_tz import get_windows_tz
from .category import Category
# module-level logger
log = logging.getLogger(__name__)

# Month display names, January..December (calendar.month_name[0] is the
# empty string, so it is dropped).
MONTH_NAMES = list(calendar.month_name)[1:]
class EventResponse(CaseEnum):
    """Response state of an attendee (or the organizer) to an event."""
    Organizer = 'organizer'
    TentativelyAccepted = 'tentativelyAccepted'
    Accepted = 'accepted'
    Declined = 'declined'
    NotResponded = 'notResponded'
class AttendeeType(CaseEnum):
    """How strongly an attendee is expected at the event."""
    Required = 'required'
    Optional = 'optional'
    Resource = 'resource'
class EventSensitivity(CaseEnum):
    """Privacy level of an event."""
    Normal = 'normal'
    Personal = 'personal'
    Private = 'private'
    Confidential = 'confidential'
class EventShowAs(CaseEnum):
    """Availability shown in the owner's calendar for the event's slot."""
    Free = 'free'
    Tentative = 'tentative'
    Busy = 'busy'
    Oof = 'oof'
    WorkingElsewhere = 'workingElsewhere'
    Unknown = 'unknown'
class CalendarColor(CaseEnum):
    """Display color choices for a calendar."""
    LightBlue = 'lightBlue'
    LightGreen = 'lightGreen'
    LightOrange = 'lightOrange'
    LightGray = 'lightGray'
    LightYellow = 'lightYellow'
    LightTeal = 'lightTeal'
    LightPink = 'lightPink'
    LightBrown = 'lightBrown'
    LightRed = 'lightRed'
    MaxColor = 'maxColor'
    Auto = 'auto'
class EventType(CaseEnum):
    """Kind of event with respect to recurrence."""
    SingleInstance = 'singleInstance'  # a normal (non-recurring) event
    Occurrence = 'occurrence'  # all the other recurring events that is not the first one (seriesMaster)
    Exception = 'exception'  # ?
    SeriesMaster = 'seriesMaster'  # the first recurring event of the series
class OnlineMeetingProviderType(CaseEnum):
    """Service providing the online-meeting link of an event."""
    Unknown = 'unknown'
    TeamsForBusiness = 'teamsForBusiness'
    SkypeForBusiness = 'skypeForBusiness'
    SkypeForConsumer = 'skypeForConsumer'
class EventAttachment(BaseAttachment):
    """A single attachment of a calendar event."""
    # API endpoint template (formatted with the event id) used for uploads
    _endpoints = {'attach': '/events/{id}/attachments'}
class EventAttachments(BaseAttachments):
    """Collection of the attachments of a calendar event."""
    # API endpoint templates; {id} is the event id, {ida} the attachment id
    _endpoints = {
        'attachments': '/events/{id}/attachments',
        'attachment': '/events/{id}/attachments/{ida}'
    }

    _attachment_constructor = EventAttachment
class DailyEventFrequency:
    """Simple value object pairing a recurrence type with its interval."""

    def __init__(self, recurrence_type, interval):
        # kind of recurrence this frequency describes
        self.recurrence_type = recurrence_type
        # number of units between occurrences
        self.interval = interval
# noinspection PyAttributeOutsideInit
class EventRecurrence(ApiComponent):
    """The recurrence properties (pattern and range) of an event.

    Parses the server 'recurrence' payload on construction and serializes
    it back via :meth:`to_api_data`. Any mutation through the setters flags
    'recurrence' as changed on the parent event.
    """

    def __init__(self, event, recurrence=None):
        """ A representation of an event recurrence properties

        :param Event event: event object
        :param dict recurrence: recurrence information
        """
        super().__init__(protocol=event.protocol,
                         main_resource=event.main_resource)

        self._event = event
        recurrence = recurrence or {}
        # recurrence pattern
        recurrence_pattern = recurrence.get(self._cc('pattern'), {})

        self.__interval = recurrence_pattern.get(self._cc('interval'), None)
        self.__days_of_week = recurrence_pattern.get(self._cc('daysOfWeek'),
                                                     set())
        self.__first_day_of_week = recurrence_pattern.get(
            self._cc('firstDayOfWeek'), None)
        # firstDayOfWeek is only meaningful for weekly patterns; drop it
        # for any other declared pattern type
        if 'type' in recurrence_pattern.keys():
            if 'weekly' not in recurrence_pattern['type'].lower():
                self.__first_day_of_week = None
        self.__day_of_month = recurrence_pattern.get(self._cc('dayOfMonth'),
                                                     None)
        self.__month = recurrence_pattern.get(self._cc('month'), None)
        self.__index = recurrence_pattern.get(self._cc('index'), 'first')

        # recurrence range
        recurrence_range = recurrence.get(self._cc('range'), {})

        self.__occurrences = recurrence_range.get(
            self._cc('numberOfOccurrences'), None)
        self.__start_date = recurrence_range.get(self._cc('startDate'), None)
        self.__end_date = recurrence_range.get(self._cc('endDate'), None)
        self.__recurrence_time_zone = recurrence_range.get(
            self._cc('recurrenceTimeZone'),
            get_windows_tz(self.protocol.timezone))
        # time and time zones are not considered in recurrence ranges...
        # I don't know why 'recurrenceTimeZone' is present here
        # Sending a startDate datetime to the server results in an Error:
        # Cannot convert the literal 'datetime' to the expected type 'Edm.Date'
        if recurrence_range:
            # keep plain dates only (see note above)
            self.__start_date = parse(
                self.__start_date).date() if self.__start_date else None
            self.__end_date = parse(
                self.__end_date).date() if self.__end_date else None

    def __repr__(self):
        # build a human-readable description: the most specific pattern set
        # wins (weekly / relative-or-absolute monthly / yearly), mirroring
        # the priority used by to_api_data()
        if self.__interval:
            pattern = 'Daily: every {} day/s'.format(self.__interval)
            if self.__days_of_week:
                days = ' or '.join(list(self.__days_of_week))
                pattern = 'Relative Monthly: {} {} every {} month/s'.format(
                    self.__index, days, self.__interval)
                if self.__first_day_of_week:
                    pattern = 'Weekly: every {} week/s on {}'.format(
                        self.__interval, days)
                elif self.__month:
                    pattern = ('Relative Yearly: {} {} every {} year/s on {}'
                               ''.format(self.__index, days,
                                         self.__interval,
                                         MONTH_NAMES[self.__month - 1]))
            elif self.__day_of_month:
                pattern = ('Absolute Monthly: on day {} every {} month/s'
                           ''.format(self.__day_of_month, self.__interval))
                if self.__month:
                    pattern = ('Absolute Yearly: on {} {} every {} year/s'
                               ''.format(MONTH_NAMES[self.__month - 1],
                                         self.__day_of_month,
                                         self.__interval))

            r_range = ''
            if self.__start_date:
                r_range = 'Starting on {}'.format(self.__start_date)
                ends_on = 'with no end'
                if self.__end_date:
                    ends_on = 'ending on {}'.format(self.__end_date)
                elif self.__occurrences:
                    ends_on = 'up to {} occurrences'.format(self.__occurrences)
                r_range = '{} {}'.format(r_range, ends_on)
            return '{}. {}'.format(pattern, r_range)
        else:
            return 'No recurrence enabled'

    def __str__(self):
        return self.__repr__()

    def __bool__(self):
        # a recurrence is considered "enabled" when an interval is set
        return bool(self.__interval)

    def _track_changes(self):
        """ Update the track_changes on the event to reflect a needed
        update on this field """
        self._event._track_changes.add('recurrence')

    @property
    def interval(self):
        """ Repeat interval for the event

        :getter: Get the current interval
        :setter: Update to a new interval
        :type: int
        """
        return self.__interval

    @interval.setter
    def interval(self, value):
        self.__interval = value
        self._track_changes()

    @property
    def days_of_week(self):
        """ Days in week to repeat

        :getter: Get the current list of days
        :setter: Set the list of days to repeat
        :type: set(str)
        """
        return self.__days_of_week

    @days_of_week.setter
    def days_of_week(self, value):
        self.__days_of_week = value
        self._track_changes()

    @property
    def first_day_of_week(self):
        """ Which day to consider start of the week

        :getter: Get the current start of week
        :setter: Set the start day of week
        :type: str
        """
        return self.__first_day_of_week

    @first_day_of_week.setter
    def first_day_of_week(self, value):
        self.__first_day_of_week = value
        self._track_changes()

    @property
    def day_of_month(self):
        """ Repeat on this day of month

        :getter: Get the repeat day of month
        :setter: Set the repeat day of month
        :type: int
        """
        return self.__day_of_month

    @day_of_month.setter
    def day_of_month(self, value):
        self.__day_of_month = value
        self._track_changes()

    @property
    def month(self):
        """ Month of the event

        :getter: Get month
        :setter: Update month
        :type: int
        """
        return self.__month

    @month.setter
    def month(self, value):
        self.__month = value
        self._track_changes()

    @property
    def index(self):
        """ Index

        :getter: Get index
        :setter: Set index
        :type: str
        """
        return self.__index

    @index.setter
    def index(self, value):
        self.__index = value
        self._track_changes()

    @property
    def occurrences(self):
        """ No. of occurrences

        :getter: Get the no. of occurrences
        :setter: Set the no. of occurrences
        :type: int
        """
        return self.__occurrences

    @occurrences.setter
    def occurrences(self, value):
        self.__occurrences = value
        self._track_changes()

    @property
    def recurrence_time_zone(self):
        """ Timezone to consider for repeating

        :getter: Get the timezone
        :setter: Set the timezone
        :type: str
        """
        return self.__recurrence_time_zone

    @recurrence_time_zone.setter
    def recurrence_time_zone(self, value):
        self.__recurrence_time_zone = value
        self._track_changes()

    @property
    def start_date(self):
        """ Start date of repetition

        :getter: get the start date
        :setter: set the start date
        :type: date
        """
        return self.__start_date

    @start_date.setter
    def start_date(self, value):
        if not isinstance(value, dt.date):
            raise ValueError('start_date value must be a valid date object')
        if isinstance(value, dt.datetime):
            # only the date part is kept (server rejects datetimes here)
            value = value.date()
        self.__start_date = value
        self._track_changes()

    @property
    def end_date(self):
        """ End date of repetition

        :getter: get the end date
        :setter: set the end date
        :type: date
        """
        return self.__end_date

    @end_date.setter
    def end_date(self, value):
        if not isinstance(value, dt.date):
            raise ValueError('end_date value must be a valid date object')
        if isinstance(value, dt.datetime):
            # only the date part is kept (server rejects datetimes here)
            value = value.date()
        self.__end_date = value
        self._track_changes()

    def to_api_data(self):
        """ Returns a dict to communicate with the server

        :rtype: dict
        """
        data = {}

        # recurrence pattern: the pattern 'type' is progressively overwritten
        # as more specific attributes are present (daily -> relativeMonthly ->
        # weekly / relativeYearly, or daily -> absoluteMonthly -> absoluteYearly)
        if self.__interval and isinstance(self.__interval, int):
            recurrence_pattern = data[self._cc('pattern')] = {}
            recurrence_pattern[self._cc('type')] = 'daily'
            recurrence_pattern[self._cc('interval')] = self.__interval
            if self.__days_of_week and isinstance(self.__days_of_week,
                                                  (list, tuple, set)):
                recurrence_pattern[self._cc('type')] = 'relativeMonthly'
                recurrence_pattern[self._cc('daysOfWeek')] = list(
                    self.__days_of_week)
                if self.__first_day_of_week:
                    recurrence_pattern[self._cc('type')] = 'weekly'
                    recurrence_pattern[
                        self._cc('firstDayOfWeek')] = self.__first_day_of_week
                elif self.__month and isinstance(self.__month, int):
                    recurrence_pattern[self._cc('type')] = 'relativeYearly'
                    recurrence_pattern[self._cc('month')] = self.__month
                    if self.__index:
                        recurrence_pattern[self._cc('index')] = self.__index
                else:
                    if self.__index:
                        recurrence_pattern[self._cc('index')] = self.__index
            elif self.__day_of_month and isinstance(self.__day_of_month, int):
                recurrence_pattern[self._cc('type')] = 'absoluteMonthly'
                recurrence_pattern[self._cc('dayOfMonth')] = self.__day_of_month
                if self.__month and isinstance(self.__month, int):
                    recurrence_pattern[self._cc('type')] = 'absoluteYearly'
                    recurrence_pattern[self._cc('month')] = self.__month

        # recurrence range: 'noEnd' unless an end date or an occurrence
        # count narrows it down
        if self.__start_date:
            recurrence_range = data[self._cc('range')] = {}
            recurrence_range[self._cc('type')] = 'noEnd'
            recurrence_range[
                self._cc('startDate')] = self.__start_date.isoformat()
            recurrence_range[
                self._cc('recurrenceTimeZone')] = self.__recurrence_time_zone
            if self.__end_date:
                recurrence_range[self._cc('type')] = 'endDate'
                recurrence_range[
                    self._cc('endDate')] = self.__end_date.isoformat()
            elif self.__occurrences is not None and isinstance(
                    self.__occurrences,
                    int):
                recurrence_range[self._cc('type')] = 'numbered'
                recurrence_range[
                    self._cc('numberOfOccurrences')] = self.__occurrences

        return data

    def _clear_pattern(self):
        """ Clears this event recurrence """
        # pattern group
        self.__interval = None
        self.__days_of_week = set()
        self.__first_day_of_week = None
        self.__day_of_month = None
        self.__month = None
        self.__index = 'first'
        # range group
        self.__start_date = None
        self.__end_date = None
        self.__occurrences = None

    def set_range(self, start=None, end=None, occurrences=None):
        """ Set the range of recurrence

        :param date start: Start date of repetition
        :param date end: End date of repetition
        :param int occurrences: no of occurrences
        """
        if start is None:
            # default the start to today only if none was set before
            if self.__start_date is None:
                self.__start_date = dt.date.today()
        else:
            self.start_date = start

        if end:
            self.end_date = end
        elif occurrences:
            self.__occurrences = occurrences
        self._track_changes()

    def set_daily(self, interval, **kwargs):
        """ Set to repeat every x no. of days

        :param int interval: no. of days to repeat at
        :keyword date start: Start date of repetition (kwargs)
        :keyword date end: End date of repetition (kwargs)
        :keyword int occurrences: no of occurrences (kwargs)
        """
        # resets any previous pattern before applying the new one
        self._clear_pattern()
        self.__interval = interval
        self.set_range(**kwargs)

    def set_weekly(self, interval, *, days_of_week, first_day_of_week,
                   **kwargs):
        """ Set to repeat every week on specified days for every x no. of days

        :param int interval: no. of days to repeat at
        :param str first_day_of_week: starting day for a week
        :param list[str] days_of_week: list of days of the week to repeat
        :keyword date start: Start date of repetition (kwargs)
        :keyword date end: End date of repetition (kwargs)
        :keyword int occurrences: no of occurrences (kwargs)
        """
        self.set_daily(interval, **kwargs)
        self.__days_of_week = set(days_of_week)
        self.__first_day_of_week = first_day_of_week

    def set_monthly(self, interval, *, day_of_month=None, days_of_week=None,
                    index=None, **kwargs):
        """ Set to repeat every month on specified days for every x no. of days

        :param int day_of_month: repeat day of a month
        :param list[str] days_of_week: list of days of the week to repeat
        :param index: index
        :keyword date start: Start date of repetition (kwargs)
        :keyword date end: End date of repetition (kwargs)
        :keyword int occurrences: no of occurrences (kwargs)
        """
        # exactly one of day_of_month / days_of_week selects absolute vs
        # relative monthly recurrence
        if not day_of_month and not days_of_week:
            raise ValueError('Must provide day_of_month or days_of_week values')
        if day_of_month and days_of_week:
            raise ValueError('Must provide only one of the two options')
        self.set_daily(interval, **kwargs)
        if day_of_month:
            self.__day_of_month = day_of_month
        elif days_of_week:
            self.__days_of_week = set(days_of_week)
            if index:
                self.__index = index

    def set_yearly(self, interval, month, *, day_of_month=None,
                   days_of_week=None, index=None, **kwargs):
        """ Set to repeat every month on specified days for every x no. of days

        :param int interval: no. of days to repeat at
        :param int month: month to repeat
        :param int day_of_month: repeat day of a month
        :param list[str] days_of_week: list of days of the week to repeat
        :param index: index
        :keyword date start: Start date of repetition (kwargs)
        :keyword date end: End date of repetition (kwargs)
        :keyword int occurrences: no of occurrences (kwargs)
        """
        self.set_monthly(interval, day_of_month=day_of_month,
                         days_of_week=days_of_week, index=index, **kwargs)
        self.__month = month
class ResponseStatus(ApiComponent):
    """ An event response status (status, time) """

    def __init__(self, parent, response_status):
        """ An event response status (status, time)

        :param parent: parent of this
        :type parent: Attendees or Event
        :param dict response_status: status info from cloud
        """
        super().__init__(protocol=parent.protocol,
                         main_resource=parent.main_resource)
        self.status = response_status.get(self._cc('response'), 'none')
        # 'none' from the server means "no response"; map it to None,
        # otherwise convert to the EventResponse enum
        self.status = None if self.status == 'none' else EventResponse.from_value(self.status)
        if self.status:
            self.response_time = response_status.get(self._cc('time'), None)
            if self.response_time == '0001-01-01T00:00:00Z':
                # consider there's no response time
                # this way we don't try to convert this Iso 8601 datetime to the
                # local timezone which generated parse errors
                self.response_time = None
            if self.response_time:
                try:
                    self.response_time = parse(self.response_time).astimezone(
                        self.protocol.timezone)
                except OverflowError:
                    log.debug("Couldn't parse event response time: {}".format(self.response_time))
                    self.response_time = None
        else:
            self.response_time = None

    def __repr__(self):
        # NOTE(review): when a response exists, self.status is an
        # EventResponse member, not a str; unless CaseEnum mixes in str this
        # would make repr() raise TypeError — confirm against CaseEnum.
        return self.status or 'None'

    def __str__(self):
        return self.__repr__()
class Attendee:
    """A single attendee of a calendar event."""

    def __init__(self, address, *, name=None, attendee_type=None,
                 response_status=None, event=None):
        """ Create a event attendee

        :param str address: email address of the attendee
        :param str name: name of the attendee
        :param AttendeeType attendee_type: requirement of attendee
        :param Response response_status: response status requirement
        :param Event event: event for which to assign the attendee
        """
        # suppress change tracking while the instance is being built
        self._untrack = True
        self._address = address
        self._name = name
        self._event = event
        self.__response_status = (response_status
                                  if isinstance(response_status, ResponseStatus)
                                  else None)
        # default requirement; the setter below validates any override
        self.__attendee_type = AttendeeType.Required
        if attendee_type:
            self.attendee_type = attendee_type
        self._untrack = False

    def __repr__(self):
        kind = self.attendee_type.name
        if self.name:
            return f'{kind}: {self.name} ({self.address})'
        return f'{kind}: {self.address}'

    def __str__(self):
        return repr(self)

    @property
    def address(self):
        """ Email address

        :getter: Get the email address of attendee
        :setter: Set the email address of attendee
        :type: str
        """
        return self._address

    @address.setter
    def address(self, value):
        self._address = value
        # the stored display name no longer matches a new address
        self._name = ''
        self._track_changes()

    @property
    def name(self):
        """ Name

        :getter: Get the name of attendee
        :setter: Set the name of attendee
        :type: str
        """
        return self._name

    @name.setter
    def name(self, value):
        self._name = value
        self._track_changes()

    def _track_changes(self):
        """Flag 'attendees' as pending an update on the parent event."""
        if not self._untrack:
            self._event._track_changes.add('attendees')

    @property
    def response_status(self):
        """ Response status of the attendee

        :type: ResponseStatus
        """
        return self.__response_status

    @property
    def attendee_type(self):
        """ Requirement of the attendee

        :getter: Get the requirement of attendee
        :setter: Set the requirement of attendee
        :type: AttendeeType
        """
        return self.__attendee_type

    @attendee_type.setter
    def attendee_type(self, value):
        self.__attendee_type = (value if isinstance(value, AttendeeType)
                                else AttendeeType.from_value(value))
        self._track_changes()
class Attendees(ApiComponent):
    """ A Collection of Attendees """

    def __init__(self, event, attendees=None):
        """ Create a collection of attendees

        :param Event event: event for which to assign the attendees
        :param attendees: list of attendees to add
        :type attendees: str or tuple(str, str) or Attendee or list[str] or
         list[tuple(str,str)] or list[Attendee]
        """
        super().__init__(protocol=event.protocol,
                         main_resource=event.main_resource)
        self._event = event
        self.__attendees = []
        # while True, additions do not flag the parent event as changed
        self.untrack = True
        if attendees:
            self.add(attendees)
        self.untrack = False

    def __iter__(self):
        return iter(self.__attendees)

    def __getitem__(self, key):
        return self.__attendees[key]

    def __contains__(self, item):
        # bug fix: Attendee exposes 'address', not 'email'; the previous
        # attendee.email raised AttributeError on any membership test
        return item in {attendee.address for attendee in self.__attendees}

    def __len__(self):
        return len(self.__attendees)

    def __str__(self):
        return self.__repr__()

    def __repr__(self):
        return 'Attendees Count: {}'.format(len(self.__attendees))

    def clear(self):
        """ Clear the attendees list """
        self.__attendees = []
        self._track_changes()

    def _track_changes(self):
        """ Update the track_changes on the event to reflect a needed
        update on this field """
        if self.untrack is False:
            self._event._track_changes.add('attendees')

    def add(self, attendees):
        """ Add attendees to the parent event

        :param attendees: list of attendees to add
        :type attendees: str or tuple(str, str) or Attendee or list[str] or
         list[tuple(str,str)] or list[Attendee]
        """
        if attendees:
            if isinstance(attendees, str):
                self.__attendees.append(
                    Attendee(address=attendees, event=self._event))
                self._track_changes()
            elif isinstance(attendees, Attendee):
                self.__attendees.append(attendees)
                self._track_changes()
            elif isinstance(attendees, tuple):
                name, address = attendees
                if address:
                    self.__attendees.append(
                        Attendee(address=address, name=name,
                                 event=self._event))
                    self._track_changes()
            elif isinstance(attendees, list):
                # recurse so each element goes through the same dispatch
                for attendee in attendees:
                    self.add(attendee)
            elif isinstance(attendees,
                            dict) and self._cloud_data_key in attendees:
                # raw payload from the cloud: build Attendee objects with
                # their server-provided type and response status
                attendees = attendees.get(self._cloud_data_key)
                for attendee in attendees:
                    email = attendee.get(self._cc('emailAddress'), {})
                    address = email.get(self._cc('address'), None)
                    if address:
                        name = email.get(self._cc('name'), None)
                        # 'required' is the default attendee type
                        attendee_type = attendee.get(self._cc('type'),
                                                     'required')
                        self.__attendees.append(
                            Attendee(address=address, name=name,
                                     attendee_type=attendee_type,
                                     event=self._event,
                                     response_status=ResponseStatus(
                                         parent=self,
                                         response_status=attendee.get(
                                             self._cc('status'), {}))))
            else:
                raise ValueError('Attendees must be an address string, an '
                                 'Attendee instance, a (name, address) '
                                 'tuple or a list')

    def remove(self, attendees):
        """ Remove the provided attendees from the event

        :param attendees: list of attendees to add
        :type attendees: str or tuple(str, str) or Attendee or list[str] or
         list[tuple(str,str)] or list[Attendee]
        """
        # normalize the input to a set of addresses to drop
        if isinstance(attendees, (list, tuple)):
            attendees = {
                attendee.address if isinstance(attendee, Attendee) else attendee
                for
                attendee in attendees}
        elif isinstance(attendees, str):
            attendees = {attendees}
        elif isinstance(attendees, Attendee):
            attendees = {attendees.address}
        else:
            raise ValueError('Incorrect parameter type for attendees')

        new_attendees = []
        for attendee in self.__attendees:
            if attendee.address not in attendees:
                new_attendees.append(attendee)
        self.__attendees = new_attendees
        self._track_changes()

    def to_api_data(self):
        """ Returns a dict to communicate with the server

        :rtype: dict
        """
        data = []
        for attendee in self.__attendees:
            if attendee.address:
                att_data = {
                    self._cc('emailAddress'): {
                        self._cc('address'): attendee.address,
                        self._cc('name'): attendee.name
                    },
                    self._cc('type'): self._cc(attendee.attendee_type.value)
                }
                data.append(att_data)
        return data
# noinspection PyAttributeOutsideInit
class Event(ApiComponent, AttachableMixin, HandleRecipientsMixin):
""" A Calendar event """
_endpoints = {
'calendar': '/calendars/{id}',
'event': '/events/{id}',
'event_default': '/calendar/events',
'event_calendar': '/calendars/{id}/events',
'occurrences': '/events/{id}/instances',
}
def __init__(self, *, parent=None, con=None, **kwargs):
""" Create a calendar event representation
:param parent: parent for this operation
:type parent: Calendar or Schedule or ApiComponent
:param Connection con: connection to use if no parent specified
:param Protocol protocol: protocol to use if no parent specified
(kwargs)
:param str main_resource: use this resource instead of parent resource
(kwargs)
:param str calendar_id: id of the calender to add this event in
(kwargs)
:param bool download_attachments: whether or not to download attachments
(kwargs)
:param str subject: subject of the event (kwargs)
"""
if parent and con:
raise ValueError('Need a parent or a connection but not both')
self.con = parent.con if parent else con
# Choose the main_resource passed in kwargs over parent main_resource
main_resource = kwargs.pop('main_resource', None) or (
getattr(parent, 'main_resource', None) if parent else None)
super().__init__(
protocol=parent.protocol if parent else kwargs.get('protocol'),
main_resource=main_resource)
cc = self._cc # alias
# internal to know which properties need to be updated on the server
self._track_changes = TrackerSet(casing=cc)
self.calendar_id = kwargs.get('calendar_id', None)
download_attachments = kwargs.get('download_attachments')
cloud_data = kwargs.get(self._cloud_data_key, {})
self.object_id = cloud_data.get(cc('id'), None)
self.__subject = cloud_data.get(cc('subject'),
kwargs.get('subject', '') or '')
body = cloud_data.get(cc('body'), {})
self.__body = body.get(cc('content'), '')
self.body_type = body.get(cc('contentType'),
'HTML') # default to HTML for new messages
self.__attendees = Attendees(event=self, attendees={
self._cloud_data_key: cloud_data.get(cc('attendees'), [])})
self.__categories = cloud_data.get(cc('categories'), [])
self.__created = cloud_data.get(cc('createdDateTime'), None)
self.__modified = cloud_data.get(cc('lastModifiedDateTime'), None)
local_tz = self.protocol.timezone
self.__created = parse(self.__created).astimezone(
local_tz) if self.__created else None
self.__modified = parse(self.__modified).astimezone(
local_tz) if self.__modified else None
start_obj = cloud_data.get(cc('start'), {})
self.__start = self._parse_date_time_time_zone(start_obj)
end_obj = cloud_data.get(cc('end'), {})
self.__end = self._parse_date_time_time_zone(end_obj)
self.has_attachments = cloud_data.get(cc('hasAttachments'), False)
self.__attachments = EventAttachments(parent=self, attachments=[])
if self.has_attachments and download_attachments:
self.attachments.download_attachments()
self.__categories = cloud_data.get(cc('categories'), [])
self.ical_uid = cloud_data.get(cc('iCalUId'), None)
self.__importance = ImportanceLevel.from_value(
cloud_data.get(cc('importance'), 'normal') or 'normal')
self.__is_all_day = cloud_data.get(cc('isAllDay'), False)
self.is_cancelled = cloud_data.get(cc('isCancelled'), False)
self.is_organizer = cloud_data.get(cc('isOrganizer'), True)
self.__location = cloud_data.get(cc('location'), {})
self.locations = cloud_data.get(cc('locations'), []) # TODO
self.online_meeting_url = cloud_data.get(cc('onlineMeetingUrl'), None)
self.__is_online_meeting = cloud_data.get(cc('isOnlineMeeting'), False)
self.__online_meeting_provider = OnlineMeetingProviderType.from_value(
cloud_data.get(cc('onlineMeetingProvider'), 'teamsForBusiness'))
self.online_meeting = cloud_data.get(cc('onlineMeeting'), None)
if not self.online_meeting_url and self.is_online_meeting:
self.online_meeting_url = self.online_meeting.get(cc('joinUrl'), None) \
if self.online_meeting else None
self.__organizer = self._recipient_from_cloud(
cloud_data.get(cc('organizer'), None), field=cc('organizer'))
self.__recurrence = EventRecurrence(event=self,
recurrence=cloud_data.get(
cc('recurrence'), None))
self.__is_reminder_on = cloud_data.get(cc('isReminderOn'), True)
self.__remind_before_minutes = cloud_data.get(
cc('reminderMinutesBeforeStart'), 15)
self.__response_requested = cloud_data.get(cc('responseRequested'),
True)
self.__response_status = ResponseStatus(parent=self,
response_status=cloud_data.get(
cc('responseStatus'), {}))
self.__sensitivity = EventSensitivity.from_value(
cloud_data.get(cc('sensitivity'), 'normal'))
self.series_master_id = cloud_data.get(cc('seriesMasterId'), None)
self.__show_as = EventShowAs.from_value(cloud_data.get(cc('showAs'), 'busy'))
self.__event_type = EventType.from_value(cloud_data.get(cc('type'), 'singleInstance'))
def __str__(self):
return self.__repr__()
def __repr__(self):
if self.start.date() == self.end.date():
return 'Subject: {} (on: {} from: {} to: {})'.format(self.subject, self.start.date(), self.start.time(), self.end.time())
else:
return 'Subject: {} (starts: {} {} and ends: {} {})'.format(self.subject, self.start.date(), self.start.time(), self.end.date(),
self.end.time())
def __eq__(self, other):
return self.object_id == other.object_id
def to_api_data(self, restrict_keys=None):
""" Returns a dict to communicate with the server
:param restrict_keys: a set of keys to restrict the returned data to
:rtype: dict
"""
cc = self._cc # alias
if self.__location:
if isinstance(self.__location, dict):
location = self.__location
else:
location = {cc('displayName'): self.__location}
else:
location = {cc('displayName'): ''}
data = {
cc('subject'): self.__subject,
cc('body'): {
cc('contentType'): self.body_type,
cc('content'): self.__body},
cc('start'): self._build_date_time_time_zone(self.__start),
cc('end'): self._build_date_time_time_zone(self.__end),
cc('attendees'): self.__attendees.to_api_data(),
cc('location'): location,
cc('categories'): self.__categories,
cc('isAllDay'): self.__is_all_day,
cc('importance'): cc(self.__importance.value),
cc('isReminderOn'): self.__is_reminder_on,
cc('reminderMinutesBeforeStart'): self.__remind_before_minutes,
cc('responseRequested'): self.__response_requested,
cc('sensitivity'): cc(self.__sensitivity.value),
cc('showAs'): cc(self.__show_as.value),
cc('isOnlineMeeting'): cc(self.__is_online_meeting),
cc('onlineMeetingProvider'): cc(self.__online_meeting_provider.value),
}
if self.__recurrence:
data[cc('recurrence')] = self.__recurrence.to_api_data()
if self.has_attachments:
data[cc('attachments')] = self.__attachments.to_api_data()
if restrict_keys:
if 'attachments' in restrict_keys:
self.attachments._update_attachments_to_cloud()
for key in list(data.keys()):
if key not in restrict_keys:
del data[key]
return data
@property
def created(self):
""" Created time of the event
:rtype: datetime
"""
return self.__created
@property
def modified(self):
""" Last modified time of the event
:rtype: datetime
"""
return self.__modified
@property
def body(self):
""" Body of the event
:getter: Get body text
:setter: Set body of event
:type: str
"""
return self.__body
@body.setter
def body(self, value):
self.__body = value
self._track_changes.add(self._cc('body'))
@property
def subject(self):
""" Subject of the event
:getter: Get subject
:setter: Set subject of event
:type: str
"""
return self.__subject
@subject.setter
def subject(self, value):
self.__subject = value
self._track_changes.add(self._cc('subject'))
@property
def start(self):
""" Start Time of event
:getter: get the start time
:setter: set the start time
:type: datetime
"""
return self.__start
@start.setter
def start(self, value):
if not isinstance(value, dt.date):
raise ValueError("'start' must be a valid datetime object")
if not isinstance(value, dt.datetime):
# force datetime
value = dt.datetime(value.year, value.month, value.day)
if value.tzinfo is None:
# localize datetime
value = self.protocol.timezone.localize(value)
elif value.tzinfo != self.protocol.timezone:
value = value.astimezone(self.protocol.timezone)
self.__start = value
if not self.end:
self.end = self.__start + dt.timedelta(minutes=30)
self._track_changes.add(self._cc('start'))
@property
def end(self):
""" End Time of event
:getter: get the end time
:setter: set the end time
:type: datetime
"""
return self.__end
@end.setter
def end(self, value):
if not isinstance(value, dt.date):
raise ValueError("'end' must be a valid datetime object")
if not isinstance(value, dt.datetime):
# force datetime
value = dt.datetime(value.year, value.month, value.day)
if value.tzinfo is None:
# localize datetime
value = self.protocol.timezone.localize(value)
elif value.tzinfo != self.protocol.timezone:
value = value.astimezone(self.protocol.timezone)
self.__end = value
self._track_changes.add(self._cc('end'))
@property
def importance(self):
""" Event Priority
:getter: get importance of event
:setter: set the importance of event
:type: ImportanceLevel
"""
return self.__importance
@importance.setter
def importance(self, value):
self.__importance = (value if isinstance(value, ImportanceLevel)
else ImportanceLevel.from_value(value))
self._track_changes.add(self._cc('importance'))
@property
def is_all_day(self):
""" Is the event for whole day
:getter: get the current status of is_all_day property
:setter: set if the event is all day or not
:type: bool
"""
return self.__is_all_day
    @is_all_day.setter
    def is_all_day(self, value):
        # Assignments below deliberately go through the public
        # self.start / self.end property setters (not the private
        # attributes) so the datetimes get localized and 'start'/'end'
        # are registered in the tracked changes, which the API requires
        # on updates when isAllDay is set.
        self.__is_all_day = value
        if value:
            # Api requirement: start and end must be set to midnight
            # is_all_day needs event.start included in the request on updates
            # is_all_day needs event.end included in the request on updates
            start = self.__start or dt.date.today()
            end = self.__end or dt.date.today()

            # NOTE(review): if only one of start/end is already set this
            # compares an aware datetime with a naive date.today() result
            # — presumably both are normally set together; verify.
            if (start + dt.timedelta(hours=24)) > end:
                # Api requires that under is_all_day=True start and
                # end must be at least 24 hours away
                end = start + dt.timedelta(hours=24)

            # set to midnight
            start = dt.datetime(start.year, start.month, start.day)
            end = dt.datetime(end.year, end.month, end.day)

            self.start = start
            self.end = end
        self._track_changes.add(self._cc('isAllDay'))
@property
def location(self):
""" Location of event
:getter: get current location configured for the event
:setter: set a location for the event
:type: str
"""
return self.__location
@location.setter
def location(self, value):
self.__location = value
self._track_changes.add(self._cc('location'))
@property
def is_reminder_on(self):
""" Status of the Reminder
:getter: check is reminder enabled or not
:setter: enable or disable reminder option
:type: bool
"""
return self.__is_reminder_on
@is_reminder_on.setter
def is_reminder_on(self, value):
self.__is_reminder_on = value
self._track_changes.add(self._cc('isReminderOn'))
self._track_changes.add(self._cc('reminderMinutesBeforeStart'))
@property
def remind_before_minutes(self):
""" No. of minutes to remind before the meeting
:getter: get current minutes
:setter: set to remind before new x minutes
:type: int
"""
return self.__remind_before_minutes
@remind_before_minutes.setter
def remind_before_minutes(self, value):
self.__is_reminder_on = True
self.__remind_before_minutes = int(value)
self._track_changes.add(self._cc('isReminderOn'))
self._track_changes.add(self._cc('reminderMinutesBeforeStart'))
@property
def response_requested(self):
""" Is response requested or not
:getter: Is response requested or not
:setter: set the event to request response or not
:type: bool
"""
return self.__response_requested
@response_requested.setter
def response_requested(self, value):
self.__response_requested = value
self._track_changes.add(self._cc('responseRequested'))
@property
def recurrence(self):
""" Recurrence information of the event
:rtype: EventRecurrence
"""
return self.__recurrence
@property
def organizer(self):
""" Organizer of the meeting event
:rtype: Recipient
"""
return self.__organizer
@property
def show_as(self):
""" Show as "busy" or any other status during the event
:getter: Current status during the event
:setter: update show as status
:type: EventShowAs
"""
return self.__show_as
@show_as.setter
def show_as(self, value):
self.__show_as = (value if isinstance(value, EventShowAs)
else EventShowAs.from_value(value))
self._track_changes.add(self._cc('showAs'))
@property
def sensitivity(self):
""" Sensitivity of the Event
:getter: Get the current sensitivity
:setter: Set a new sensitivity
:type: EventSensitivity
"""
return self.__sensitivity
@sensitivity.setter
def sensitivity(self, value):
self.__sensitivity = (value if isinstance(value, EventSensitivity)
else EventSensitivity.from_value(value))
self._track_changes.add(self._cc('sensitivity'))
@property
def response_status(self):
""" Your response
:rtype: ResponseStatus
"""
return self.__response_status
@property
def attachments(self):
""" List of attachments
:rtype: EventAttachments
"""
return self.__attachments
@property
def attendees(self):
""" List of meeting attendees
:rtype: Attendees
"""
return self.__attendees
@property
def categories(self):
""" Categories of the event
:getter: get the list of categories
:setter: set the list of categories
:type: list[str]
"""
return self.__categories
@categories.setter
def categories(self, value):
if isinstance(value, list):
self.__categories = []
for val in value:
if isinstance(val, Category):
self.__categories.append(val.name)
else:
self.__categories.append(val)
elif isinstance(value, str):
self.__categories = [value]
elif isinstance(value, Category):
self.__categories = [value.name]
else:
raise ValueError('categories must be a list')
self._track_changes.add(self._cc('categories'))
@property
def event_type(self):
return self.__event_type
@property
def is_online_meeting(self):
""" Status of the online_meeting
:getter: check is online_meeting enabled or not
:setter: enable or disable online_meeting option
:type: bool
"""
return self.__is_online_meeting
@is_online_meeting.setter
def is_online_meeting(self, value):
self.__is_online_meeting = value
self._track_changes.add(self._cc('isOnlineMeeting'))
@property
def online_meeting_provider(self):
""" online_meeting_provider of event
:getter: get current online_meeting_provider configured for the event
:setter: set a online_meeting_provider for the event
:type: OnlineMeetingProviderType
"""
return self.__online_meeting_provider
@online_meeting_provider.setter
def online_meeting_provider(self, value):
self.__online_meeting_provider = (value if isinstance(value, OnlineMeetingProviderType)
else OnlineMeetingProviderType.from_value(value))
self._track_changes.add(self._cc('onlineMeetingProvider'))
    def get_occurrences(self, start, end, *, limit=None, query=None, order_by=None, batch=None):
        """
        Returns all the occurrences of a seriesMaster event for a specified time range.

        :type start: datetime
        :param start: the start of the time range
        :type end: datetime
        :param end: the end of the time range
        :param int limit: max no. of events to get. Over 999 uses batch.
        :type query: Query or str
        :param query: optional. extra filters or orders to apply to this query
        :type order_by: str
        :param order_by: orders the result set based on this condition
        :param int batch: batch size, retrieves items in
         batches allowing to retrieve more items than the limit.
        :return: a list of events
        :rtype: list[Event] or Pagination
        """
        if self.event_type != EventType.SeriesMaster:
            # you can only get occurrences if its a seriesMaster
            return []
        url = self.build_url(
            self._endpoints.get('occurrences').format(id=self.object_id))
        # cap the page size at what the protocol allows
        if limit is None or limit > self.protocol.max_top_value:
            batch = self.protocol.max_top_value
        params = {'$top': batch if batch else limit}
        if order_by:
            params['$orderby'] = order_by
        if query:
            if isinstance(query, str):
                params['$filter'] = query
            else:
                params.update(query.as_params())
        if start.tzinfo is None:
            # if it's a naive datetime, localize the datetime.
            start = self.protocol.timezone.localize(start)  # localize datetime into local tz
        if start.tzinfo != pytz.utc:
            start = start.astimezone(pytz.utc)  # transform local datetime to utc
        if end.tzinfo is None:
            # if it's a naive datetime, localize the datetime.
            end = self.protocol.timezone.localize(end)  # localize datetime into local tz
        if end.tzinfo != pytz.utc:
            end = end.astimezone(pytz.utc)  # transform local datetime to utc
        # the occurrences endpoint requires the window bounds as
        # ISO-8601 UTC query parameters
        params[self._cc('startDateTime')] = start.isoformat()
        params[self._cc('endDateTime')] = end.isoformat()
        response = self.con.get(url, params=params,
                                headers={'Prefer': 'outlook.timezone="UTC"'})
        if not response:
            return iter(())
        data = response.json()
        # Everything received from cloud must be passed as self._cloud_data_key
        events = (self.__class__(parent=self, **{self._cloud_data_key: event})
                  for event in data.get('value', []))
        next_link = data.get(NEXT_LINK_KEYWORD, None)
        if batch and next_link:
            # more pages exist: hand back a paginator that keeps
            # yielding events lazily up to 'limit'
            return Pagination(parent=self, data=events,
                              constructor=self.__class__,
                              next_link=next_link, limit=limit)
        else:
            return events
def delete(self):
""" Deletes a stored event
:return: Success / Failure
:rtype: bool
"""
if self.object_id is None:
raise RuntimeError('Attempting to delete an unsaved event')
url = self.build_url(
self._endpoints.get('event').format(id=self.object_id))
response = self.con.delete(url)
return bool(response)
    def save(self):
        """ Create a new event or update an existing one by checking what
        values have changed and update them on the server

        :return: Success / Failure
        :rtype: bool
        """
        if self.object_id:
            # update event
            if not self._track_changes:
                return True  # there's nothing to update
            url = self.build_url(
                self._endpoints.get('event').format(id=self.object_id))
            method = self.con.patch
            # only send the attributes whose setters registered a change
            data = self.to_api_data(restrict_keys=self._track_changes)
        else:
            # new event
            if self.calendar_id:
                url = self.build_url(
                    self._endpoints.get('event_calendar').format(
                        id=self.calendar_id))
            else:
                url = self.build_url(self._endpoints.get('event_default'))
            method = self.con.post
            data = self.to_api_data()
        response = method(url, data=data)
        if not response:
            return False
        self._track_changes.clear()  # clear the tracked changes
        if not self.object_id:
            # new event: adopt the server-assigned id and timestamps
            event = response.json()
            self.object_id = event.get(self._cc('id'), None)
            self.__created = event.get(self._cc('createdDateTime'), None)
            self.__modified = event.get(self._cc('lastModifiedDateTime'), None)
            # timestamps arrive as ISO strings; parse them and convert
            # into the protocol timezone
            self.__created = parse(self.__created).astimezone(
                self.protocol.timezone) if self.__created else None
            self.__modified = parse(self.__modified).astimezone(
                self.protocol.timezone) if self.__modified else None
        else:
            # updated on the server just now
            self.__modified = self.protocol.timezone.localize(dt.datetime.now())
        return True
def accept_event(self, comment=None, *, send_response=True,
tentatively=False):
""" Accept the event
:param comment: comment to add
:param send_response: whether or not to send response back
:param tentatively: whether acceptance is tentative
:return: Success / Failure
:rtype: bool
"""
if not self.object_id:
raise RuntimeError("Can't accept event that doesn't exist")
url = self.build_url(
self._endpoints.get('event').format(id=self.object_id))
url = url + '/tentativelyAccept' if tentatively else url + '/accept'
data = {}
if comment and isinstance(comment, str):
data[self._cc('comment')] = comment
if send_response is False:
data[self._cc('sendResponse')] = send_response
response = self.con.post(url, data=data or None)
return bool(response)
def decline_event(self, comment=None, *, send_response=True):
""" Decline the event
:param str comment: comment to add
:param bool send_response: whether or not to send response back
:return: Success / Failure
:rtype: bool
"""
if not self.object_id:
raise RuntimeError("Can't accept event that doesn't exist")
url = self.build_url(
self._endpoints.get('event').format(id=self.object_id))
url = url + '/decline'
data = {}
if comment and isinstance(comment, str):
data[self._cc('comment')] = comment
if send_response is False:
data[self._cc('sendResponse')] = send_response
response = self.con.post(url, data=data or None)
return bool(response)
def cancel_event(self, comment=None, *, send_response=True):
""" Cancel the event
:param str comment: comment to add
:param bool send_response: whether or not to send response back
:return: Success / Failure
:rtype: bool
"""
if not self.object_id:
raise RuntimeError("Can't accept event that doesn't exist")
url = self.build_url(
self._endpoints.get('event').format(id=self.object_id))
url = url + '/cancel'
data = {}
if comment and isinstance(comment, str):
data[self._cc('comment')] = comment
if send_response is False:
data[self._cc('sendResponse')] = send_response
response = self.con.post(url, data=data or None)
return bool(response)
def get_body_text(self):
""" Parse the body html and returns the body text using bs4
:return: body text
:rtype: str
"""
if self.body_type != 'HTML':
return self.body
try:
soup = bs(self.body, 'html.parser')
except RuntimeError:
return self.body
else:
return soup.body.text
def get_body_soup(self):
""" Returns the beautifulsoup4 of the html body
:return: Html body
:rtype: BeautifulSoup
"""
if self.body_type != 'HTML':
return None
else:
return bs(self.body, 'html.parser')
class Calendar(ApiComponent, HandleRecipientsMixin):
    """ A calendar resource and the operations on its events. """

    # endpoint path templates used to build request URLs;
    # '{id}' is this calendar's id, '{ide}' an event id within it
    _endpoints = {
        'calendar': '/calendars/{id}',
        'get_events': '/calendars/{id}/events',
        'default_events': '/calendar/events',
        'events_view': '/calendars/{id}/calendarView',
        'default_events_view': '/calendar/calendarView',
        'get_event': '/calendars/{id}/events/{ide}',
    }

    # overridable factory used when building Event objects from responses
    event_constructor = Event
def __init__(self, *, parent=None, con=None, **kwargs):
""" Create a Calendar Representation
:param parent: parent for this operation
:type parent: Schedule
:param Connection con: connection to use if no parent specified
:param Protocol protocol: protocol to use if no parent specified
(kwargs)
:param str main_resource: use this resource instead of parent resource
(kwargs)
"""
if parent and con:
raise ValueError('Need a parent or a connection but not both')
self.con = parent.con if parent else con
# Choose the main_resource passed in kwargs over parent main_resource
main_resource = kwargs.pop('main_resource', None) or (
getattr(parent, 'main_resource', None) if parent else None)
super().__init__(
protocol=parent.protocol if parent else kwargs.get('protocol'),
main_resource=main_resource)
cloud_data = kwargs.get(self._cloud_data_key, {})
self.name = cloud_data.get(self._cc('name'), '')
self.calendar_id = cloud_data.get(self._cc('id'), None)
self.__owner = self._recipient_from_cloud(
cloud_data.get(self._cc('owner'), {}), field='owner')
color = cloud_data.get(self._cc('color'), 'auto')
try:
self.color = CalendarColor.from_value(color)
except:
self.color = CalendarColor.from_value('auto')
self.can_edit = cloud_data.get(self._cc('canEdit'), False)
self.can_share = cloud_data.get(self._cc('canShare'), False)
self.can_view_private_items = cloud_data.get(
self._cc('canViewPrivateItems'), False)
def __str__(self):
return self.__repr__()
def __repr__(self):
return 'Calendar: {} from {}'.format(self.name, self.owner)
def __eq__(self, other):
return self.calendar_id == other.calendar_id
    @property
    def owner(self):
        """ Owner of the calendar

        :rtype: str
        """
        # read-only: populated in __init__ via _recipient_from_cloud
        # NOTE(review): looks like this is actually a Recipient, not a
        # plain str — confirm against _recipient_from_cloud
        return self.__owner
def update(self):
""" Updates this calendar. Only name and color can be changed.
:return: Success / Failure
:rtype: bool
"""
if not self.calendar_id:
return False
url = self.build_url(self._endpoints.get('calendar'))
data = {
self._cc('name'): self.name,
self._cc('color'): self._cc(self.color.value
if isinstance(self.color, CalendarColor)
else self.color)
}
response = self.con.patch(url, data=data)
return bool(response)
def delete(self):
""" Deletes this calendar
:return: Success / Failure
:rtype: bool
"""
if not self.calendar_id:
return False
url = self.build_url(
self._endpoints.get('calendar').format(id=self.calendar_id))
response = self.con.delete(url)
if not response:
return False
self.calendar_id = None
return True
    def get_events(self, limit=25, *, query=None, order_by=None, batch=None,
                   download_attachments=False, include_recurring=True):
        """ Get events from the this Calendar

        :param int limit: max no. of events to get. Over 999 uses batch.
        :param query: applies a OData filter to the request
        :type query: Query or str
        :param order_by: orders the result set based on this condition
        :type order_by: Query or str
        :param int batch: batch size, retrieves items in
         batches allowing to retrieve more items than the limit.
        :param download_attachments: downloads event attachments
        :param bool include_recurring: whether to include recurring events or not
        :return: list of events in this calendar
        :rtype: list[Event] or Pagination
        """
        # choose the endpoint: the calendarView endpoints expand
        # recurring series into occurrences, the plain event endpoints
        # do not
        if self.calendar_id is None:
            # I'm the default calendar
            if include_recurring:
                url = self.build_url(self._endpoints.get('default_events_view'))
            else:
                url = self.build_url(self._endpoints.get('default_events'))
        else:
            if include_recurring:
                url = self.build_url(
                    self._endpoints.get('events_view').format(id=self.calendar_id))
            else:
                url = self.build_url(
                    self._endpoints.get('get_events').format(id=self.calendar_id))
        if limit is None or limit > self.protocol.max_top_value:
            batch = self.protocol.max_top_value
        if batch:
            download_attachments = False
        params = {'$top': batch if batch else limit}
        if include_recurring:
            start = None
            end = None
            if query and not isinstance(query, str):
                # extract start and end from query because
                # those are required by a calendarView
                # NOTE(review): this reaches into the private
                # 'query._filters' list layout — verify against the
                # Query implementation if that ever changes
                for query_data in query._filters:
                    if not isinstance(query_data, list):
                        continue
                    attribute = query_data[0]
                    # the 2nd position contains the filter data
                    # and the 3rd position in filter_data contains the value
                    word = query_data[2][3]
                    if attribute.lower().startswith('start/'):
                        start = word.replace("'", '')  # remove the quotes
                        query.remove_filter('start')
                    if attribute.lower().startswith('end/'):
                        end = word.replace("'", '')  # remove the quotes
                        query.remove_filter('end')
            if start is None or end is None:
                raise ValueError("When 'include_recurring' is True you must provide a 'start' and 'end' datetimes inside a Query instance.")
            if end < start:
                raise ValueError('When using "include_recurring=True", the date asigned to the "end" datetime'
                                 ' should be greater or equal than the date asigned to the "start" datetime.')
            params[self._cc('startDateTime')] = start
            params[self._cc('endDateTime')] = end
        if order_by:
            params['$orderby'] = order_by
        if query:
            if isinstance(query, str):
                params['$filter'] = query
            else:
                params.update(query.as_params())
        response = self.con.get(url, params=params,
                                headers={'Prefer': 'outlook.timezone="UTC"'})
        if not response:
            return iter(())
        data = response.json()
        # Everything received from cloud must be passed as self._cloud_data_key
        events = (self.event_constructor(parent=self,
                                         download_attachments=
                                         download_attachments,
                                         **{self._cloud_data_key: event})
                  for event in data.get('value', []))
        next_link = data.get(NEXT_LINK_KEYWORD, None)
        if batch and next_link:
            # more pages exist: return a lazy paginator up to 'limit'
            return Pagination(parent=self, data=events,
                              constructor=self.event_constructor,
                              next_link=next_link, limit=limit)
        else:
            return events
def new_event(self, subject=None):
""" Returns a new (unsaved) Event object
:rtype: Event
"""
return self.event_constructor(parent=self, subject=subject,
calendar_id=self.calendar_id)
    def get_event(self, param):
        """ Returns an Event instance by it's id

        :param param: an event_id or a Query instance
        :return: event for the specified info
        :rtype: Event
        """
        if param is None:
            return None
        if isinstance(param, str):
            # direct lookup by event id
            url = self.build_url(
                self._endpoints.get('get_event').format(id=self.calendar_id,
                                                        ide=param))
            params = None
            by_id = True
        else:
            # 'param' is a Query: list events filtered by it, keep one
            url = self.build_url(
                self._endpoints.get('get_events').format(id=self.calendar_id))
            params = {'$top': 1}
            params.update(param.as_params())
            by_id = False
        response = self.con.get(url, params=params,
                                headers={'Prefer': 'outlook.timezone="UTC"'})
        if not response:
            return None
        if by_id:
            # single-resource endpoint returns the event object itself
            event = response.json()
        else:
            # filtered listings wrap results in 'value'
            event = response.json().get('value', [])
            if event:
                event = event[0]
            else:
                return None
        return self.event_constructor(parent=self,
                                      **{self._cloud_data_key: event})
class Schedule(ApiComponent):
    """ A wrapper around an account's calendars and events. """

    # endpoint path templates used to build request URLs
    _endpoints = {
        'root_calendars': '/calendars',
        'get_calendar': '/calendars/{id}',
        'default_calendar': '/calendar',
        'get_availability': '/calendar/getSchedule',
    }

    # overridable factories used when building results from responses
    calendar_constructor = Calendar
    event_constructor = Event
    def __init__(self, *, parent=None, con=None, **kwargs):
        """ Create a wrapper around calendars and events

        :param parent: parent for this operation
        :type parent: Account
        :param Connection con: connection to use if no parent specified
        :param Protocol protocol: protocol to use if no parent specified
         (kwargs)
        :param str main_resource: use this resource instead of parent resource
         (kwargs)
        """
        # either a parent supplies con/protocol, or they are given directly
        if parent and con:
            raise ValueError('Need a parent or a connection but not both')
        self.con = parent.con if parent else con

        # Choose the main_resource passed in kwargs over parent main_resource
        main_resource = kwargs.pop('main_resource', None) or (
            getattr(parent, 'main_resource', None) if parent else None)

        super().__init__(
            protocol=parent.protocol if parent else kwargs.get('protocol'),
            main_resource=main_resource)
def __str__(self):
return self.__repr__()
def __repr__(self):
return 'Schedule resource: {}'.format(self.main_resource)
def list_calendars(self, limit=None, *, query=None, order_by=None):
""" Gets a list of calendars
To use query an order_by check the OData specification here:
http://docs.oasis-open.org/odata/odata/v4.0/errata03/os/complete/
part2-url-conventions/odata-v4.0-errata03-os-part2-url-conventions
-complete.html
:param int limit: max no. of calendars to get. Over 999 uses batch.
:param query: applies a OData filter to the request
:type query: Query or str
:param order_by: orders the result set based on this condition
:type order_by: Query or str
:return: list of calendars
:rtype: list[Calendar]
"""
url = self.build_url(self._endpoints.get('root_calendars'))
params = {}
if limit:
params['$top'] = limit
if query:
params['$filter'] = str(query)
if order_by:
params['$orderby'] = order_by
response = self.con.get(url, params=params or None)
if not response:
return []
data = response.json()
# Everything received from cloud must be passed as self._cloud_data_key
contacts = [self.calendar_constructor(parent=self, **{
self._cloud_data_key: x}) for x in data.get('value', [])]
return contacts
def new_calendar(self, calendar_name):
""" Creates a new calendar
:param str calendar_name: name of the new calendar
:return: a new Calendar instance
:rtype: Calendar
"""
if not calendar_name:
return None
url = self.build_url(self._endpoints.get('root_calendars'))
response = self.con.post(url, data={self._cc('name'): calendar_name})
if not response:
return None
data = response.json()
# Everything received from cloud must be passed as self._cloud_data_key
return self.calendar_constructor(parent=self,
**{self._cloud_data_key: data})
    def get_calendar(self, calendar_id=None, calendar_name=None):
        """ Returns a calendar by it's id or name

        :param str calendar_id: the calendar id to be retrieved.
        :param str calendar_name: the calendar name to be retrieved.
        :return: calendar for the given info
        :rtype: Calendar
        """
        # exactly one of the two selectors must be provided
        if calendar_id and calendar_name:
            raise RuntimeError('Provide only one of the options')

        if not calendar_id and not calendar_name:
            raise RuntimeError('Provide one of the options')

        if calendar_id:
            # get calendar by it's id
            url = self.build_url(
                self._endpoints.get('get_calendar').format(id=calendar_id))
            params = None
        else:
            # get calendar by name
            url = self.build_url(self._endpoints.get('root_calendars'))
            params = {
                '$filter': "{} eq '{}'".format(self._cc('name'), calendar_name),
                '$top': 1}
        response = self.con.get(url, params=params)
        if not response:
            return None

        if calendar_id:
            # single-resource endpoint returns the calendar object itself
            data = response.json()
        else:
            # filtered listings wrap results in 'value'; keep the first
            data = response.json().get('value')
            data = data[0] if data else None
            if data is None:
                return None

        # Everything received from cloud must be passed as self._cloud_data_key
        return self.calendar_constructor(parent=self,
                                         **{self._cloud_data_key: data})
def get_default_calendar(self):
""" Returns the default calendar for the current user
:rtype: Calendar
"""
url = self.build_url(self._endpoints.get('default_calendar'))
response = self.con.get(url)
if not response:
return None
data = response.json()
# Everything received from cloud must be passed as self._cloud_data_key
return self.calendar_constructor(parent=self,
**{self._cloud_data_key: data})
def get_events(self, limit=25, *, query=None, order_by=None, batch=None,
download_attachments=False, include_recurring=True):
""" Get events from the default Calendar
:param int limit: max no. of events to get. Over 999 uses batch.
:param query: applies a OData filter to the request
:type query: Query or str
:param order_by: orders the result set based on this condition
:type order_by: Query or str
:param int batch: batch size, retrieves items in
batches allowing to retrieve more items than the limit.
:param bool download_attachments: downloads event attachments
:param bool include_recurring: whether to include recurring events or not
:return: list of items in this folder
:rtype: list[Event] or Pagination
"""
default_calendar = self.calendar_constructor(parent=self)
return default_calendar.get_events(limit=limit, query=query,
order_by=order_by, batch=batch,
download_attachments=download_attachments,
include_recurring=include_recurring)
def new_event(self, subject=None):
""" Returns a new (unsaved) Event object in the default calendar
:param str subject: subject text for the new event
:return: new event
:rtype: Event
"""
return self.event_constructor(parent=self, subject=subject)
def get_availability(self, schedules, start, end, interval=60):
"""
Returns the free/busy availability for a set of users in a given time frame
:param list schedules: a list of strings (email addresses)
:param datetime start: the start time frame to look for available space
:param datetime end: the end time frame to look for available space
:param int interval: the number of minutes to look for space
"""
url = self.build_url(self._endpoints.get('get_availability'))
data = {
'startTime': self._build_date_time_time_zone(start),
'endTime': self._build_date_time_time_zone(end),
'availabilityViewInterval': interval,
'schedules': schedules
}
response = self.con.post(url, data=data)
if not response:
return []
data = response.json().get('value', [])
# transform dates and availabilityView
availability_view_codes = {
'0': 'free',
'1': 'tentative',
'2': 'busy',
'3': 'out of office',
'4': 'working elsewhere',
}
for schedule in data:
a_view = schedule.get('availabilityView', '')
schedule['availabilityView'] = [availability_view_codes.get(code, 'unkknown') for code in a_view]
for item in schedule.get('scheduleItems', []):
item['start'] = self._parse_date_time_time_zone(item.get('start'))
item['end'] = self._parse_date_time_time_zone(item.get('end'))
return data
| 35.506569
| 140
| 0.586177
|
4a0580eb6b8b82e6f0bd3177a4d3e1de93e3422b
| 3,976
|
py
|
Python
|
oauth2client/contrib/_win32_opener.py
|
keyrrjperino/google-spreadsheet-get
|
e00769d7fb715ff6e86c5404b97407d36ca3c786
|
[
"MIT"
] | 41
|
2016-07-20T06:30:26.000Z
|
2021-05-18T20:31:18.000Z
|
oauth2client/contrib/_win32_opener.py
|
keyrrjperino/google-spreadsheet-get
|
e00769d7fb715ff6e86c5404b97407d36ca3c786
|
[
"MIT"
] | 20
|
2016-09-09T17:48:13.000Z
|
2022-03-16T17:25:58.000Z
|
oauth2client/contrib/_win32_opener.py
|
keyrrjperino/google-spreadsheet-get
|
e00769d7fb715ff6e86c5404b97407d36ca3c786
|
[
"MIT"
] | 5
|
2017-09-21T20:44:22.000Z
|
2022-03-18T16:01:26.000Z
|
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import errno
import time
import pywintypes
import win32con
import win32file
from oauth2client.contrib.locked_file import _Opener
from oauth2client.contrib.locked_file import AlreadyLockedException
from oauth2client.contrib.locked_file import logger
from oauth2client.contrib.locked_file import validate_file
class _Win32Opener(_Opener):
    """Open, lock, and unlock a file using windows primitives."""

    # Error #33:
    # 'The process cannot access the file because another process'
    FILE_IN_USE_ERROR = 33

    # Error #158:
    # 'The segment is already unlocked.'
    FILE_ALREADY_UNLOCKED_ERROR = 158

    def open_and_lock(self, timeout, delay):
        """Open the file and lock it.

        Args:
            timeout: float, How long to try to lock for.
            delay: float, How long to wait between retries

        Raises:
            AlreadyLockedException: if the lock is already acquired.
            IOError: if the open fails.
            CredentialsFileSymbolicLinkError: if the file is a symbolic
                                              link.
        """
        if self._locked:
            raise AlreadyLockedException('File %s is already locked' %
                                         self._filename)
        start_time = time.time()

        validate_file(self._filename)
        try:
            self._fh = open(self._filename, self._mode)
        except IOError as e:
            # If we can't access with _mode, try _fallback_mode
            # and don't lock.
            # NOTE(review): a non-EACCES IOError falls through to the
            # lock loop below with self._fh unset — presumably never hit
            # in practice; behavior preserved from upstream.
            if e.errno == errno.EACCES:
                self._fh = open(self._filename, self._fallback_mode)
                return

        # We opened in _mode, try to lock the file.
        while True:
            try:
                hfile = win32file._get_osfhandle(self._fh.fileno())
                win32file.LockFileEx(
                    hfile,
                    (win32con.LOCKFILE_FAIL_IMMEDIATELY |
                     win32con.LOCKFILE_EXCLUSIVE_LOCK), 0, -0x10000,
                    pywintypes.OVERLAPPED())
                self._locked = True
                return
            except pywintypes.error as e:
                if timeout == 0:
                    raise

                # BUG FIX (Python 3): exceptions are not indexable, so
                # 'e[0]' raised TypeError. pywintypes.error exposes the
                # win32 error code as 'e.winerror'. If the error is not
                # that the file is already in use, raise.
                if e.winerror != _Win32Opener.FILE_IN_USE_ERROR:
                    raise

                # We could not acquire the lock. Try again.
                if (time.time() - start_time) >= timeout:
                    # logger.warn is deprecated; use warning
                    logger.warning('Could not lock %s in %s seconds' % (
                        self._filename, timeout))
                    if self._fh:
                        self._fh.close()
                    self._fh = open(self._filename, self._fallback_mode)
                    return
                time.sleep(delay)

    def unlock_and_close(self):
        """Close and unlock the file using the win32 primitive."""
        if self._locked:
            try:
                hfile = win32file._get_osfhandle(self._fh.fileno())
                win32file.UnlockFileEx(hfile, 0, -0x10000,
                                       pywintypes.OVERLAPPED())
            except pywintypes.error as e:
                # see open_and_lock: use the winerror attribute (Python 3)
                if e.winerror != _Win32Opener.FILE_ALREADY_UNLOCKED_ERROR:
                    raise
            self._locked = False
        if self._fh:
            self._fh.close()
| 36.145455
| 74
| 0.57998
|
4a0581b06cf8298b4204d1c881d6b83785654d8e
| 1,791
|
py
|
Python
|
vision/google/cloud/vision_v1p3beta1/__init__.py
|
conwaychriscosmo/google-cloud-python
|
8e7b7f8a5f4bb04d13f4d88ec3848f017faf834a
|
[
"Apache-2.0"
] | 1
|
2019-03-26T21:44:51.000Z
|
2019-03-26T21:44:51.000Z
|
vision/google/cloud/vision_v1p3beta1/__init__.py
|
conwaychriscosmo/google-cloud-python
|
8e7b7f8a5f4bb04d13f4d88ec3848f017faf834a
|
[
"Apache-2.0"
] | 1
|
2019-03-29T22:03:48.000Z
|
2019-04-02T22:24:45.000Z
|
vision/google/cloud/vision_v1p3beta1/__init__.py
|
conwaychriscosmo/google-cloud-python
|
8e7b7f8a5f4bb04d13f4d88ec3848f017faf834a
|
[
"Apache-2.0"
] | 1
|
2019-03-29T18:26:16.000Z
|
2019-03-29T18:26:16.000Z
|
# -*- coding: utf-8 -*-
#
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from google.cloud.vision_helpers.decorators import add_single_feature_methods
from google.cloud.vision_helpers import VisionHelpers
import sys
import warnings
from google.cloud.vision_v1p3beta1 import types
from google.cloud.vision_v1p3beta1.gapic import enums
from google.cloud.vision_v1p3beta1.gapic import image_annotator_client as iac
from google.cloud.vision_v1p3beta1.gapic import product_search_client
if sys.version_info[:2] == (2, 7):
message = (
"A future version of this library will drop support for Python 2.7."
"More details about Python 2 support for Google Cloud Client Libraries"
"can be found at https://cloud.google.com/python/docs/python2-sunset/"
)
warnings.warn(message, DeprecationWarning)
class ProductSearchClient(product_search_client.ProductSearchClient):
__doc__ = product_search_client.ProductSearchClient.__doc__
enums = enums
@add_single_feature_methods
class ImageAnnotatorClient(VisionHelpers, iac.ImageAnnotatorClient):
__doc__ = iac.ImageAnnotatorClient.__doc__
enums = enums
__all__ = ("enums", "types", "ProductSearchClient", "ImageAnnotatorClient")
| 34.442308
| 79
| 0.778894
|
4a0583c081927bd3e331b829d8c6926f68fb849b
| 138
|
py
|
Python
|
lib/egauge/constants.py
|
goztrk/django-htk
|
c56bf112e5d627780d2f4288460eae5cce80fa9e
|
[
"MIT"
] | 206
|
2015-10-15T07:05:08.000Z
|
2021-02-19T11:48:36.000Z
|
lib/egauge/constants.py
|
goztrk/django-htk
|
c56bf112e5d627780d2f4288460eae5cce80fa9e
|
[
"MIT"
] | 8
|
2017-10-16T10:18:31.000Z
|
2022-03-09T14:24:27.000Z
|
lib/egauge/constants.py
|
goztrk/django-htk
|
c56bf112e5d627780d2f4288460eae5cce80fa9e
|
[
"MIT"
] | 61
|
2015-10-15T08:12:44.000Z
|
2022-03-10T12:25:06.000Z
|
EGAUGE_API_URLS = {
'stored' : 'http://%s.egaug.es/cgi-bin/egauge-show',
'instantaneous' : 'http://%s.egaug.es/cgi-bin/egauge',
}
| 27.6
| 58
| 0.623188
|
4a0583ca9e4193addf6b6d4d2f43995b67342480
| 2,468
|
py
|
Python
|
coko/classes/configuration.py
|
dante-signal31/coko
|
c803433f28602b0ecbbd86329d624557e4986a10
|
[
"BSD-3-Clause"
] | null | null | null |
coko/classes/configuration.py
|
dante-signal31/coko
|
c803433f28602b0ecbbd86329d624557e4986a10
|
[
"BSD-3-Clause"
] | null | null | null |
coko/classes/configuration.py
|
dante-signal31/coko
|
c803433f28602b0ecbbd86329d624557e4986a10
|
[
"BSD-3-Clause"
] | null | null | null |
import dataclasses
import os.path
from typing import List
import coko.classes.exceptions as exceptions
@dataclasses.dataclass
class FileOwnership:
uid: int
guid: int
permissions: int
permissions_octal: bool = dataclasses.field(default=False)
def __post_init__(self)-> None:
# We let user to enter permissions in octal but we convert to int
# because system functions we use deal with them as int.
#
# User only would enter permissions in octal in automated tests
# contexts. In production coko will get native permissions
# directly from files, so what it would read would be int (from st_mode).
if self.permissions_octal:
self.permissions = int(str(self.permissions), 8)
def __eq__(self, other)-> bool:
# permissions_octal should not be compared for equality because it is
# not actually related to file but just a switch to create automated
# tests. If we would let default __eq__ automated test would fail.
return all([self.uid == other.uid,
self.guid == other.guid,
self.permissions == other.permissions])
class Folder(object):
"""A descriptor that sets and returns system folders checking folder
actually exists.
"""
def __init__(self):
self._folder_path: str = None
def __get__(self, obj, objtype)-> str:
return self._folder_path
def __set__(self, obj, value):
absolute_path: str = os.path.abspath(value)
if os.path.isdir(absolute_path):
self._folder_path: str = os.path.abspath(value)
else:
raise exceptions.FolderNotFound(absolute_path)
class Configuration:
source_folder: Folder = Folder()
destination_folder: Folder = Folder()
def __init__(self, source_folder: str, destination_folder: str,
default_ownership: List):
self.source_folder = source_folder
self.destination_folder = destination_folder
if default_ownership is not None:
self.default_ownership: FileOwnership = FileOwnership(default_ownership[0],
default_ownership[1],
default_ownership[2],
default_ownership[3])
else:
self.default_ownership = None
| 35.768116
| 87
| 0.618314
|
4a0583f33b83f803903be225c24812d7dd98a649
| 16,960
|
py
|
Python
|
fragbuilder/pdb_tools.py
|
berquist/fragbuilder
|
bb17bb1e20e6d6cc2770281228b222b1a54f1cf6
|
[
"BSD-2-Clause"
] | 8
|
2015-04-11T17:43:13.000Z
|
2021-12-02T10:18:45.000Z
|
fragbuilder/pdb_tools.py
|
berquist/fragbuilder
|
bb17bb1e20e6d6cc2770281228b222b1a54f1cf6
|
[
"BSD-2-Clause"
] | null | null | null |
fragbuilder/pdb_tools.py
|
berquist/fragbuilder
|
bb17bb1e20e6d6cc2770281228b222b1a54f1cf6
|
[
"BSD-2-Clause"
] | 6
|
2015-04-01T07:18:26.000Z
|
2021-04-24T11:11:18.000Z
|
import math
import sys
from math_utils import *
from names import *
from bio_pdb import PDBParser
from bio_pdb import is_aa
from bio_pdb import calc_dihedral
class PDB:
""" Usually instantiated from something like:
pdbfile = fragbuilder.PDB("1UBQ.pdb")
"""
def __init__(self, pdbfile):
""" Wrapper class for Bio.PDB which makes it convenient to
read phi/psi/omega/chi torsion angles from a PDB-file.
Arguments:
pdbfile -- The PDB file you wish to read.
"""
try:
self._parser = PDBParser(QUIET=True)
except:
# Workaround for missing QUIET keyword
# in certain versions of Biopython.
self._parser = PDBParser()
self._pdbfile = pdbfile
self._structure = self._parser.get_structure("pdb", self._pdbfile)
self._chain = self._get_first_chain(self._structure)
self._sequence = self._get_sequence_from_chain(self._chain)
def get_length(self):
""" Returns the length of the protein.
"""
length = 0
for residue in self._chain:
if is_aa(residue):
length += 1
return length
def get_residue_numbers(self):
""" Returns a list with indexes of all amino acids in the chain.
Can be used for iterating over residues, e.g.:
>>> for i in pdbfile.get_residue_numbers():
... print i, pdbfile.get_residue_bb_angles(i)
"""
length = self.get_length()
return range(1, length + 1)
def get_chi_angles(self, resnum):
""" Returns a list of chi angles for a residue.
Arguments:
resnum -- The number of the residue.
NOTE: Also corrects for incorrect naming of CG1/CG2 in
valine residues and CD1/CD2 in leucine residues.
Will display an error if .pdb file is incorrect.
"""
angles_rad = self._get_chi(self._chain[resnum])
angles_deg = [angle * RAD_TO_DEG for angle in angles_rad]
return angles_deg
def get_bb_angles(self, resnum):
""" Returns a list of [phi, psi, omega] angles for a residue.
Arguments:
resnum -- The number of the residue.
"""
length = self.get_length()
angles_deg = []
if resnum == 1:
res_1 = self._chain[resnum]
res_2 = self._chain[resnum + 1]
N1 = res_1['N' ].get_vector()
CA1 = res_1['CA'].get_vector()
C1 = res_1['C' ].get_vector()
N2 = res_2['N' ].get_vector()
phi = None
psi = calc_dihedral(N1, CA1, C1, N2) * RAD_TO_DEG
omega = None
angles_deg = [phi, psi, omega]
elif resnum == length:
res_0 = self._chain[resnum - 1]
res_1 = self._chain[resnum]
CA0 = res_0['CA'].get_vector()
C0 = res_0['C' ].get_vector()
N1 = res_1['N' ].get_vector()
CA1 = res_1['CA'].get_vector()
C1 = res_1['C' ].get_vector()
phi = calc_dihedral(C0, N1, CA1, C1) * RAD_TO_DEG
psi = None
omega = calc_dihedral(CA0, C0, N1, CA1) * RAD_TO_DEG
angles_deg = [phi, psi, omega]
else:
res_0 = self._chain[resnum - 1]
res_1 = self._chain[resnum]
res_2 = self._chain[resnum + 1]
CA0 = res_0['CA'].get_vector()
C0 = res_0['C' ].get_vector()
N1 = res_1['N' ].get_vector()
CA1 = res_1['CA'].get_vector()
C1 = res_1['C' ].get_vector()
N2 = res_2['N' ].get_vector()
phi = calc_dihedral(C0, N1, CA1, C1) * RAD_TO_DEG
psi = calc_dihedral(N1, CA1, C1, N2) * RAD_TO_DEG
omega = calc_dihedral(CA0, C0, N1, CA1) * RAD_TO_DEG
angles_deg = [phi, psi, omega]
return angles_deg
def _get_chi(self, residue):
""" Returns a list of chi angles for a residue """
if residue.get_resname() == 'ALA':
return []
if residue.get_resname() == 'GLY':
return []
if residue.get_resname() == 'ARG':
sc_atom1 = residue['N'].get_vector()
sc_atom2 = residue['CA'].get_vector()
sc_atom3 = residue['CB'].get_vector()
sc_atom4 = residue['CG'].get_vector()
sc_atom5 = residue['CD'].get_vector()
sc_atom6 = residue['NE'].get_vector()
sc_atom7 = residue['CZ'].get_vector()
chi1 = calc_dihedral(sc_atom1, sc_atom2, sc_atom3, sc_atom4)
chi2 = calc_dihedral(sc_atom2, sc_atom3, sc_atom4, sc_atom5)
chi3 = calc_dihedral(sc_atom3, sc_atom4, sc_atom5, sc_atom6)
chi4 = calc_dihedral(sc_atom4, sc_atom5, sc_atom6, sc_atom7)
return [chi1, chi2, chi3, chi4]
if residue.get_resname() == 'ASN':
sc_atom1 = residue['N'].get_vector()
sc_atom2 = residue['CA'].get_vector()
sc_atom3 = residue['CB'].get_vector()
sc_atom4 = residue['CG'].get_vector()
sc_atom5 = residue['OD1'].get_vector()
chi1 = calc_dihedral(sc_atom1, sc_atom2, sc_atom3, sc_atom4)
chi2 = calc_dihedral(sc_atom2, sc_atom3, sc_atom4, sc_atom5)
return [chi1, chi2]
if residue.get_resname() == 'ASP':
sc_atom1 = residue['N'].get_vector()
sc_atom2 = residue['CA'].get_vector()
sc_atom3 = residue['CB'].get_vector()
sc_atom4 = residue['CG'].get_vector()
sc_atom5 = residue['OD1'].get_vector()
chi1 = calc_dihedral(sc_atom1, sc_atom2, sc_atom3, sc_atom4)
chi2 = calc_dihedral(sc_atom2, sc_atom3, sc_atom4, sc_atom5)
return [chi1, chi2]
if residue.get_resname() == 'CYS':
sc_atom1 = residue['N'].get_vector()
sc_atom2 = residue['CA'].get_vector()
sc_atom3 = residue['CB'].get_vector()
sc_atom4 = residue['SG'].get_vector()
chi1 = calc_dihedral(sc_atom1, sc_atom2, sc_atom3, sc_atom4)
return [chi1]
if residue.get_resname() == 'GLU':
sc_atom1 = residue['N'].get_vector()
sc_atom2 = residue['CA'].get_vector()
sc_atom3 = residue['CB'].get_vector()
sc_atom4 = residue['CG'].get_vector()
sc_atom5 = residue['CD'].get_vector()
sc_atom6 = residue['OE1'].get_vector()
chi1 = calc_dihedral(sc_atom1, sc_atom2, sc_atom3, sc_atom4)
chi2 = calc_dihedral(sc_atom2, sc_atom3, sc_atom4, sc_atom5)
chi3 = calc_dihedral(sc_atom3, sc_atom4, sc_atom5, sc_atom6)
return [chi1, chi2, chi3]
if residue.get_resname() == 'GLN':
sc_atom1 = residue['N'].get_vector()
sc_atom2 = residue['CA'].get_vector()
sc_atom3 = residue['CB'].get_vector()
sc_atom4 = residue['CG'].get_vector()
sc_atom5 = residue['CD'].get_vector()
sc_atom6 = residue['OE1'].get_vector()
chi1 = calc_dihedral(sc_atom1, sc_atom2, sc_atom3, sc_atom4)
chi2 = calc_dihedral(sc_atom2, sc_atom3, sc_atom4, sc_atom5)
chi3 = calc_dihedral(sc_atom3, sc_atom4, sc_atom5, sc_atom6)
return [chi1, chi2, chi3]
if residue.get_resname() == 'HIS':
sc_atom1 = residue['N'].get_vector()
sc_atom2 = residue['CA'].get_vector()
sc_atom3 = residue['CB'].get_vector()
sc_atom4 = residue['CG'].get_vector()
sc_atom5 = residue['CD2'].get_vector()
chi1 = calc_dihedral(sc_atom1, sc_atom2, sc_atom3, sc_atom4)
chi2 = calc_dihedral(sc_atom2, sc_atom3, sc_atom4, sc_atom5)
return [chi1, chi2]
if residue.get_resname() == 'ILE':
sc_atom1 = residue['N'].get_vector()
sc_atom2 = residue['CA'].get_vector()
sc_atom3 = residue['CB'].get_vector()
sc_atom4 = residue['CG1'].get_vector()
sc_atom5 = residue['CD1'].get_vector()
chi1 = calc_dihedral(sc_atom1, sc_atom2, sc_atom3, sc_atom4)
chi2 = calc_dihedral(sc_atom2, sc_atom3, sc_atom4, sc_atom5)
return [chi1, chi2]
if residue.get_resname() == 'LEU':
sc_atom1 = residue['N'].get_vector()
sc_atom2 = residue['CA'].get_vector()
sc_atom3 = residue['CB'].get_vector()
sc_atom4 = residue['CG'].get_vector()
sc_atom5 = residue['CD1'].get_vector()
sc_atom5_b = residue['CD2'].get_vector()
# Check for correct naming of CD1/CD2
check_angle = calc_dihedral(sc_atom5, sc_atom4, sc_atom3, sc_atom5_b)
# If the naming of the CD1 and CD2 atoms is correct,
# the check_angle will be around -120 deg. If the names
# are swapped, the angle will be around 120 deg.
if check_angle > 0:
sc_atom5 = sc_atom5_b
print "WARNING: Correcting for incorrect naming of CD1 and CD2 in residue LEU%i." % residue.get_id()[1]
chi1 = calc_dihedral(sc_atom1, sc_atom2, sc_atom3, sc_atom4)
chi2 = calc_dihedral(sc_atom2, sc_atom3, sc_atom4, sc_atom5)
return [chi1, chi2]
if residue.get_resname() == 'LYS':
sc_atom1 = residue['N'].get_vector()
sc_atom2 = residue['CA'].get_vector()
sc_atom3 = residue['CB'].get_vector()
sc_atom4 = residue['CG'].get_vector()
sc_atom5 = residue['CD'].get_vector()
sc_atom6 = residue['CE'].get_vector()
sc_atom7 = residue['NZ'].get_vector()
chi1 = calc_dihedral(sc_atom1, sc_atom2, sc_atom3, sc_atom4)
chi2 = calc_dihedral(sc_atom2, sc_atom3, sc_atom4, sc_atom5)
chi3 = calc_dihedral(sc_atom3, sc_atom4, sc_atom5, sc_atom6)
chi4 = calc_dihedral(sc_atom4, sc_atom5, sc_atom6, sc_atom7)
return [chi1, chi2, chi3, chi4]
if residue.get_resname() == 'MET':
sc_atom1 = residue['N'].get_vector()
sc_atom2 = residue['CA'].get_vector()
sc_atom3 = residue['CB'].get_vector()
sc_atom4 = residue['CG'].get_vector()
sc_atom5 = residue['SD'].get_vector()
sc_atom6 = residue['CE'].get_vector()
chi1 = calc_dihedral(sc_atom1, sc_atom2, sc_atom3, sc_atom4)
chi2 = calc_dihedral(sc_atom2, sc_atom3, sc_atom4, sc_atom5)
chi3 = calc_dihedral(sc_atom3, sc_atom4, sc_atom5, sc_atom6)
return [chi1, chi2, chi3]
if residue.get_resname() == 'PHE':
sc_atom1 = residue['N'].get_vector()
sc_atom2 = residue['CA'].get_vector()
sc_atom3 = residue['CB'].get_vector()
sc_atom4 = residue['CG'].get_vector()
sc_atom5 = residue['CD1'].get_vector()
chi1 = calc_dihedral(sc_atom1, sc_atom2, sc_atom3, sc_atom4)
chi2 = calc_dihedral(sc_atom2, sc_atom3, sc_atom4, sc_atom5)
return [chi1, chi2]
if residue.get_resname() == 'PRO':
# sc_atom1 = residue['N'].get_vector()
# sc_atom2 = residue['CA'].get_vector()
# sc_atom3 = residue['CB'].get_vector()
# sc_atom4 = residue['CG'].get_vector()
# sc_atom5 = residue['CD'].get_vector()
# chi1 = calc_dihedral(sc_atom1, sc_atom2, sc_atom3, sc_atom4)
# chi2 = calc_dihedral(sc_atom2, sc_atom3, sc_atom4, sc_atom5)
# chi3 = calc_dihedral(sc_atom3, sc_atom4, sc_atom5, sc_atom1)
# chi4 = calc_dihedral(sc_atom4, sc_atom5, sc_atom1, sc_atom2)
# return [chi1, chi2, chi3, chi4]
return []
if residue.get_resname() == 'SER':
sc_atom1 = residue['N'].get_vector()
sc_atom2 = residue['CA'].get_vector()
sc_atom3 = residue['CB'].get_vector()
sc_atom4 = residue['OG'].get_vector()
chi1 = calc_dihedral(sc_atom1, sc_atom2, sc_atom3, sc_atom4)
return [chi1]
if residue.get_resname() == 'THR':
sc_atom1 = residue['N'].get_vector()
sc_atom2 = residue['CA'].get_vector()
sc_atom3 = residue['CB'].get_vector()
sc_atom4 = residue['OG1'].get_vector()
chi1 = calc_dihedral(sc_atom1, sc_atom2, sc_atom3, sc_atom4)
return [chi1]
if residue.get_resname() == 'TRP':
sc_atom1 = residue['N'].get_vector()
sc_atom2 = residue['CA'].get_vector()
sc_atom3 = residue['CB'].get_vector()
sc_atom4 = residue['CG'].get_vector()
sc_atom5 = residue['CD1'].get_vector()
chi1 = calc_dihedral(sc_atom1, sc_atom2, sc_atom3, sc_atom4)
chi2 = calc_dihedral(sc_atom2, sc_atom3, sc_atom4, sc_atom5)
return [chi1, chi2]
if residue.get_resname() == 'TYR':
sc_atom1 = residue['N'].get_vector()
sc_atom2 = residue['CA'].get_vector()
sc_atom3 = residue['CB'].get_vector()
sc_atom4 = residue['CG'].get_vector()
sc_atom5 = residue['CD1'].get_vector()
chi1 = calc_dihedral(sc_atom1, sc_atom2, sc_atom3, sc_atom4)
chi2 = calc_dihedral(sc_atom2, sc_atom3, sc_atom4, sc_atom5)
return [chi1, chi2]
if residue.get_resname() == 'VAL':
sc_atom1 = residue['N'].get_vector()
sc_atom2 = residue['CA'].get_vector()
sc_atom3 = residue['CB'].get_vector()
sc_atom4 = residue['CG1'].get_vector()
sc_atom4_b = residue['CG2'].get_vector()
# Check for correct naming of CG1/CG2
check_angle = calc_dihedral(sc_atom4, sc_atom3, sc_atom2, sc_atom4_b)
# If the naming of the CG1 and CG2 atoms is correct,
# the check_angle will be around -120 deg. If the names
# are swapped, the angle will be around 120 deg.
if check_angle > 0:
sc_atom4 = sc_atom4_b
print "WARNING: Correcting for incorrect naming of CG1 and CG2 in residue VAL%i." % residue.get_id()[1]
chi1 = calc_dihedral(sc_atom1, sc_atom2, sc_atom3, sc_atom4)
return [chi1]
else:
return "FAILLLL"
def _get_first_chain(self, structure):
""" Returns the first chain in a Bio.PDB structure object """
for model in structure:
for chain in model:
return chain
def get_sequence(self):
""" Returns the amino acid sequence from the PDB structure. """
return self._sequence
def get_resname(self, resnum):
""" Returns the three letter code for a residue in the PDB file.
E.g. "VAL", "ALA", etc.
Arguments:
resnum -- The number of the residue
"""
letter = self._sequence[resnum - 1]
return one_to_three(letter)
def _get_sequence_from_chain(self, chain):
""" Extracts the amino acid sequence from a Bio.PDB chain object """
sequence = ""
for residue in chain:
if is_aa(residue):
sequence += three_to_one(residue.get_resname())
else:
break
return sequence
| 43.598972
| 128
| 0.517453
|
4a05847e3922837240f3f8835817f23a485afbc7
| 340
|
py
|
Python
|
users/models.py
|
harryghgim/django-blog
|
13b5613766664e3250c12824c25a021bce33c8e4
|
[
"MIT"
] | null | null | null |
users/models.py
|
harryghgim/django-blog
|
13b5613766664e3250c12824c25a021bce33c8e4
|
[
"MIT"
] | 10
|
2021-03-21T15:46:43.000Z
|
2022-03-12T00:40:23.000Z
|
users/models.py
|
gwanghyeongim/django-blog
|
13b5613766664e3250c12824c25a021bce33c8e4
|
[
"MIT"
] | null | null | null |
from django.db import models
from django.contrib.auth.models import User
from PIL import Image
class Profile(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE)
image = models.ImageField(default='default.jpg', upload_to='profile_pics')
def __str__(self):
return f'{self.user.username} Profile'
| 28.333333
| 78
| 0.747059
|
4a0584be3cedf5f91ec4798730b269a045b9911d
| 33,574
|
py
|
Python
|
telemetry/telemetry/internal/actions/action_runner.py
|
cloudera/catapult
|
b15a8c1c417380f4c8289596354cd10496eac12d
|
[
"BSD-3-Clause"
] | null | null | null |
telemetry/telemetry/internal/actions/action_runner.py
|
cloudera/catapult
|
b15a8c1c417380f4c8289596354cd10496eac12d
|
[
"BSD-3-Clause"
] | 1
|
2021-02-23T22:20:14.000Z
|
2021-02-23T22:20:14.000Z
|
telemetry/telemetry/internal/actions/action_runner.py
|
isabella232/catapult
|
b15a8c1c417380f4c8289596354cd10496eac12d
|
[
"BSD-3-Clause"
] | 1
|
2020-12-12T10:38:37.000Z
|
2020-12-12T10:38:37.000Z
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import time
import urlparse
from telemetry.internal.actions.drag import DragAction
from telemetry.internal.actions.javascript_click import ClickElementAction
from telemetry.internal.actions.key_event import KeyPressAction
from telemetry.internal.actions.load_media import LoadMediaAction
from telemetry.internal.actions.loop import LoopAction
from telemetry.internal.actions.mouse_click import MouseClickAction
from telemetry.internal.actions.navigate import NavigateAction
from telemetry.internal.actions.page_action import GESTURE_SOURCE_DEFAULT
from telemetry.internal.actions.page_action import SUPPORTED_GESTURE_SOURCES
from telemetry.internal.actions.pinch import PinchAction
from telemetry.internal.actions.play import PlayAction
from telemetry.internal.actions.repaint_continuously import (
RepaintContinuouslyAction)
from telemetry.internal.actions.repeatable_scroll import RepeatableScrollAction
from telemetry.internal.actions.scroll import ScrollAction
from telemetry.internal.actions.scroll_bounce import ScrollBounceAction
from telemetry.internal.actions.seek import SeekAction
from telemetry.internal.actions.swipe import SwipeAction
from telemetry.internal.actions.tap import TapAction
from telemetry.internal.actions.wait import WaitForElementAction
from telemetry.web_perf import timeline_interaction_record
_DUMP_WAIT_TIME = 3
class ActionRunner(object):
def __init__(self, tab, skip_waits=False):
self._tab = tab
self._skip_waits = skip_waits
@property
def tab(self):
"""Returns the tab on which actions are performed."""
return self._tab
def _RunAction(self, action):
action.WillRunAction(self._tab)
action.RunAction(self._tab)
def CreateInteraction(self, label, repeatable=False):
""" Create an action.Interaction object that issues interaction record.
An interaction record is a labeled time period containing
interaction that developers care about. Each set of metrics
specified in flags will be calculated for this time period.
To mark the start of interaction record, call Begin() method on the returned
object. To mark the finish of interaction record, call End() method on
it. Or better yet, use the with statement to create an
interaction record that covers the actions in the with block.
e.g:
with action_runner.CreateInteraction('Animation-1'):
action_runner.TapElement(...)
action_runner.WaitForJavaScriptCondition(...)
Args:
label: A label for this particular interaction. This can be any
user-defined string, but must not contain '/'.
repeatable: Whether other interactions may use the same logical name
as this interaction. All interactions with the same logical name must
have the same flags.
Returns:
An instance of action_runner.Interaction
"""
flags = []
if repeatable:
flags.append(timeline_interaction_record.REPEATABLE)
return Interaction(self._tab, label, flags)
def CreateGestureInteraction(self, label, repeatable=False):
""" Create an action.Interaction object that issues gesture-based
interaction record.
This is similar to normal interaction record, but it will
auto-narrow the interaction time period to only include the
synthetic gesture event output by Chrome. This is typically use to
reduce noise in gesture-based analysis (e.g., analysis for a
swipe/scroll).
The interaction record label will be prepended with 'Gesture_'.
e.g:
with action_runner.CreateGestureInteraction('Scroll-1'):
action_runner.ScrollPage()
Args:
label: A label for this particular interaction. This can be any
user-defined string, but must not contain '/'.
repeatable: Whether other interactions may use the same logical name
as this interaction. All interactions with the same logical name must
have the same flags.
Returns:
An instance of action_runner.Interaction
"""
return self.CreateInteraction('Gesture_' + label, repeatable)
def MeasureMemory(self, deterministic_mode=False):
"""Add a memory measurement to the trace being recorded.
Behaves as a no-op if tracing is not enabled.
TODO(perezju): Also behave as a no-op if tracing is enabled but
memory-infra is not.
Args:
deterministic_mode: A boolean indicating whether to attempt or not to
control the environment (force GCs, clear caches) before making the
measurement in an attempt to obtain more deterministic results.
Returns:
GUID of the generated dump if one was triggered, None otherwise.
"""
platform = self.tab.browser.platform
if not platform.tracing_controller.is_tracing_running:
logging.warning('Tracing is off. No memory dumps are being recorded.')
return None
if deterministic_mode:
self.Wait(_DUMP_WAIT_TIME)
self.ForceGarbageCollection()
if platform.SupportFlushEntireSystemCache():
platform.FlushEntireSystemCache()
self.Wait(_DUMP_WAIT_TIME)
dump_id = self.tab.browser.DumpMemory()
assert dump_id, 'Unable to obtain memory dump'
return dump_id
def Navigate(self, url, script_to_evaluate_on_commit=None,
timeout_in_seconds=60):
"""Navigates to |url|.
If |script_to_evaluate_on_commit| is given, the script source string will be
evaluated when the navigation is committed. This is after the context of
the page exists, but before any script on the page itself has executed.
"""
if urlparse.urlparse(url).scheme == 'file':
url = self._tab.browser.platform.http_server.UrlOf(url[7:])
self._RunAction(NavigateAction(
url=url,
script_to_evaluate_on_commit=script_to_evaluate_on_commit,
timeout_in_seconds=timeout_in_seconds))
def WaitForNavigate(self, timeout_in_seconds_seconds=60):
start_time = time.time()
self._tab.WaitForNavigate(timeout_in_seconds_seconds)
time_left_in_seconds = (start_time + timeout_in_seconds_seconds
- time.time())
time_left_in_seconds = max(0, time_left_in_seconds)
self._tab.WaitForDocumentReadyStateToBeInteractiveOrBetter(
time_left_in_seconds)
def ReloadPage(self):
"""Reloads the page."""
self._tab.ExecuteJavaScript('window.location.reload()')
self._tab.WaitForDocumentReadyStateToBeInteractiveOrBetter()
def ExecuteJavaScript(self, statement):
"""Executes a given JavaScript expression. Does not return the result.
Example: runner.ExecuteJavaScript('var foo = 1;');
Args:
statement: The statement to execute (provided as string).
Raises:
EvaluationException: The statement failed to execute.
"""
self._tab.ExecuteJavaScript(statement)
def EvaluateJavaScript(self, expression):
"""Returns the evaluation result of the given JavaScript expression.
The evaluation results must be convertible to JSON. If the result
is not needed, use ExecuteJavaScript instead.
Example: num = runner.EvaluateJavaScript('document.location.href')
Args:
expression: The expression to evaluate (provided as string).
Raises:
EvaluationException: The statement expression failed to execute
or the evaluation result can not be JSON-ized.
"""
return self._tab.EvaluateJavaScript(expression)
def Wait(self, seconds):
"""Wait for the number of seconds specified.
Args:
seconds: The number of seconds to wait.
"""
if not self._skip_waits:
time.sleep(seconds)
def WaitForJavaScriptCondition(self, condition, timeout_in_seconds=60):
"""Wait for a JavaScript condition to become true.
Example: runner.WaitForJavaScriptCondition('window.foo == 10');
Args:
condition: The JavaScript condition (as string).
timeout_in_seconds: The timeout in seconds (default to 60).
"""
self._tab.WaitForJavaScriptExpression(condition, timeout_in_seconds)
def WaitForElement(self, selector=None, text=None, element_function=None,
timeout_in_seconds=60):
"""Wait for an element to appear in the document.
The element may be selected via selector, text, or element_function.
Only one of these arguments must be specified.
Args:
selector: A CSS selector describing the element.
text: The element must contains this exact text.
element_function: A JavaScript function (as string) that is used
to retrieve the element. For example:
'(function() { return foo.element; })()'.
timeout_in_seconds: The timeout in seconds (default to 60).
"""
self._RunAction(WaitForElementAction(
selector=selector, text=text, element_function=element_function,
timeout_in_seconds=timeout_in_seconds))
def TapElement(self, selector=None, text=None, element_function=None):
"""Tap an element.
The element may be selected via selector, text, or element_function.
Only one of these arguments must be specified.
Args:
selector: A CSS selector describing the element.
text: The element must contains this exact text.
element_function: A JavaScript function (as string) that is used
to retrieve the element. For example:
'(function() { return foo.element; })()'.
"""
self._RunAction(TapAction(
selector=selector, text=text, element_function=element_function))
def ClickElement(self, selector=None, text=None, element_function=None):
"""Click an element.
The element may be selected via selector, text, or element_function.
Only one of these arguments must be specified.
Args:
selector: A CSS selector describing the element.
text: The element must contains this exact text.
element_function: A JavaScript function (as string) that is used
to retrieve the element. For example:
'(function() { return foo.element; })()'.
"""
self._RunAction(ClickElementAction(
selector=selector, text=text, element_function=element_function))
def DragPage(self, left_start_ratio, top_start_ratio, left_end_ratio,
top_end_ratio, speed_in_pixels_per_second=800, use_touch=False,
selector=None, text=None, element_function=None):
"""Perform a drag gesture on the page.
You should specify a start and an end point in ratios of page width and
height (see drag.js for full implementation).
Args:
left_start_ratio: The horizontal starting coordinate of the
gesture, as a ratio of the visible bounding rectangle for
document.body.
top_start_ratio: The vertical starting coordinate of the
gesture, as a ratio of the visible bounding rectangle for
document.body.
left_end_ratio: The horizontal ending coordinate of the
gesture, as a ratio of the visible bounding rectangle for
document.body.
top_end_ratio: The vertical ending coordinate of the
gesture, as a ratio of the visible bounding rectangle for
document.body.
speed_in_pixels_per_second: The speed of the gesture (in pixels/s).
use_touch: Whether dragging should be done with touch input.
"""
self._RunAction(DragAction(
left_start_ratio=left_start_ratio, top_start_ratio=top_start_ratio,
left_end_ratio=left_end_ratio, top_end_ratio=top_end_ratio,
speed_in_pixels_per_second=speed_in_pixels_per_second,
use_touch=use_touch, selector=selector, text=text,
element_function=element_function))
def PinchPage(self, left_anchor_ratio=0.5, top_anchor_ratio=0.5,
scale_factor=None, speed_in_pixels_per_second=800):
"""Perform the pinch gesture on the page.
It computes the pinch gesture automatically based on the anchor
coordinate and the scale factor. The scale factor is the ratio of
of the final span and the initial span of the gesture.
Args:
left_anchor_ratio: The horizontal pinch anchor coordinate of the
gesture, as a ratio of the visible bounding rectangle for
document.body.
top_anchor_ratio: The vertical pinch anchor coordinate of the
gesture, as a ratio of the visible bounding rectangle for
document.body.
scale_factor: The ratio of the final span to the initial span.
The default scale factor is
3.0 / (window.outerWidth/window.innerWidth).
speed_in_pixels_per_second: The speed of the gesture (in pixels/s).
"""
self._RunAction(PinchAction(
left_anchor_ratio=left_anchor_ratio, top_anchor_ratio=top_anchor_ratio,
scale_factor=scale_factor,
speed_in_pixels_per_second=speed_in_pixels_per_second))
def PinchElement(self, selector=None, text=None, element_function=None,
left_anchor_ratio=0.5, top_anchor_ratio=0.5,
scale_factor=None, speed_in_pixels_per_second=800):
"""Perform the pinch gesture on an element.
It computes the pinch gesture automatically based on the anchor
coordinate and the scale factor. The scale factor is the ratio of
of the final span and the initial span of the gesture.
Args:
selector: A CSS selector describing the element.
text: The element must contains this exact text.
element_function: A JavaScript function (as string) that is used
to retrieve the element. For example:
'function() { return foo.element; }'.
left_anchor_ratio: The horizontal pinch anchor coordinate of the
gesture, as a ratio of the visible bounding rectangle for
the element.
top_anchor_ratio: The vertical pinch anchor coordinate of the
gesture, as a ratio of the visible bounding rectangle for
the element.
scale_factor: The ratio of the final span to the initial span.
The default scale factor is
3.0 / (window.outerWidth/window.innerWidth).
speed_in_pixels_per_second: The speed of the gesture (in pixels/s).
"""
self._RunAction(PinchAction(
selector=selector, text=text, element_function=element_function,
left_anchor_ratio=left_anchor_ratio, top_anchor_ratio=top_anchor_ratio,
scale_factor=scale_factor,
speed_in_pixels_per_second=speed_in_pixels_per_second))
def ScrollPage(self, left_start_ratio=0.5, top_start_ratio=0.5,
direction='down', distance=None, distance_expr=None,
speed_in_pixels_per_second=800, use_touch=False,
synthetic_gesture_source=GESTURE_SOURCE_DEFAULT):
"""Perform scroll gesture on the page.
You may specify distance or distance_expr, but not both. If
neither is specified, the default scroll distance is variable
depending on direction (see scroll.js for full implementation).
Args:
left_start_ratio: The horizontal starting coordinate of the
gesture, as a ratio of the visible bounding rectangle for
document.body.
top_start_ratio: The vertical starting coordinate of the
gesture, as a ratio of the visible bounding rectangle for
document.body.
direction: The direction of scroll, either 'left', 'right',
'up', 'down', 'upleft', 'upright', 'downleft', or 'downright'
distance: The distance to scroll (in pixel).
distance_expr: A JavaScript expression (as string) that can be
evaluated to compute scroll distance. Example:
'window.scrollTop' or '(function() { return crazyMath(); })()'.
speed_in_pixels_per_second: The speed of the gesture (in pixels/s).
use_touch: Whether scrolling should be done with touch input.
synthetic_gesture_source: the source input device type for the
synthetic gesture: 'DEFAULT', 'TOUCH' or 'MOUSE'.
"""
assert synthetic_gesture_source in SUPPORTED_GESTURE_SOURCES
self._RunAction(ScrollAction(
left_start_ratio=left_start_ratio, top_start_ratio=top_start_ratio,
direction=direction, distance=distance, distance_expr=distance_expr,
speed_in_pixels_per_second=speed_in_pixels_per_second,
use_touch=use_touch, synthetic_gesture_source=synthetic_gesture_source))
  def RepeatableBrowserDrivenScroll(self, x_scroll_distance_ratio=0.0,
                                    y_scroll_distance_ratio=0.5,
                                    repeat_count=0,
                                    repeat_delay_ms=250,
                                    timeout=60,
                                    prevent_fling=None,
                                    speed=None):
    """Perform a browser driven repeatable scroll gesture.

    The scroll gesture is driven from the browser, this is useful because the
    main thread often isn't resposive but the browser process usually is, so the
    delay between the scroll gestures should be consistent.

    Args:
      x_scroll_distance_ratio: The horizontal length of the scroll as a fraction
          of the screen width.
      y_scroll_distance_ratio: The vertical length of the scroll as a fraction
          of the screen height.
      repeat_count: The number of additional times to repeat the gesture.
      repeat_delay_ms: The delay in milliseconds between each scroll gesture.
      timeout: Maximum time in seconds to wait for the full gesture sequence
          to complete.
      prevent_fling: Prevents a fling gesture.
      speed: Swipe speed in pixels per second.
    """
    self._RunAction(RepeatableScrollAction(
        x_scroll_distance_ratio=x_scroll_distance_ratio,
        y_scroll_distance_ratio=y_scroll_distance_ratio,
        repeat_count=repeat_count,
        repeat_delay_ms=repeat_delay_ms, timeout=timeout,
        prevent_fling=prevent_fling, speed=speed))
  def ScrollElement(self, selector=None, text=None, element_function=None,
                    left_start_ratio=0.5, top_start_ratio=0.5,
                    direction='down', distance=None, distance_expr=None,
                    speed_in_pixels_per_second=800, use_touch=False,
                    synthetic_gesture_source=GESTURE_SOURCE_DEFAULT):
    """Perform scroll gesture on the element.

    The element may be selected via selector, text, or element_function.
    Only one of these arguments must be specified.

    You may specify distance or distance_expr, but not both. If
    neither is specified, the default scroll distance is variable
    depending on direction (see scroll.js for full implementation).

    Args:
      selector: A CSS selector describing the element.
      text: The element must contains this exact text.
      element_function: A JavaScript function (as string) that is used
          to retrieve the element. For example:
          'function() { return foo.element; }'.
      left_start_ratio: The horizontal starting coordinate of the
          gesture, as a ratio of the visible bounding rectangle for
          the element.
      top_start_ratio: The vertical starting coordinate of the
          gesture, as a ratio of the visible bounding rectangle for
          the element.
      direction: The direction of scroll, either 'left', 'right',
          'up', 'down', 'upleft', 'upright', 'downleft', or 'downright'
      distance: The distance to scroll (in pixel).
      distance_expr: A JavaScript expression (as string) that can be
          evaluated to compute scroll distance. Example:
          'window.scrollTop' or '(function() { return crazyMath(); })()'.
      speed_in_pixels_per_second: The speed of the gesture (in pixels/s).
      use_touch: Whether scrolling should be done with touch input.
      synthetic_gesture_source: the source input device type for the
          synthetic gesture: 'DEFAULT', 'TOUCH' or 'MOUSE'.

    Raises:
      AssertionError: If synthetic_gesture_source is not one of
          SUPPORTED_GESTURE_SOURCES.
    """
    assert synthetic_gesture_source in SUPPORTED_GESTURE_SOURCES
    self._RunAction(ScrollAction(
        selector=selector, text=text, element_function=element_function,
        left_start_ratio=left_start_ratio, top_start_ratio=top_start_ratio,
        direction=direction, distance=distance, distance_expr=distance_expr,
        speed_in_pixels_per_second=speed_in_pixels_per_second,
        use_touch=use_touch, synthetic_gesture_source=synthetic_gesture_source))
  def ScrollBouncePage(self, left_start_ratio=0.5, top_start_ratio=0.5,
                       direction='down', distance=100,
                       overscroll=10, repeat_count=10,
                       speed_in_pixels_per_second=400):
    """Perform scroll bounce gesture on the page.

    This gesture scrolls the page by the number of pixels specified in
    distance, in the given direction, followed by a scroll by
    (distance + overscroll) pixels in the opposite direction.
    The above gesture is repeated repeat_count times.

    Args:
      left_start_ratio: The horizontal starting coordinate of the
          gesture, as a ratio of the visible bounding rectangle for
          document.body.
      top_start_ratio: The vertical starting coordinate of the
          gesture, as a ratio of the visible bounding rectangle for
          document.body.
      direction: The direction of scroll, either 'left', 'right',
          'up', 'down', 'upleft', 'upright', 'downleft', or 'downright'
      distance: The distance to scroll (in pixel).
      overscroll: The number of additional pixels to scroll back, in
          addition to the given distance.
      repeat_count: How often we want to repeat the full gesture.
      speed_in_pixels_per_second: The speed of the gesture (in pixels/s).
    """
    self._RunAction(ScrollBounceAction(
        left_start_ratio=left_start_ratio, top_start_ratio=top_start_ratio,
        direction=direction, distance=distance,
        overscroll=overscroll, repeat_count=repeat_count,
        speed_in_pixels_per_second=speed_in_pixels_per_second))
  def ScrollBounceElement(
      self, selector=None, text=None, element_function=None,
      left_start_ratio=0.5, top_start_ratio=0.5,
      direction='down', distance=100,
      overscroll=10, repeat_count=10,
      speed_in_pixels_per_second=400):
    """Perform scroll bounce gesture on the element.

    This gesture scrolls on the element by the number of pixels specified in
    distance, in the given direction, followed by a scroll by
    (distance + overscroll) pixels in the opposite direction.
    The above gesture is repeated repeat_count times.

    Args:
      selector: A CSS selector describing the element.
      text: The element must contains this exact text.
      element_function: A JavaScript function (as string) that is used
          to retrieve the element. For example:
          'function() { return foo.element; }'.
      left_start_ratio: The horizontal starting coordinate of the
          gesture, as a ratio of the visible bounding rectangle for
          document.body.
          NOTE(review): presumably this is relative to the selected
          element, as in ScrollElement — confirm against the action
          implementation.
      top_start_ratio: The vertical starting coordinate of the
          gesture, as a ratio of the visible bounding rectangle for
          document.body.
      direction: The direction of scroll, either 'left', 'right',
          'up', 'down', 'upleft', 'upright', 'downleft', or 'downright'
      distance: The distance to scroll (in pixel).
      overscroll: The number of additional pixels to scroll back, in
          addition to the given distance.
      repeat_count: How often we want to repeat the full gesture.
      speed_in_pixels_per_second: The speed of the gesture (in pixels/s).
    """
    self._RunAction(ScrollBounceAction(
        selector=selector, text=text, element_function=element_function,
        left_start_ratio=left_start_ratio, top_start_ratio=top_start_ratio,
        direction=direction, distance=distance,
        overscroll=overscroll, repeat_count=repeat_count,
        speed_in_pixels_per_second=speed_in_pixels_per_second))
  def MouseClick(self, selector=None):
    """Mouse click the given element.

    Args:
      selector: A CSS selector describing the element. If None, the
          choice of target is delegated to MouseClickAction's default —
          TODO confirm against the action implementation.
    """
    self._RunAction(MouseClickAction(selector=selector))
  def SwipePage(self, left_start_ratio=0.5, top_start_ratio=0.5,
                direction='left', distance=100, speed_in_pixels_per_second=800):
    """Perform swipe gesture on the page.

    Args:
      left_start_ratio: The horizontal starting coordinate of the
          gesture, as a ratio of the visible bounding rectangle for
          document.body.
      top_start_ratio: The vertical starting coordinate of the
          gesture, as a ratio of the visible bounding rectangle for
          document.body.
      direction: The direction of swipe, either 'left', 'right',
          'up', or 'down'
      distance: The distance to swipe (in pixel).
      speed_in_pixels_per_second: The speed of the gesture (in pixels/s).
    """
    self._RunAction(SwipeAction(
        left_start_ratio=left_start_ratio, top_start_ratio=top_start_ratio,
        direction=direction, distance=distance,
        speed_in_pixels_per_second=speed_in_pixels_per_second))
  def SwipeElement(self, selector=None, text=None, element_function=None,
                   left_start_ratio=0.5, top_start_ratio=0.5,
                   direction='left', distance=100,
                   speed_in_pixels_per_second=800):
    """Perform swipe gesture on the element.

    The element may be selected via selector, text, or element_function.
    Only one of these arguments must be specified.

    Args:
      selector: A CSS selector describing the element.
      text: The element must contains this exact text.
      element_function: A JavaScript function (as string) that is used
          to retrieve the element. For example:
          'function() { return foo.element; }'.
      left_start_ratio: The horizontal starting coordinate of the
          gesture, as a ratio of the visible bounding rectangle for
          the element.
      top_start_ratio: The vertical starting coordinate of the
          gesture, as a ratio of the visible bounding rectangle for
          the element.
      direction: The direction of swipe, either 'left', 'right',
          'up', or 'down'
      distance: The distance to swipe (in pixel).
      speed_in_pixels_per_second: The speed of the gesture (in pixels/s).
    """
    self._RunAction(SwipeAction(
        selector=selector, text=text, element_function=element_function,
        left_start_ratio=left_start_ratio, top_start_ratio=top_start_ratio,
        direction=direction, distance=distance,
        speed_in_pixels_per_second=speed_in_pixels_per_second))
  def PressKey(self, key, repeat_count=1, repeat_delay_ms=100, timeout=60):
    """Perform a key press.

    Args:
      key: DOM value of the pressed key (e.g. 'PageDown', see
          https://developer.mozilla.org/en-US/docs/Web/API/KeyboardEvent/key).
      repeat_count: How many times the key should be pressed.
      repeat_delay_ms: Delay after each keypress (including the last one) in
          milliseconds.
      timeout: Maximum time in seconds to wait for each individual key press
          action to complete.
    """
    # xrange: this module targets Python 2 (see raw_input elsewhere in file).
    for _ in xrange(repeat_count):
      self._RunAction(KeyPressAction(key, timeout=timeout))
      self.Wait(repeat_delay_ms / 1000.0)
  def EnterText(self, text, character_delay_ms=100, timeout=60):
    """Enter text by performing key presses.

    Args:
      text: The text to enter.
      character_delay_ms: Delay after each keypress (including the last one) in
          milliseconds.
      timeout: Maximum time in seconds to wait for each individual key press,
          forwarded to PressKey.
    """
    for c in text:
      self.PressKey(c, repeat_delay_ms=character_delay_ms, timeout=timeout)
  def LoadMedia(self, selector=None, event_timeout_in_seconds=0,
                event_to_await='canplaythrough'):
    """Invokes load() on media elements and awaits an event.

    Args:
      selector: A CSS selector describing the element. If none is
          specified, play the first media element on the page. If the
          selector matches more than 1 media element, all of them will
          be played.
      event_timeout_in_seconds: Maximum waiting time for the event to be fired.
          0 means do not wait.
      event_to_await: Which event to await. For example: 'canplaythrough' or
          'loadedmetadata'.

    Raises:
      TimeoutException: If the maximum waiting time is exceeded.
    """
    self._RunAction(LoadMediaAction(
        selector=selector, timeout_in_seconds=event_timeout_in_seconds,
        event_to_await=event_to_await))
  def PlayMedia(self, selector=None,
                playing_event_timeout_in_seconds=0,
                ended_event_timeout_in_seconds=0):
    """Invokes the "play" action on media elements (such as video).

    Args:
      selector: A CSS selector describing the element. If none is
          specified, play the first media element on the page. If the
          selector matches more than 1 media element, all of them will
          be played.
      playing_event_timeout_in_seconds: Maximum waiting time for the "playing"
          event (dispatched when the media begins to play) to be fired.
          0 means do not wait.
      ended_event_timeout_in_seconds: Maximum waiting time for the "ended"
          event (dispatched when playback completes) to be fired.
          0 means do not wait.

    Raises:
      TimeoutException: If the maximum waiting time is exceeded.
    """
    self._RunAction(PlayAction(
        selector=selector,
        playing_event_timeout_in_seconds=playing_event_timeout_in_seconds,
        ended_event_timeout_in_seconds=ended_event_timeout_in_seconds))
  def SeekMedia(self, seconds, selector=None, timeout_in_seconds=0,
                log_time=True, label=''):
    """Performs a seek action on media elements (such as video).

    Args:
      seconds: The media time to seek to.
      selector: A CSS selector describing the element. If none is
          specified, seek the first media element on the page. If the
          selector matches more than 1 media element, all of them will
          be seeked.
      timeout_in_seconds: Maximum waiting time for the "seeked" event
          (dispatched when the seeked operation completes) to be
          fired.  0 means do not wait.
      log_time: Whether to log the seek time for the perf
          measurement. Useful when performing multiple seek.
      label: A suffix string to name the seek perf measurement.

    Raises:
      TimeoutException: If the maximum waiting time is exceeded.
    """
    self._RunAction(SeekAction(
        seconds=seconds, selector=selector,
        timeout_in_seconds=timeout_in_seconds,
        log_time=log_time, label=label))
  def LoopMedia(self, loop_count, selector=None, timeout_in_seconds=None):
    """Loops a media playback.

    Args:
      loop_count: The number of times to loop the playback.
      selector: A CSS selector describing the element. If none is
          specified, loop the first media element on the page. If the
          selector matches more than 1 media element, all of them will
          be looped.
      timeout_in_seconds: Maximum waiting time for the looped playback to
          complete. 0 means do not wait. None (the default) means to
          wait loop_count * 60 seconds.

    Raises:
      TimeoutException: If the maximum waiting time is exceeded.
    """
    self._RunAction(LoopAction(
        loop_count=loop_count, selector=selector,
        timeout_in_seconds=timeout_in_seconds))
  def ForceGarbageCollection(self):
    """Forces JavaScript garbage collection on the page.

    Delegates to the inspected tab's CollectGarbage().
    """
    self._tab.CollectGarbage()
  def SimulateMemoryPressureNotification(self, pressure_level):
    """Simulate memory pressure notification.

    Delegates to the browser owning the current tab.

    Args:
      pressure_level: 'moderate' or 'critical'.
    """
    self._tab.browser.SimulateMemoryPressureNotification(pressure_level)
  def PauseInteractive(self):
    """Pause the page execution and wait for terminal interaction.

    This is typically used for debugging. You can use this to pause
    the page execution and inspect the browser state before
    continuing.
    """
    # raw_input: this module targets Python 2.
    raw_input("Interacting... Press Enter to continue.")
  def RepaintContinuously(self, seconds):
    """Continuously repaints the visible content.

    It does this by requesting animation frames until the given number
    of seconds have elapsed AND at least three RAFs have been
    fired. Times out after max(60, self.seconds), if less than three
    RAFs were fired.

    Args:
      seconds: Requested duration; forced to 0 when self._skip_waits is
          set (so unit tests and dry runs do not block).
    """
    self._RunAction(RepaintContinuouslyAction(
        seconds=0 if self._skip_waits else seconds))
class Interaction(object):
  """Context manager that brackets a timeline interaction record.

  Begin()/End() emit console.time/console.timeEnd markers (built by
  timeline_interaction_record.GetJavaScriptMarker) in the inspected page
  so the interaction can be located in the trace afterwards.
  """
  def __init__(self, action_runner, label, flags):
    # action_runner: used to execute JavaScript in the page; must be truthy.
    # label: non-empty name of the interaction record.
    # flags: list of interaction flags understood by
    #     timeline_interaction_record.
    assert action_runner
    assert label
    assert isinstance(flags, list)
    self._action_runner = action_runner
    self._label = label
    self._flags = flags
    self._started = False
  def __enter__(self):
    self.Begin()
    return self
  def __exit__(self, exc_type, exc_value, traceback):
    # Only close the record on a clean exit; when an exception escaped the
    # with-block the record is deliberately left open and a warning logged,
    # so the trace anomaly does not mask the original error.
    if exc_value is None:
      self.End()
    else:
      logging.warning(
          'Exception was raised in the with statement block, the end of '
          'interaction record is not marked.')
  def Begin(self):
    """Emit the console.time marker; must not already be started."""
    assert not self._started
    self._started = True
    self._action_runner.ExecuteJavaScript(
        'console.time("%s");' %
        timeline_interaction_record.GetJavaScriptMarker(
            self._label, self._flags))
  def End(self):
    """Emit the matching console.timeEnd marker; must be started."""
    assert self._started
    self._started = False
    self._action_runner.ExecuteJavaScript(
        'console.timeEnd("%s");' %
        timeline_interaction_record.GetJavaScriptMarker(
            self._label, self._flags))
| 42.445006
| 80
| 0.707452
|
4a058524717a4bace37c9185cf82ad7fe4acf95d
| 5,748
|
py
|
Python
|
CodonSubstitution/build/biopython/Bio/Phylo/PAML/_parse_yn00.py
|
JackCurragh/DARNED
|
13963d129bd8f69fb1106ad1f47394b3211a939c
|
[
"MIT"
] | 37
|
2015-02-24T18:58:30.000Z
|
2021-03-07T21:22:18.000Z
|
CodonSubstitution/build/biopython/Bio/Phylo/PAML/_parse_yn00.py
|
JackCurragh/DARNED
|
13963d129bd8f69fb1106ad1f47394b3211a939c
|
[
"MIT"
] | 12
|
2016-06-09T21:57:00.000Z
|
2020-09-11T18:48:51.000Z
|
CodonSubstitution/build/biopython/Bio/Phylo/PAML/_parse_yn00.py
|
JackCurragh/DARNED
|
13963d129bd8f69fb1106ad1f47394b3211a939c
|
[
"MIT"
] | 19
|
2016-03-26T08:15:17.000Z
|
2021-04-12T05:03:29.000Z
|
# Copyright (C) 2011 by Brandon Invergo (b.invergo@gmail.com)
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
import re
def parse_ng86(lines, results):
    """Parse the Nei & Gojobori (1986) section of the results.

    Nei_Gojobori results are organized in a lower triangular matrix, with
    the sequence names labeling the rows and statistics in the format:
    w (dN dS) per column.

    Example row (2 columns):
    0.0000 (0.0000 0.0207) 0.0000 (0.0000 0.0421)

    Args:
        lines: iterable of output lines from the NG86 section.
        results: dict to populate; results[name1][name2]["NG86"] maps to
            a dict with keys "omega", "dN" and "dS".

    Returns:
        Tuple (results, sequences) where sequences is the list of
        sequence names in the order they appeared.
    """
    sequences = []
    for line in lines:
        # Find all floating point numbers in this line (raw strings avoid
        # invalid-escape warnings for \d and \s on Python 3).
        line_floats_res = re.findall(r"-*\d+\.\d+", line)
        line_floats = [float(val) for val in line_floats_res]
        # Matrix rows start with the sequence name followed by a run of
        # whitespace padding.
        matrix_row_res = re.match(r"(.+)\s{5,15}", line)
        if matrix_row_res is not None:
            seq_name = matrix_row_res.group(1).strip()
            sequences.append(seq_name)
            results[seq_name] = {}
            # Each earlier sequence contributes three numbers per row:
            # omega (dN/dS), dN and dS.
            for i in range(0, len(line_floats), 3):
                NG86 = {}
                NG86["omega"] = line_floats[i]
                NG86["dN"] = line_floats[i + 1]
                NG86["dS"] = line_floats[i + 2]
                results[seq_name][sequences[i // 3]] = {"NG86": NG86}
                results[sequences[i // 3]][seq_name] = {"NG86": NG86}
    return (results, sequences)
def parse_yn00(lines, results, sequences):
    """Parse the Yang & Nielsen (2000) part of the results.

    Yang & Nielsen results are organized in a table with each row
    comprising one pairwise species comparison. Rows are labeled by
    sequence number (1-based) rather than by sequence name.

    Example (header row and first table row):
      seq. seq.     S       N        t   kappa   omega     dN +- SE    dS +- SE
       2    1    67.3   154.7   0.0136  3.6564  0.0000 -0.0000 +- 0.0000  0.0150 +- 0.0151

    Args:
        lines: iterable of output lines from the YN00 section.
        results: dict pre-populated so that results[name1][name2] exists
            for each compared pair; a "YN00" sub-dict is added in place.
        sequences: list of sequence names indexed by the 1-based numbers
            used in the table.

    Returns:
        The updated results dict.
    """
    for line in lines:
        # Find all floating point numbers in this line.
        line_floats_res = re.findall(r"-*\d+\.\d+", line)
        line_floats = [float(val) for val in line_floats_res]
        # Table rows start with the two 1-based sequence indices.
        row_res = re.match(r"\s+(\d+)\s+(\d+)", line)
        if row_res is not None:
            seq1 = int(row_res.group(1))
            seq2 = int(row_res.group(2))
            seq_name1 = sequences[seq1 - 1]
            seq_name2 = sequences[seq2 - 1]
            YN00 = {}
            YN00["S"] = line_floats[0]
            YN00["N"] = line_floats[1]
            YN00["t"] = line_floats[2]
            YN00["kappa"] = line_floats[3]
            YN00["omega"] = line_floats[4]
            YN00["dN"] = line_floats[5]
            YN00["dN SE"] = line_floats[6]
            YN00["dS"] = line_floats[7]
            YN00["dS SE"] = line_floats[8]
            results[seq_name1][seq_name2]["YN00"] = YN00
            results[seq_name2][seq_name1]["YN00"] = YN00
            seq_name1 = None
            seq_name2 = None
    return results
def parse_others(lines, results, sequences):
    """Parse the results from the other methods (LWL85, LWL85m, LPB93).

    The remaining methods are grouped together. Statistics for all three
    are listed for each of the pairwise species comparisons, with each
    method's results on its own line.

    NaN markers ("-nan", or Windows forms such as "-1.#IND") are stored
    as None.

    Example:
      2 (Pan_troglo) vs. 1 (Homo_sapie)
      LWL85:  dS =  0.0227 dN =  0.0000 w = 0.0000 S =  45.0 N = 177.0
      LWL85m: dS =    -nan dN =    -nan w =   -nan S =  -nan N =  -nan (rho = -nan)
      LPB93:  dS =  0.0129 dN =  0.0000 w = 0.0000

    Args:
        lines: iterable of output lines for this section.
        results: dict pre-populated so that results[name1][name2] exists
            for each compared pair; per-method sub-dicts are added.
        sequences: unused here; kept for interface compatibility with the
            sibling parse_* functions.

    Returns:
        The updated results dict.
    """
    seq_name1 = None
    seq_name2 = None
    for line in lines:
        # Pair headers look like: "2 (Pan_troglo) vs. 1 (Homo_sapie)".
        comp_res = re.match(r"\d+ \((.+)\) vs. \d+ \((.+)\)", line)
        if comp_res is not None:
            seq_name1 = comp_res.group(1)
            seq_name2 = comp_res.group(2)
        elif seq_name1 is not None and seq_name2 is not None:
            if "dS =" in line:
                stats = {}
                line_stats = line.split(":")[1].strip()
                # Capture each "name = value" pair with the value token
                # taken in full.  (The previous fixed-width pattern
                # ".{7,8}?" lazily matched 7 characters and silently
                # truncated dN/dS values, e.g. 0.0227 -> 0.022.)
                res_matches = re.findall(r"([dSNwrho]{1,3})\s*=\s*([^\s)]+)",
                                         line_stats)
                for stat, value in res_matches:
                    try:
                        num = float(value)
                    except ValueError:
                        # Non-numeric NaN markers such as "-1.#IND".
                        num = None
                    else:
                        if num != num:  # float("nan") parses; map to None.
                            num = None
                    stats[stat] = num
                if "LWL85:" in line:
                    results[seq_name1][seq_name2]["LWL85"] = stats
                    results[seq_name2][seq_name1]["LWL85"] = stats
                elif "LWL85m" in line:
                    results[seq_name1][seq_name2]["LWL85m"] = stats
                    results[seq_name2][seq_name1]["LWL85m"] = stats
                elif "LPB93" in line:
                    results[seq_name1][seq_name2]["LPB93"] = stats
                    results[seq_name2][seq_name1]["LPB93"] = stats
    return results
| 44.215385
| 85
| 0.540884
|
4a0586e8ae07e766d3053324f4da13149773c3a1
| 3,082
|
py
|
Python
|
gltbx/extract_opengl_specs.py
|
dperl-sol/cctbx_project
|
b9e390221a2bc4fd00b9122e97c3b79c632c6664
|
[
"BSD-3-Clause-LBNL"
] | 155
|
2016-11-23T12:52:16.000Z
|
2022-03-31T15:35:44.000Z
|
gltbx/extract_opengl_specs.py
|
dperl-sol/cctbx_project
|
b9e390221a2bc4fd00b9122e97c3b79c632c6664
|
[
"BSD-3-Clause-LBNL"
] | 590
|
2016-12-10T11:31:18.000Z
|
2022-03-30T23:10:09.000Z
|
gltbx/extract_opengl_specs.py
|
dperl-sol/cctbx_project
|
b9e390221a2bc4fd00b9122e97c3b79c632c6664
|
[
"BSD-3-Clause-LBNL"
] | 115
|
2016-11-15T08:17:28.000Z
|
2022-02-09T15:30:14.000Z
|
"""
To update opengl_specs.txt:
1. In an empty temporary directory run:
wget_opengl_specs.csh
2. In $GLTBX_DIST run:
python extract_opengl_specs.py /tmpdir/html/*/*.html > opengl_specs.txt
"""
from __future__ import absolute_import, division, print_function
import sys, os
sequential_defines = ["GL_AUX", "GL_CLIP_PLANE", "GL_LIGHT"]

def extract_defines(html_string, all_defines):
  """Add to all_defines every GL_/GLU_ constant named in <STRONG> markup.

  A name is kept only if it is fully upper-case, alphanumeric apart from
  underscores, has no trailing underscore, and is not an exact or
  integer-suffixed member of a sequential_defines family (e.g. GL_LIGHT0).
  """
  def _is_sequential(name):
    # True for exact family names and for "family + integer" members.
    for family in sequential_defines:
      if name == family:
        return True
      if name.startswith(family):
        suffix = name[len(family):]
        try:
          int(suffix)
        except ValueError:
          continue
        return True
    return False
  tag = "<STRONG>"
  for token in html_string.split():
    if not (token.startswith(tag + "GL_") or token.startswith(tag + "GLU_")):
      continue
    end = token.find("</STRONG>")
    if end < 0:
      continue
    name = token[len(tag):end]
    if name.endswith("_"):
      continue
    if name.upper() != name:
      continue
    if not name.replace("_", "").isalnum():
      continue
    if not _is_sequential(name):
      all_defines.add(name)
def extract_signatures(html_string, all_signatures):
  """Append the C function signatures found in a man-page HTML string.

  Collects the lines between a "C SPECIFICATION" heading and the next
  "PARAMETERS"/"DESCRIPTION" heading, strips markup, normalizes
  whitespace and parentheses, and joins continuation lines (a line
  ending in a comma continues the current signature).
  """
  signature_block = []  # NOTE(review): appears unused — confirm before removal.
  active_block = False
  current_line = None
  c_specification = "<STRONG>C</STRONG> <STRONG>SPECIFICATION</STRONG>"
  for line in html_string.splitlines():
    if (line.strip() == c_specification):
      active_block = True
    elif (line.strip() in [
        "<STRONG>PARAMETERS</STRONG>",
        "<STRONG>DESCRIPTION</STRONG>"]):
      active_block = False
      # Flush the signature that was being accumulated, if any.
      if (current_line is not None):
        current_line = current_line.strip()
        if (len(current_line) > 0):
          all_signatures.append(current_line)
        current_line = None
    elif (active_block):
      line = line.expandtabs()
      line = line.replace("<STRONG>", "").replace("</STRONG>", "")
      line = line.replace("<EM>", "").replace("</EM>", "")
      # GLU callback parameters are rewritten to a uniform spelling.
      line = line.replace("GLvoid (*CallBackFunc)(", "glu_function_pointer fn")
      line = line.replace("(", " ( ")
      line = line.replace(")", " ) ")
      line = " ".join(line.split())
      if (current_line is None):
        current_line = line
      elif (current_line.endswith(",")):
        # A trailing comma means the signature continues on this line.
        current_line += " " + line
      else:
        current_line = current_line.strip()
        if (len(current_line) > 0):
          all_signatures.append(current_line)
        current_line = line
def run(args):
  """Print sorted GL defines, then all C signatures, from the given files.

  Args:
    args: paths of OpenGL reference HTML files; files named index.html
        (case-insensitive) are skipped for signature extraction.

  Raises:
    AssertionError: if a non-index page yields no signatures.
  """
  all_defines = set()
  for arg in args:
    # "with" closes each file promptly instead of relying on GC.
    with open(arg) as f:
      extract_defines(html_string=f.read(), all_defines=all_defines)
  for define in sorted(all_defines):
    print(define)
  #
  all_signatures = []
  for arg in args:
    if (os.path.basename(arg).lower() == "index.html"): continue
    prev_len = len(all_signatures)
    with open(arg) as f:
      extract_signatures(
        html_string=f.read(),
        all_signatures=all_signatures)
    # Every non-index man page is expected to contribute a signature.
    assert len(all_signatures) > prev_len
  for signature in all_signatures:
    print(signature)

if (__name__ == "__main__"):
  run(sys.argv[1:])
| 32.104167
| 79
| 0.623621
|
4a05892cff3e4f050528c59373abaf5c0e07c92b
| 1,846
|
py
|
Python
|
app/core/migrations/0001_initial.py
|
MouadLo/recipe-app-api
|
9d72136357ec92111bd13156791e06e1cb605a52
|
[
"MIT"
] | null | null | null |
app/core/migrations/0001_initial.py
|
MouadLo/recipe-app-api
|
9d72136357ec92111bd13156791e06e1cb605a52
|
[
"MIT"
] | null | null | null |
app/core/migrations/0001_initial.py
|
MouadLo/recipe-app-api
|
9d72136357ec92111bd13156791e06e1cb605a52
|
[
"MIT"
] | 1
|
2022-02-23T04:18:37.000Z
|
2022-02-23T04:18:37.000Z
|
# Generated by Django 4.0.2 on 2022-02-23 06:07
from django.db import migrations, models
import django.db.models.manager
class Migration(migrations.Migration):
    """Initial migration: creates the custom email-based User model."""

    initial = True

    dependencies = [
        ('auth', '0012_alter_user_first_name_max_length'),
    ]

    operations = [
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
                ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
                ('email', models.EmailField(max_length=255, unique=True)),
                ('name', models.CharField(max_length=255)),
                ('is_active', models.BooleanField(default=True)),
                ('is_staff', models.BooleanField(default=False)),
                ('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
                ('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
            ],
            options={
                'abstract': False,
            },
            managers=[
                # NOTE(review): the manager is registered as 'object'
                # (singular); presumably it mirrors the model's manager
                # attribute — confirm it is not a typo for 'objects'.
                ('object', django.db.models.manager.Manager()),
            ],
        ),
    ]
| 48.578947
| 266
| 0.630553
|
4a0589fee6a3e2074bb7023500af30f23f8a57e5
| 17,505
|
py
|
Python
|
Logistic_regression_analysis.py
|
yochaiedlitz/T2DM_UKB_predictions
|
1e6b22e3d51d515eb065d7d5f46408f86f33d0b8
|
[
"MIT"
] | 1
|
2022-01-17T13:13:02.000Z
|
2022-01-17T13:13:02.000Z
|
Logistic_regression_analysis.py
|
yochaiedlitz/T2DM_UKB_predictions
|
1e6b22e3d51d515eb065d7d5f46408f86f33d0b8
|
[
"MIT"
] | null | null | null |
Logistic_regression_analysis.py
|
yochaiedlitz/T2DM_UKB_predictions
|
1e6b22e3d51d515eb065d7d5f46408f86f33d0b8
|
[
"MIT"
] | null | null | null |
import pandas as pd
import numpy as np
import pickle
import os
from sklearn.linear_model import LogisticRegressionCV
from sklearn.metrics import roc_auc_score, make_scorer,roc_curve, auc,average_precision_score,precision_recall_curve
import matplotlib.pyplot as plt
import matplotlib
import shap
import seaborn as sns
from matplotlib.backends.backend_pdf import PdfPages
import matplotlib.image as mpimg
import sys
from addloglevels import sethandlers
import glob
from UKBB_Func_Final import SAVE_FOLDER, RUN_NAME
plt.ion() # Enables closing of plt figures on the run
sethandlers() # set handlers for queue_tal jobs
Qworker = '/home/edlitzy/pnp3/lib/queue_tal/qworker.py'
BASIC_FOLDER_NAME=SAVE_FOLDER
if not os.path.exists(SAVE_FOLDER):
os.makedirs(SAVE_FOLDER)
LR_folder_name=os.path.join(BASIC_FOLDER_NAME, "LR_comparison") #Folder to save LR comparison results
all_folders=os.listdir(BASIC_FOLDER_NAME)
relevant_folder_names=[x for x in all_folders if not(x.endswith("pdfs") or x.endswith("shap_folder") or x.endswith("imputed"))]
SHAP_FOLDER_PATH=os.path.join(BASIC_FOLDER_NAME,LR_folder_name,"shap_folder")
PDF_FOLDER_PATH=os.path.join(BASIC_FOLDER_NAME,LR_folder_name,"pdfs")
if not os.path.exists(LR_folder_name):
os.makedirs(LR_folder_name)
if not os.path.exists(SHAP_FOLDER_PATH):
os.makedirs(SHAP_FOLDER_PATH)
if not os.path.exists(PDF_FOLDER_PATH):
os.makedirs(PDF_FOLDER_PATH)
Use_Fake_Que=False
if Use_Fake_Que:
from queue_tal.qp import fakeqp as qp
else:
from queue_tal.qp import qp
# # Job_name_Array = ["Q_Vasc", "Q_Diab", "Q_Heart", "Q_Stroke", "Q_Angina", "Q_B_Pressure"]
# # if ALL_FEATURES:
# # FEAT_PATH = ["Diabetes_Features.csv", "Diabetes_Features.csv", "Diabetes_Features.csv", "Diabetes_Features.csv",
# # "Diabetes_Features.csv", "Diabetes_Features.csv"] # Full Diabetes features
# # else:
# # FEAT_PATH = ["Top_Vasc_Features.csv", "Top_Diabetes_Features.csv", "Top_Vasc_Features.csv", "Top_Vasc_Features.csv",
# # "Top_Vasc_Features.csv",
# # "Top_Vasc_Features.csv"] # Full Diabetes features
# # Job_ID = ["6150-0.0", "2443-0.0", "6150-0.0", "6150-0.0", "6150-0.0", "6150-0.0"]
# # File_Name_Array = ["Vascular_Healthy_Comb.csv", "Diabetes_Healthy_Comb.csv", "Heart_att_Healthy_Comb.csv",
# # "Stroke_Healthy_Comb.csv", "Angina_Healthy_Comb.csv", "Blood_P_Healthy_Comb.csv"]
# # # No_symp_array = [0, -7, ]
# # Sub_Class_array = ["All", "All", 1, 3, 2, 4, ]
# "2443":"Diabetes diagnosed by doctor",
# 1 Yes
# 0 No
# -1 Do not know
# -3 Prefer not to answer
# "6150":Vascular/heart problems diagnosed by doctor"
# 1 Heart attack
# 2 Angina
# 3 Stroke
# 4 High blood pressure
# -7 None of the above
# -3 Prefer not to answer
# "6152":"Blood clot, DVT, bronchitis, emphysema, asthma, rhinitis, eczema, allergy diagnosed by doctor",
# 5 Blood clot in the leg (DVT)
# 7 Blood clot in the lung
# 6 Emphysema/chronic bronchitis
# 8 Asthma
# 9 Hayfever, allergic rhinitis or eczema
# -7 None of the above
# -3 Prefer not to answer
# "2453":"Cancer diagnosed by doctor"
# 1 Yes - you will be asked about this later by an interviewer
# 0 No
# -1 Do not know
# -3 Prefer not to answer
# "2463":"Fractured/broken bones in last 5 years"
# 1 Yes
# 0 No
# -1 Do not know
# -3 Prefer not to answer
# "4041": Gestational Diabetes
# 1 Yes
# 0 No
# -2 Not applicable
# -1 Do not know
# -3 Prefer not to answer
sys.path
# explicitly require this experimental feature
# from sklearn.experimental import enable_iterative_imputer # noqa
# now you can import normally from sklearn.impute
def roc_auc_score_proba(y_true, proba):
    """Return the ROC AUC of predicted probabilities against true labels.

    Thin wrapper so make_scorer(..., needs_proba=True) can feed
    probabilities (rather than hard labels) to roc_auc_score.
    """
    return roc_auc_score(y_true, proba)
auc_score = make_scorer(roc_auc_score_proba, needs_proba=True)
def standarise_df(df):
    """Z-score the value columns of df, passing "*_na" columns through.

    Columns ending in "_na" are treated as missing-value indicator
    columns and copied unchanged.  Value columns are standardised with
    population statistics (np.mean / np.std, ddof=0).  Any column whose
    standardised values contain NaN (e.g. a constant column with zero
    standard deviation) is restored to its original raw values.

    Args:
        df: pandas DataFrame of features.

    Returns:
        A DataFrame with the same columns as df (value columns first,
        then the "_na" indicator columns).
    """
    fit_col = df.columns
    x_std_col = [x for x in fit_col if not x.endswith("_na")]
    x_na_col = [x for x in fit_col if x.endswith("_na")]
    x_train_std = df[x_std_col]
    x_train_std = (x_train_std - np.mean(x_train_std, axis=0)) / np.std(x_train_std, axis=0)
    # Columns that became NaN (zero std) fall back to their raw values.
    x_train_std_na_col = x_train_std.loc[:, x_train_std.isna().sum() > 0].columns.values
    x_train_std.loc[:, x_train_std.isna().sum() > 0] = df.loc[:, x_train_std_na_col]
    x_train_std[x_na_col] = df[x_na_col]
    return x_train_std
def compute_lr(job_name, Basic_folder_name="/home/edlitzy/UKBB_Tree_Runs/For_article/", penalty="l2",
               Prob_HYP_PAR_ITER=200, Choose_N_Fold=3, impute_val_dict=None,
               strategy='most_frequent', score=auc_score, standarize=True, impute=False):
    """Fit a cross-validated logistic regression on a saved train/test split.

    Loads the pickled "Diabetestrain_Data"/"Diabetestest_Data" dicts from
    <Basic_folder_name>/<job_name>/Diabetes_Results, drops rows/columns
    with any NaN, optionally z-scores the features, and fits a balanced
    LogisticRegressionCV.

    Args:
        job_name: Sub-folder holding the pickled split.
        Basic_folder_name: Root folder of the saved runs.
        penalty: Regularization penalty for LogisticRegressionCV.
        Prob_HYP_PAR_ITER: Stored in a local only; currently not passed
            to the model — TODO confirm intent.
        Choose_N_Fold: Number of CV folds.
        impute_val_dict: Optional imputation values. None (the new
            default) replaces the original mutable default `{}`, which
            would be shared across calls. Currently unused in this body.
        strategy: Imputation strategy; currently unused in this body.
        score: Scorer used by LogisticRegressionCV.
        standarize: If True, z-score features via standarise_df.
        impute: Imputation flag; currently unused in this body.

    Returns:
        (X_train_fit, y_train, X_test_fit, y_test, X_train, X_test,
        y_proba, clf, Rel_Feat_Names, cat_names)
    """
    if impute_val_dict is None:
        impute_val_dict = {}
    Hyp_Param_Dict_LR_cs = Prob_HYP_PAR_ITER
    final_folder = os.path.join(Basic_folder_name, job_name, "Diabetes_Results")
    train_data_path = os.path.join(final_folder, "Diabetestrain_Data")
    test_data_path = os.path.join(final_folder, "Diabetestest_Data")
    with open(train_data_path, 'rb') as fp:
        train_Data = pickle.load(fp)
    with open(test_data_path, 'rb') as fp:
        test_Data = pickle.load(fp)
    y_train = train_Data["DF_Targets"]
    X_train = train_Data["df_Features"]
    y_test = test_Data["DF_Targets"]
    X_test = test_Data["df_Features"]
    cat_names = test_Data["cat_names"]
    Rel_Feat_Names = test_Data["Rel_Feat_Names"]
    # Drop any column, then any row, still containing NaN.
    X_train.dropna(how="any", axis=1, inplace=True)
    X_test.dropna(how="any", axis=1, inplace=True)
    X_train.dropna(how="any", axis=0, inplace=True)
    X_test.dropna(how="any", axis=0, inplace=True)
    X_train_fit = X_train
    X_test_fit = X_test
    if standarize:
        X_train_fit = standarise_df(X_train_fit)
        X_test_fit = standarise_df(X_test_fit)
    clf = LogisticRegressionCV(cv=Choose_N_Fold, random_state=None, penalty=penalty,
                               scoring=score, class_weight="balanced")
    clf.fit(X_train_fit, y_train.values.flatten())
    y_proba = clf.predict_proba(X_test_fit)
    return X_train_fit, y_train, X_test_fit, y_test, X_train, X_test, y_proba, clf, Rel_Feat_Names, cat_names
# imputation of median instead on nan
def pdf_save(pdf, current_figure=None, DPI=200, plot=False):
    """Save a figure into an open PdfPages object and optionally show it.

    Args:
        pdf: matplotlib PdfPages object, or a falsy value to skip saving.
        current_figure: figure to save; None (the new default, replacing
            the original mutable default `[]`) lets PdfPages.savefig and
            plt.close fall back to the active figure.
        DPI: resolution passed to savefig.
        plot: if True, call plt.show() afterwards.
    """
    if pdf:
        print("saving pdf at plot_roc to: ", pdf)
        plt.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0, rect=[0, 0.03, 1, 0.95])
        pdf.savefig(current_figure, dpi=DPI)
        plt.close(current_figure)
    if plot:
        plt.show()
def finalise_roc(ax, lw=2, font_size=10):
    """Add the chance diagonal, limits, labels and legend to a ROC axes.

    Args:
        ax: matplotlib axes holding previously plotted ROC curves.
        lw: line width of the dashed diagonal.
        font_size: base font size (ticks use font_size - 4).

    Returns:
        The same axes, for chaining.
    """
    ax.plot([0, 1], [0, 1], color='navy', lw=lw, linestyle='--')
    ax.set_xlim([0.0, 1.0])
    ax.set_ylim([0.0, 1.05])
    ax.set_xlabel('False Positive Rate', fontsize=font_size)
    ax.set_ylabel('True Positive Rate', fontsize=font_size)
    ax.tick_params(axis="x", labelsize=font_size - 4)
    ax.tick_params(axis="y", labelsize=font_size - 4)
    ax.legend(loc="best")
    # ax.set_title('Receiver operating curve', fontsize=font_size)
    return ax
def finalise_PR(ax, lw=2, font_size=10):
    """Apply the standard precision-recall-plot cosmetics to *ax* and return it.

    Labels the axes, shrinks the tick labels, places the legend and fixes
    both axis ranges to the unit interval (with a little headroom on y).
    """
    ax.set_xlabel('Recall', fontsize=font_size)
    ax.set_ylabel('Precision', fontsize=font_size)
    for axis_name in ("x", "y"):
        ax.tick_params(axis=axis_name, labelsize=font_size - 4)
    ax.legend(loc='best')
    ax.set_ylim([0, 1.01])
    ax.set_xlim([0, 1.])
    # ax.set_title('Precision recall curve', fontsize=font_size )
    return ax
def plot_roc(y, y_prob, pdf=None, legend="ROC", ax=None, color="b"):
    """Draw one ROC curve for (y, y_prob) on *ax* and return (ax, roc_auc).

    Parameters
    ----------
    y : array-like
        Binary ground-truth labels.
    y_prob : array-like
        Predicted scores/probabilities for the positive class.
    pdf : PdfPages or None
        Unused here; accepted for call-site compatibility (saving is done
        by the caller via ``pdf_save``).
    legend : str
        Prefix for the legend entry; the AUC value is appended.
    ax : matplotlib Axes or None
        Axes to draw on.  Callers always pass one; the previous default
        was a mutable ``[]`` placeholder that was never usable.
    color : str
        Matplotlib color for the curve.
    """
    fpr, tpr, _ = roc_curve(y, y_prob)
    roc_auc = auc(fpr, tpr)
    lw = 2
    ax.plot(fpr, tpr, lw=lw, color=color, label=legend + " curve (area = %0.2f)" % roc_auc)
    return ax, roc_auc
def plot_aps(y, y_prob, legend="ROC", ax=None, lw=2, color="b"):
    """Draw a precision-recall step curve on *ax* and return (ax, aps).

    The legend entry combines the average precision score (APS) with the
    prevalence, and a dashed horizontal line marks the prevalence — i.e.
    the precision of a no-skill classifier.

    Parameters
    ----------
    y : array-like
        Binary ground-truth labels.
    y_prob : array-like
        Predicted scores/probabilities for the positive class.
    legend : str
        Prefix for the legend entry.
    ax : matplotlib Axes or None
        Axes to draw on.  Callers always pass one; the previous default
        was a mutable ``[]`` placeholder that was never usable.
    lw : int
        Line width of the step curve.
    color : str
        Matplotlib color for the curve.
    """
    precision, recall, _ = precision_recall_curve(y, y_prob)
    aps = average_precision_score(y, y_prob)
    ax.step(recall, precision, where='post',
            label=legend + ' APS={0:0.2f}'.format(aps) + ', Prevalence= {0:0.3f}'.format(precision[0]), lw=lw,
            color=color)
    # precision[0] is the prevalence (precision at recall=1 for sklearn's
    # precision_recall_curve output ordering).
    ax.axhline(y=precision[0], color='r', linestyle='--')
    return ax, aps
def Linear_shap(clf, X_train_fit, Rel_Feat_Names, X_test_fit, x_lim=(-10, 10), max_features=10, pdf=None,
                shap_img_path=None):
    """Render a SHAP summary plot for a linear model and append it to *pdf*.

    The plot is first saved as a PNG at *shap_img_path* and then re-loaded
    as an image into a fresh figure before being written to the PDF —
    presumably because shap's own figure does not embed cleanly in
    PdfPages (TODO confirm).

    Parameters
    ----------
    clf : fitted linear model (e.g. LogisticRegressionCV)
    X_train_fit, X_test_fit : pandas.DataFrame
        Train/test feature frames used to build and explain the model.
    Rel_Feat_Names : sequence of str
        Display names for the features.
    x_lim : tuple
        Currently unused (kept for call-site compatibility).
    max_features : int
        Maximum number of features shown in the summary plot.
    pdf : PdfPages or None
        Open PDF handle; None skips the PDF save.
    shap_img_path : str or None
        Path for the intermediate PNG (required; the previous default was
        a mutable ``[]`` placeholder).
    """
    # feature_dependence="independent" is the recommended mode for linear models.
    explainer = shap.LinearExplainer(clf, X_train_fit.values, feature_dependence="independent")
    shap_values = explainer.shap_values(X_test_fit.values).astype(np.double)
    shap.summary_plot(shap_values, X_test_fit.values, feature_names=Rel_Feat_Names, sort=True,
                      max_display=max_features, show=False)
    plt.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0, rect=[0, 0.03, 1, 0.95])
    plt.savefig(shap_img_path, bbox_inches='tight', dpi=800)
    plt.close("all")
    # Round-trip through the PNG so the PDF page contains the rendered image.
    img = mpimg.imread(shap_img_path)
    plt.imshow(img)
    plt.axis("off")
    fig = plt.gcf()
    pdf_save(pdf=pdf, current_figure=fig)
    plt.close("all")
def linear_features_importance(Rel_Feat_Names, clf, title="Feature importance using Lasso Model", figsize=(12, 9),
                               num_feat=10, pdf=None):
    """Horizontal bar plot of the most extreme model coefficients.

    Shows the *num_feat* most negative and *num_feat* most positive
    covariate coefficients of *clf*, then appends the figure to *pdf*.

    Parameters
    ----------
    Rel_Feat_Names : sequence of str
        Covariate display names, aligned with ``clf.coef_``.
    clf : fitted linear model
        Must expose a ``coef_`` array.
    title : str
        Plot title.
    figsize : tuple
        Figure size in inches.
    num_feat : int
        Number of features taken from each end of the ranking.
    pdf : PdfPages or None
        Open PDF handle (previous default was a mutable ``[]``).
    """
    imp_coef = pd.DataFrame({"Covariates names": Rel_Feat_Names, "Covariate coefficient": clf.coef_.flatten()})
    imp_coef_sorted = imp_coef.sort_values(by="Covariate coefficient", ascending=True)
    imp_coef_sorted.set_index("Covariates names", inplace=True)
    # Select the num_feat smallest and num_feat largest coefficients.
    # NOTE(review): rows can repeat if there are fewer than 2*num_feat
    # features — confirm the inputs are always wide enough.
    top_feat = list(np.arange(0, num_feat))
    bot_feat = list(np.arange(-num_feat, 0))
    tot_feat = top_feat + bot_feat
    top_20_coeff = imp_coef_sorted.iloc[tot_feat, :]
    matplotlib.rcParams['figure.figsize'] = figsize
    top_20_coeff.plot.barh(figsize=figsize)
    ax = plt.gca()
    ax.set_yticklabels(top_20_coeff.index.values, fontsize=12)
    ax.set_xlabel("Covariates coefficients", fontsize=12)
    ax.set_title(title, fontsize=14)
    pdf_save(pdf=pdf, current_figure=plt.gcf())
def plot_quantiles_curve(y_pred_val, y_test_val, test_label="Quantiles", bins=100, low_quantile=0.8, top_quantile=1,
                         figsize=(16, 9), ax=None, pop1=None, pop1_legend=None, font_size=96, color="plasma", alpha=1,
                         legend="Precentile", plot_now=False, pdf=None):
    """Bar-plot the outcome prevalence within prediction-score quantile bins.

    Splits the predicted scores into quantile bins between *low_quantile*
    and *top_quantile* (bin width 1/bins) and, for each bin, plots the
    fraction of positive labels among the samples whose score falls in it.

    Parameters
    ----------
    y_pred_val, y_test_val : array-like
        Predicted scores and matching binary ground-truth labels.
    test_label, figsize, plot_now, pdf : unused here; kept for call-site
        compatibility (saving/plotting is handled by the caller).
    bins : int
        Number of quantile bins across the full [0, 1] range.
    low_quantile, top_quantile : float
        Quantile range actually displayed.
    ax : matplotlib Axes
        Axes to draw on (callers always pass one).
    pop1, pop1_legend : previous bar container and its legend label, used
        to build a combined legend when overlaying two populations.
    font_size : int
        Base font size for labels and ticks.
    color : str
        Seaborn palette name, or a plain color if the palette lookup fails.
    alpha : float
        Bar transparency.
    legend : str
        Legend label for this population's bars.

    Returns
    -------
    (ax, pop) : the axes and this call's bar container.
    """
    vals_df = pd.DataFrame(data={"Y_test": y_test_val, "Y_Pred": y_pred_val})
    res = 1. / bins
    # Quantile edges, truncated to 2 decimals to get stable bin labels.
    quants_bins = [int(x * 100) / 100. for x in np.arange(low_quantile, top_quantile + res / 2, res)]
    vals_df = vals_df.sort_values("Y_Pred", ascending=False)
    Quants = vals_df.loc[:, "Y_Pred"].quantile(quants_bins)
    Rank = pd.DataFrame()
    for ind, quant in enumerate(Quants.values[:-1]):
        # Samples whose score lies in the half-open bin (quant, next edge].
        in_bin = vals_df.loc[(vals_df["Y_Pred"] > quant) & (vals_df["Y_Pred"] <= Quants.values[ind + 1])]
        # Fix: np.str was removed in NumPy >= 1.20; the builtin str() is equivalent.
        Rank.loc[str(ind), "Diagnosed"] = in_bin.loc[:, 'Y_test'].sum()
        Rank.loc[str(ind), "All"] = in_bin.loc[:, 'Y_test'].count()
        Rank.loc[str(ind), "Ratio"] = Rank.loc[str(ind), "Diagnosed"] / Rank.loc[str(ind), "All"]
    try:
        my_colors = sns.color_palette(color, Rank.shape[0])
    except Exception:
        # Fall back to using *color* directly when it is not a palette name.
        my_colors = color
    width = 0.8
    x = [item - width / 2 for item in np.arange(len(Rank.index.values))]
    labels = [str(int(100 * item)) for item in np.arange(low_quantile + res, top_quantile + res / 2, res)]
    # Fix: the ``left`` keyword of Axes.bar was removed in matplotlib >= 2.2;
    # the bar positions are the first positional argument.
    pop = ax.bar(x, height=Rank.loc[:, "Ratio"], width=width, align='center', color=my_colors, tick_label=labels,
                 alpha=alpha)
    ax.set_xlabel("Prediction quantile", fontsize=font_size)
    ax.set_ylabel("Prevalence in quantile", fontsize=font_size)
    ax.tick_params(axis='both', which='major', labelsize=font_size - 6, rotation=70)
    ax.tick_params(axis='both', which='minor', labelsize=font_size - 8, rotation=70)
    if pop1:
        # Combined legend for the overlaid populations (this call + pop1).
        plt.legend([pop, pop1], [legend, pop1_legend], fontsize=font_size)
    return ax, pop
def sort_csv(csvs_path):
    """Merge every per-job CSV in *csvs_path*, rank by "LR ROC AUC", and persist.

    Reads all ``*.csv`` files in the directory, concatenates them, sorts
    the combined table in descending order of the "LR ROC AUC" column,
    writes it to ``ranks_csv.csv`` in the same directory, and returns it.

    A previously written ``ranks_csv.csv`` is skipped so that re-running
    the ranking does not double-count earlier results (the original code
    would re-ingest its own output on a second run).
    """
    out_path = os.path.join(csvs_path, "ranks_csv.csv")
    all_filenames = [f for f in glob.glob(os.path.join(csvs_path, "*.csv"))
                     if os.path.basename(f) != "ranks_csv.csv"]
    print(csvs_path)
    print(all_filenames)
    combined_csv = pd.concat([pd.read_csv(f, index_col=0) for f in all_filenames])
    combined_csv = combined_csv.sort_values(by="LR ROC AUC", ascending=False)
    combined_csv.to_csv(out_path)
    # [os.remove(file) for file in all_filenames]
    return combined_csv
def summary_logistics_plots(Basic_folder_name, job_name, pdf_path, strategy='most_frequent',
                            score=auc_score, Prob_HYP_PAR_ITER=100, N_Fold=3):
    """Build the full per-job report PDF: ROC/P-R curves, SHAP summary,
    coefficient importances and quantile-prevalence bars for both the
    logistic-regression model (fit here) and the pre-computed GBDT
    predictions (loaded from pickles), and write the LR/GBDT AUC and APS
    scores to a per-job CSV.

    Parameters: Basic_folder_name root results directory; job_name the
    dataset/job subfolder; pdf_path output PDF path; strategy imputation
    strategy forwarded to compute_lr; score scorer for the LR CV;
    Prob_HYP_PAR_ITER hyper-parameter iteration budget; N_Fold CV folds.
    """
    # NOTE(review): the comment below predates PdfPages being mandatory —
    # pdf_path is now always opened, so an empty pdf_path would fail here.
    # if the variable pdf_path=[], then the functions will not save to PDF baut rather will plot.show()
    data_name = job_name
    shap_img_path=os.path.join(SHAP_FOLDER_PATH,job_name+".png")
    pdf=PdfPages(pdf_path)
    # One row per job; filled with the four headline metrics below.
    results_df = pd.DataFrame(index=[job_name], columns=["LR ROC AUC", "LR P-R APS", "GBDT ROC AUC", "GBDT P-R APS"])
    # Fit the L2 logistic-regression baseline on this job's train/test split.
    X_train_fit, y_train, X_test_fit, y_test, X_train, X_test, y_proba, clf, Rel_Feat_Names, cat_names = compute_lr(
        job_name=job_name, Basic_folder_name=Basic_folder_name, penalty="l2",
        Prob_HYP_PAR_ITER=Prob_HYP_PAR_ITER,
        Choose_N_Fold=N_Fold, impute_val_dict={}, strategy=strategy, score=score)
    # GBDT predictions were produced by an earlier pipeline stage and pickled.
    with open(os.path.join(Basic_folder_name, job_name, "Diabetes_Results/y_pred_val_list"), 'rb') as fp:
        y_pred_val = pickle.load(fp)
    with open(os.path.join(Basic_folder_name, job_name, "Diabetes_Results/y_test_val_list"), 'rb') as fp:
        y_test_val = pickle.load(fp)
    # Page 1: ROC (top axes) and precision-recall (bottom axes), LR vs GBDT.
    fig,ax=plt.subplots(2,1)
    ax[0], results_df.loc[job_name, "LR ROC AUC"] = plot_roc(y_test, y_proba[:, 1],legend="LR AUC ",
                                                            ax=ax[0],color="g")
    ax[0], results_df.loc[job_name, "GBDT ROC AUC"] = plot_roc(y_test_val, y_pred_val, pdf=pdf,
                                                              legend="GBDT AUC ", ax=ax[0],color="r")
    ax[0]=finalise_roc(ax[0], lw=2,font_size=10)
    ax[1], results_df.loc[job_name, "LR P-R APS"] = plot_aps(y_test, y_proba[:, 1], legend="LR APS ",
                                                            ax=ax[1],color="r")
    ax[1], results_df.loc[job_name, "GBDT P-R APS"] = plot_aps(y_test_val, y_pred_val,
                                                              legend="GBDT APS ", ax=ax[1],color="g")
    ax[1] = finalise_PR(ax[1],lw=2,font_size=10)
    fig.suptitle("ROC and P-R plots of "+job_name)
    pdf_save(pdf, fig)
    # Persist the headline metrics so sort_csv can rank all jobs later.
    results_df.to_csv(os.path.join(Basic_folder_name, LR_folder_name, job_name + ".csv"))
    # Page 2: SHAP summary for the linear model.
    Linear_shap(clf=clf, X_train_fit=X_train_fit, Rel_Feat_Names=Rel_Feat_Names, X_test_fit=X_test_fit,
                x_lim=(-10, 10), pdf=pdf,shap_img_path=shap_img_path)
    # Page 3: top/bottom coefficient bar chart.
    linear_features_importance(Rel_Feat_Names, clf, title=data_name + " features importance using Lasso Model",
                               num_feat=5, pdf=pdf)
    # Page 4: overlaid quantile-prevalence bars for LR (yellow) and GBDT (blue).
    fig,ax=plt.subplots(1,1)
    ax, pop = plot_quantiles_curve(y_proba[:, 1], y_test.values.flatten(), test_label="data_name",
                                   bins=100, low_quantile=0.5, top_quantile=1, figsize=(16, 9), font_size=12,
                                   color="yellow", alpha=0.8, legend="LR",plot_now=False,ax=ax)
    ax,pop = plot_quantiles_curve(y_pred_val, y_test_val, test_label="data_name", bins=100, low_quantile=0.5, top_quantile=1,
                                  figsize=(16, 9), ax=ax, font_size=12, color="blue", alpha=0.3, legend="GBDT", pop1=pop,
                                  pop1_legend="LR", plot_now=True, pdf=pdf)
    ax.set_title('Quantiles comparison', fontsize=12)
    pdf_save(pdf=pdf, current_figure=fig)
    pdf.close()
    print(("PDF save to: ",pdf_path))
def main():
    """Queue one summary_logistics_plots job per results folder on the
    cluster queue, wait for completion, then rank all jobs' LR ROC AUC
    scores into a single CSV.

    Relies on module-level configuration: relevant_folder_names,
    PDF_FOLDER_PATH, BASIC_FOLDER_NAME and LR_folder_name, plus the qp
    queue wrapper.
    """
    # qp is the cluster job-queue context manager; jobs run remotely.
    with qp(jobname="LogReg", q=['himem7.q'], mem_def='4G', trds_def=1, tryrerun=True,max_u=650, delay_batch=30) as q:
        os.chdir('/net/mraid08/export/jafar/Microbiome/Analyses/Edlitzy/tempq_files/')
        q.startpermanentrun()
        tkns = []  # queue tokens, used to wait for all submitted jobs
        pdf=[]
        for ind,job_name in enumerate(relevant_folder_names):
            pdf_path=os.path.join(PDF_FOLDER_PATH,job_name+".pdf")
            param=(BASIC_FOLDER_NAME, job_name,pdf_path)
            print("job_name:", job_name)
            # Submit one report-building job per results folder.
            tkns.append(q.method(summary_logistics_plots, param))
            if ind == (len(relevant_folder_names) - 1):
                # After the last submission, block until every job finishes.
                print ("Waiting for create_PDF to finish")
                q.waitforresults(tkns)
    # All per-job CSVs now exist; merge and rank them.
    results_df=sort_csv(os.path.join(BASIC_FOLDER_NAME, LR_folder_name))
    print(results_df)
if __name__=="__main__":
    sethandlers()
    main()
| 44.541985
| 128
| 0.659183
|
4a058b5b2230dbdb7f9db4282f67f0f8b6d7e0c3
| 3,161
|
py
|
Python
|
src/validators/ExampleValidator.py
|
JiaZhou-PU/HERON
|
e8dbbfd2d2d308047528e7242932407fc2208e49
|
[
"Apache-2.0"
] | null | null | null |
src/validators/ExampleValidator.py
|
JiaZhou-PU/HERON
|
e8dbbfd2d2d308047528e7242932407fc2208e49
|
[
"Apache-2.0"
] | null | null | null |
src/validators/ExampleValidator.py
|
JiaZhou-PU/HERON
|
e8dbbfd2d2d308047528e7242932407fc2208e49
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020, Battelle Energy Alliance, LLC
# ALL RIGHTS RESERVED
"""
Example class for validators.
"""
import numpy as np
from utils import InputData, InputTypes
from .Validator import Validator
class Example(Validator):
  """
    Example class for validating dispatch decisions.
    Arbitrarily, constrains the absolute change of any resource between
    successive time steps to a constant ``delta''.
  """
  # ---------------------------------------------
  # INITIALIZATION
  @classmethod
  def get_input_specs(cls):
    """
      Set acceptable input specifications.
      @ In, None
      @ Out, specs, InputData, specs
    """
    specs = Validator.get_input_specs()
    specs.name = 'Example'
    specs.description = r"""Uses a demonstration-only validator that constrains the change
              for any resource to a constant ``delta''."""
    specs.addSub(InputData.parameterInputFactory('delta', contentType=InputTypes.FloatType,
        descr=r"""the maximum absolute change in any resource between successive time steps."""))
    specs.addSub(InputData.parameterInputFactory('tolerance', contentType=InputTypes.FloatType,
        descr=r"""the strictness with which the constraint should be enforced. Note that some small
              numerical exception is expected."""))
    return specs

  def __init__(self):
    """
      Constructor.
      @ In, None
      @ Out, None
    """
    # Fix: previously read 'BaseValidator' (copy-paste from the base class);
    # it should identify this validator, consistent with specs.name above.
    self.name = 'Example'
    self._allowable = 0.5    # default maximum absolute per-step change
    self._tolerance = 1e-14  # numerical slack when enforcing the limit

  def read_input(self, inputs):
    """
      Loads settings based on provided inputs
      @ In, inputs, InputData.InputSpecs, input specifications
      @ Out, None
    """
    delta = inputs.findFirst('delta')
    if delta:
      self._allowable = delta.value
    tol = inputs.findFirst('tolerance')
    if tol:
      self._tolerance = tol.value

  # ---------------------------------------------
  # API
  def validate(self, components, dispatch, times):
    """
      Method to validate a dispatch activity.
      @ In, components, list, HERON components whose cashflows should be evaluated
      @ In, dispatch, DispatchState instance, activity by component/resources/time
      @ In, times, np.array(float), time values to evaluate; may be length 1 or longer
      @ Out, errs, list, information about validation failures
    """
    errs = [] # TODO best format for this?
    for comp, info in dispatch._resources.items():
      for res in info:
        for t, time in enumerate(times):
          current = dispatch.get_activity(comp, res, time)
          if t > 0:
            previous = dispatch.get_activity(comp, res, times[t-1])
            delta = current - previous
            sign = np.sign(delta)
            # Flag any step change beyond the allowable ramp (with tolerance),
            # recording the nearest feasible value as the 'limit'.
            if abs(delta) - self._allowable > self._tolerance:
              errs.append({'msg': f'Exceeded ramp of {self._allowable} with {delta:1.8e}',
                           'limit': previous + (sign * self._allowable),
                           'limit_type': 'lower' if (sign < 0) else 'upper',
                           'component': comp,
                           'resource': res,
                           'time': time,
                           'time_index': t,
                          })
    return errs
| 34.736264
| 99
| 0.597912
|
4a058c682544fc4838f46d766853bb2e06603a77
| 861
|
py
|
Python
|
Quickly_Find_Multiple_Left_Rotations_of_Array.py
|
pushpakjalan02/GeeksForGeeks-Solutions
|
22bc880672eca39a1b1fbdab8f9067bee46fa7a2
|
[
"MIT"
] | null | null | null |
Quickly_Find_Multiple_Left_Rotations_of_Array.py
|
pushpakjalan02/GeeksForGeeks-Solutions
|
22bc880672eca39a1b1fbdab8f9067bee46fa7a2
|
[
"MIT"
] | null | null | null |
Quickly_Find_Multiple_Left_Rotations_of_Array.py
|
pushpakjalan02/GeeksForGeeks-Solutions
|
22bc880672eca39a1b1fbdab8f9067bee46fa7a2
|
[
"MIT"
] | null | null | null |
# Quickly find multiple left rotations of an array | Set 1
# Given an array of size n and multiple values around which we need to left rotate the array. How to quickly find multiple left rotations?
# URL: https://www.geeksforgeeks.org/quickly-find-multiple-left-rotations-of-an-array/
import sys
def display_rotations(list_of_nos, nos, rotations):
    """Print *list_of_nos* left-rotated by *rotations* positions, space-separated.

    The rotation count is taken modulo *nos*, so arbitrarily large counts
    wrap around.  Output ends with a trailing space and no newline.
    """
    shift = rotations % nos
    # Slicing builds the rotated sequence directly: tail first, then head.
    for value in list_of_nos[shift:] + list_of_nos[:shift]:
        print(value, end=" ")
def main():
    """Read the array length, the values, and a rotation count from stdin,
    then print the left-rotated array via display_rotations.

    Exits with a non-zero status when the declared length does not match
    the number of values entered.
    """
    nos = int(input("Enter No. of Nos.: "))
    list_of_nos = list(map(int, input("Enter the Nos.: ").strip().split()))
    if len(list_of_nos) != nos:
        print("Invalid Input.")
        # Fix: the original exited with status 0 here, signalling success
        # to the shell despite the error; use a non-zero code instead.
        sys.exit(1)
    rotations = int(input("Enter No. of Rotations: "))
    display_rotations(list_of_nos, nos, rotations)


if __name__ == "__main__":
    main()
| 33.115385
| 138
| 0.674797
|
4a058c817ede39d8392f288523fe1ecf9a7d60ba
| 247
|
py
|
Python
|
test/python/quantum_info/__init__.py
|
ismaila-at-za-ibm/qiskit-terra
|
08303ec98ac7b33fde55266dc3a74466fbdcae95
|
[
"Apache-2.0"
] | 2
|
2021-09-06T19:25:36.000Z
|
2021-11-17T10:46:12.000Z
|
test/python/quantum_info/__init__.py
|
ismaila-at-za-ibm/qiskit-terra
|
08303ec98ac7b33fde55266dc3a74466fbdcae95
|
[
"Apache-2.0"
] | null | null | null |
test/python/quantum_info/__init__.py
|
ismaila-at-za-ibm/qiskit-terra
|
08303ec98ac7b33fde55266dc3a74466fbdcae95
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright 2018, IBM.
#
# This source code is licensed under the Apache License, Version 2.0 found in
# the LICENSE.txt file in the root directory of this source tree.
"""Qiskit quantum information integration tests."""
| 27.444444
| 77
| 0.720648
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.