body_hash
stringlengths
64
64
body
stringlengths
23
109k
docstring
stringlengths
1
57k
path
stringlengths
4
198
name
stringlengths
1
115
repository_name
stringlengths
7
111
repository_stars
float64
0
191k
lang
stringclasses
1 value
body_without_docstring
stringlengths
14
108k
unified
stringlengths
45
133k
328109e1d3ed582473c641db70a3b77d3afd324e78d053a6aec12205b7899478
def build_args(parser: ArgumentParser): 'Constructs the command-line arguments for ``catalyst-dl run``.' parser.add_argument('--config', '--configs', '-C', nargs='+', help='path to config/configs', metavar='CONFIG_PATH', dest='configs', required=True) parser.add_argument('--expdir', type=str, default=None) parser.add_argument('--baselogdir', type=str, default=None) boolean_flag(parser, 'verbose', default=None) boolean_flag(parser, 'timeit', default=None) boolean_flag(parser, 'deterministic', default=None, help='Deterministic mode if running in CuDNN backend') boolean_flag(parser, 'benchmark', default=None, help='Use CuDNN benchmark') parser.add_argument('--storage', type=int, default=None) parser.add_argument('--study-name', type=str, default=None) parser.add_argument('--direction', type=str, default=None) parser.add_argument('--n-trials', type=int, default=None) parser.add_argument('--timeout', type=int, default=None) parser.add_argument('--n-jobs', type=int, default=None) boolean_flag(parser, 'gc-after-trial', default=False) boolean_flag(parser, 'show-progress-bar', default=False) return parser
Constructs the command-line arguments for ``catalyst-dl run``.
catalyst/dl/scripts/tune.py
build_args
Thiefwerty/catalyst
2,693
python
def build_args(parser: ArgumentParser): parser.add_argument('--config', '--configs', '-C', nargs='+', help='path to config/configs', metavar='CONFIG_PATH', dest='configs', required=True) parser.add_argument('--expdir', type=str, default=None) parser.add_argument('--baselogdir', type=str, default=None) boolean_flag(parser, 'verbose', default=None) boolean_flag(parser, 'timeit', default=None) boolean_flag(parser, 'deterministic', default=None, help='Deterministic mode if running in CuDNN backend') boolean_flag(parser, 'benchmark', default=None, help='Use CuDNN benchmark') parser.add_argument('--storage', type=int, default=None) parser.add_argument('--study-name', type=str, default=None) parser.add_argument('--direction', type=str, default=None) parser.add_argument('--n-trials', type=int, default=None) parser.add_argument('--timeout', type=int, default=None) parser.add_argument('--n-jobs', type=int, default=None) boolean_flag(parser, 'gc-after-trial', default=False) boolean_flag(parser, 'show-progress-bar', default=False) return parser
def build_args(parser: ArgumentParser): parser.add_argument('--config', '--configs', '-C', nargs='+', help='path to config/configs', metavar='CONFIG_PATH', dest='configs', required=True) parser.add_argument('--expdir', type=str, default=None) parser.add_argument('--baselogdir', type=str, default=None) boolean_flag(parser, 'verbose', default=None) boolean_flag(parser, 'timeit', default=None) boolean_flag(parser, 'deterministic', default=None, help='Deterministic mode if running in CuDNN backend') boolean_flag(parser, 'benchmark', default=None, help='Use CuDNN benchmark') parser.add_argument('--storage', type=int, default=None) parser.add_argument('--study-name', type=str, default=None) parser.add_argument('--direction', type=str, default=None) parser.add_argument('--n-trials', type=int, default=None) parser.add_argument('--timeout', type=int, default=None) parser.add_argument('--n-jobs', type=int, default=None) boolean_flag(parser, 'gc-after-trial', default=False) boolean_flag(parser, 'show-progress-bar', default=False) return parser<|docstring|>Constructs the command-line arguments for ``catalyst-dl run``.<|endoftext|>
2615b69493626b8598c0000703b58cd74198f4aa5867a161178bd8543a4b6057
def parse_args(): 'Parses the command line arguments and returns arguments and config.' parser = argparse.ArgumentParser() build_args(parser) (args, unknown_args) = parser.parse_known_args() return (args, unknown_args)
Parses the command line arguments and returns arguments and config.
catalyst/dl/scripts/tune.py
parse_args
Thiefwerty/catalyst
2,693
python
def parse_args(): parser = argparse.ArgumentParser() build_args(parser) (args, unknown_args) = parser.parse_known_args() return (args, unknown_args)
def parse_args(): parser = argparse.ArgumentParser() build_args(parser) (args, unknown_args) = parser.parse_known_args() return (args, unknown_args)<|docstring|>Parses the command line arguments and returns arguments and config.<|endoftext|>
53f5964a8050f883ca4e76ed3db0904143aa8c9fca499f5cb7f817b659b5e80d
def main(args, unknown_args): 'Runs the ``catalyst-dl tune`` script.' (args, config) = parse_args_uargs(args, unknown_args) set_global_seed(args.seed) prepare_cudnn(args.deterministic, args.benchmark) def objective(trial: optuna.trial): (trial, trial_config) = _process_trial_config(trial, config.copy()) runner: ConfigRunner = get_config_runner(expdir=Path(args.expdir), config=trial_config) runner._trial = trial if (get_rank() <= 0): dump_environment(logdir=runner.logdir, config=trial_config, configs_path=args.configs) dump_code(expdir=args.expdir, logdir=runner.logdir) runner.run() return trial.best_score study_params = config.pop('study', {}) sampler_params = study_params.pop('sampler', {}) optuna_sampler_type = sampler_params.pop('_target_', None) optuna_sampler = (optuna.samplers.__dict__[optuna_sampler_type](**sampler_params) if (optuna_sampler_type is not None) else None) pruner_params = study_params.pop('pruner', {}) optuna_pruner_type = pruner_params.pop('_target_', None) optuna_pruner = (optuna.pruners.__dict__[optuna_pruner_type](**pruner_params) if (optuna_pruner_type is not None) else None) study = optuna.create_study(direction=(args.direction or study_params.pop('direction', 'minimize')), storage=(args.storage or study_params.pop('storage', None)), study_name=(args.study_name or study_params.pop('study_name', None)), sampler=optuna_sampler, pruner=optuna_pruner, **study_params) study.optimize(objective, n_trials=args.n_trials, timeout=args.timeout, n_jobs=(args.n_jobs or 1), gc_after_trial=args.gc_after_trial, show_progress_bar=args.show_progress_bar)
Runs the ``catalyst-dl tune`` script.
catalyst/dl/scripts/tune.py
main
Thiefwerty/catalyst
2,693
python
def main(args, unknown_args): (args, config) = parse_args_uargs(args, unknown_args) set_global_seed(args.seed) prepare_cudnn(args.deterministic, args.benchmark) def objective(trial: optuna.trial): (trial, trial_config) = _process_trial_config(trial, config.copy()) runner: ConfigRunner = get_config_runner(expdir=Path(args.expdir), config=trial_config) runner._trial = trial if (get_rank() <= 0): dump_environment(logdir=runner.logdir, config=trial_config, configs_path=args.configs) dump_code(expdir=args.expdir, logdir=runner.logdir) runner.run() return trial.best_score study_params = config.pop('study', {}) sampler_params = study_params.pop('sampler', {}) optuna_sampler_type = sampler_params.pop('_target_', None) optuna_sampler = (optuna.samplers.__dict__[optuna_sampler_type](**sampler_params) if (optuna_sampler_type is not None) else None) pruner_params = study_params.pop('pruner', {}) optuna_pruner_type = pruner_params.pop('_target_', None) optuna_pruner = (optuna.pruners.__dict__[optuna_pruner_type](**pruner_params) if (optuna_pruner_type is not None) else None) study = optuna.create_study(direction=(args.direction or study_params.pop('direction', 'minimize')), storage=(args.storage or study_params.pop('storage', None)), study_name=(args.study_name or study_params.pop('study_name', None)), sampler=optuna_sampler, pruner=optuna_pruner, **study_params) study.optimize(objective, n_trials=args.n_trials, timeout=args.timeout, n_jobs=(args.n_jobs or 1), gc_after_trial=args.gc_after_trial, show_progress_bar=args.show_progress_bar)
def main(args, unknown_args): (args, config) = parse_args_uargs(args, unknown_args) set_global_seed(args.seed) prepare_cudnn(args.deterministic, args.benchmark) def objective(trial: optuna.trial): (trial, trial_config) = _process_trial_config(trial, config.copy()) runner: ConfigRunner = get_config_runner(expdir=Path(args.expdir), config=trial_config) runner._trial = trial if (get_rank() <= 0): dump_environment(logdir=runner.logdir, config=trial_config, configs_path=args.configs) dump_code(expdir=args.expdir, logdir=runner.logdir) runner.run() return trial.best_score study_params = config.pop('study', {}) sampler_params = study_params.pop('sampler', {}) optuna_sampler_type = sampler_params.pop('_target_', None) optuna_sampler = (optuna.samplers.__dict__[optuna_sampler_type](**sampler_params) if (optuna_sampler_type is not None) else None) pruner_params = study_params.pop('pruner', {}) optuna_pruner_type = pruner_params.pop('_target_', None) optuna_pruner = (optuna.pruners.__dict__[optuna_pruner_type](**pruner_params) if (optuna_pruner_type is not None) else None) study = optuna.create_study(direction=(args.direction or study_params.pop('direction', 'minimize')), storage=(args.storage or study_params.pop('storage', None)), study_name=(args.study_name or study_params.pop('study_name', None)), sampler=optuna_sampler, pruner=optuna_pruner, **study_params) study.optimize(objective, n_trials=args.n_trials, timeout=args.timeout, n_jobs=(args.n_jobs or 1), gc_after_trial=args.gc_after_trial, show_progress_bar=args.show_progress_bar)<|docstring|>Runs the ``catalyst-dl tune`` script.<|endoftext|>
08957769647b050eb6b3ed8b394277332de8450b6f41ee9a7e7e1fb007855a5d
@property def num_eqn(self): '(int) - Number of unknowns (components of q)' if (self.q_da is None): raise Exception('state.num_eqn has not been set.') else: return self.q_da.dof
(int) - Number of unknowns (components of q)
src/petclaw/state.py
num_eqn
hadjimy/pyclaw
98
python
@property def num_eqn(self): if (self.q_da is None): raise Exception('state.num_eqn has not been set.') else: return self.q_da.dof
@property def num_eqn(self): if (self.q_da is None): raise Exception('state.num_eqn has not been set.') else: return self.q_da.dof<|docstring|>(int) - Number of unknowns (components of q)<|endoftext|>
f40bddd3b62bc7e72b850d73c993a26dd5ca57cc1fbff383c0b7614b2e864a41
@property def num_aux(self): '(int) - Number of auxiliary fields' if (self.aux_da is None): return 0 else: return self.aux_da.dof
(int) - Number of auxiliary fields
src/petclaw/state.py
num_aux
hadjimy/pyclaw
98
python
@property def num_aux(self): if (self.aux_da is None): return 0 else: return self.aux_da.dof
@property def num_aux(self): if (self.aux_da is None): return 0 else: return self.aux_da.dof<|docstring|>(int) - Number of auxiliary fields<|endoftext|>
c3d496d7b7d2131cef99d790b3fd9cecb426cb631843e6f55783355cd9e87243
@property def mp(self): '(int) - Number of derived quantities (components of p)' if (self._p_da is None): raise Exception('state.mp has not been set.') else: return self._p_da.dof
(int) - Number of derived quantities (components of p)
src/petclaw/state.py
mp
hadjimy/pyclaw
98
python
@property def mp(self): if (self._p_da is None): raise Exception('state.mp has not been set.') else: return self._p_da.dof
@property def mp(self): if (self._p_da is None): raise Exception('state.mp has not been set.') else: return self._p_da.dof<|docstring|>(int) - Number of derived quantities (components of p)<|endoftext|>
c44228f701fec912c8a153b91b851e9523a5a1099984a605bd2f0d2a8faf6834
@property def mF(self): '(int) - Number of derived quantities (components of p)' if (self._F_da is None): raise Exception('state.mF has not been set.') else: return self._F_da.dof
(int) - Number of derived quantities (components of p)
src/petclaw/state.py
mF
hadjimy/pyclaw
98
python
@property def mF(self): if (self._F_da is None): raise Exception('state.mF has not been set.') else: return self._F_da.dof
@property def mF(self): if (self._F_da is None): raise Exception('state.mF has not been set.') else: return self._F_da.dof<|docstring|>(int) - Number of derived quantities (components of p)<|endoftext|>
937f6601bc354ad5c653ea6fe58150b6c0312be42f235ca7227a33f7e7395630
@property def q(self): '\n Array of solution values.\n ' shape = self.grid.num_cells shape.insert(0, self.num_eqn) return self.gqVec.getArray().reshape(shape, order='F')
Array of solution values.
src/petclaw/state.py
q
hadjimy/pyclaw
98
python
@property def q(self): '\n \n ' shape = self.grid.num_cells shape.insert(0, self.num_eqn) return self.gqVec.getArray().reshape(shape, order='F')
@property def q(self): '\n \n ' shape = self.grid.num_cells shape.insert(0, self.num_eqn) return self.gqVec.getArray().reshape(shape, order='F')<|docstring|>Array of solution values.<|endoftext|>
b1b5a3812b89659f0ddad02e3e9587b42c3c1455c424ec8b48f907fc82473ac6
@property def p(self): '\n Array containing values of derived quantities for output.\n ' if (self._p_da is None): return 0 shape = self.grid.num_cells shape.insert(0, self.mp) p = self.gpVec.getArray().reshape(shape, order='F') return p
Array containing values of derived quantities for output.
src/petclaw/state.py
p
hadjimy/pyclaw
98
python
@property def p(self): '\n \n ' if (self._p_da is None): return 0 shape = self.grid.num_cells shape.insert(0, self.mp) p = self.gpVec.getArray().reshape(shape, order='F') return p
@property def p(self): '\n \n ' if (self._p_da is None): return 0 shape = self.grid.num_cells shape.insert(0, self.mp) p = self.gpVec.getArray().reshape(shape, order='F') return p<|docstring|>Array containing values of derived quantities for output.<|endoftext|>
340f76ca22599716a98f02869c2528810c6fa32cddac84b45d328660607bb431
@property def F(self): '\n Array containing pointwise values (densities) of output functionals.\n This is just used as temporary workspace before summing.\n ' if (self._F_da is None): return 0 shape = self.grid.num_cells shape.insert(0, self.mF) F = self.gFVec.getArray().reshape(shape, order='F') return F
Array containing pointwise values (densities) of output functionals. This is just used as temporary workspace before summing.
src/petclaw/state.py
F
hadjimy/pyclaw
98
python
@property def F(self): '\n Array containing pointwise values (densities) of output functionals.\n This is just used as temporary workspace before summing.\n ' if (self._F_da is None): return 0 shape = self.grid.num_cells shape.insert(0, self.mF) F = self.gFVec.getArray().reshape(shape, order='F') return F
@property def F(self): '\n Array containing pointwise values (densities) of output functionals.\n This is just used as temporary workspace before summing.\n ' if (self._F_da is None): return 0 shape = self.grid.num_cells shape.insert(0, self.mF) F = self.gFVec.getArray().reshape(shape, order='F') return F<|docstring|>Array containing pointwise values (densities) of output functionals. This is just used as temporary workspace before summing.<|endoftext|>
67b0217361d6bb6009577136f35a55fa2ccd3d296e6d3804e9b0d7377c33cade
@property def aux(self): '\n We never communicate aux values; every processor should set its own ghost cell\n values for the aux array. The global aux vector is used only for outputting\n the aux values to file; everywhere else we use the local vector.\n ' if (self.aux_da is None): return None shape = self.grid.num_cells shape.insert(0, self.num_aux) aux = self.gauxVec.getArray().reshape(shape, order='F') return aux
We never communicate aux values; every processor should set its own ghost cell values for the aux array. The global aux vector is used only for outputting the aux values to file; everywhere else we use the local vector.
src/petclaw/state.py
aux
hadjimy/pyclaw
98
python
@property def aux(self): '\n We never communicate aux values; every processor should set its own ghost cell\n values for the aux array. The global aux vector is used only for outputting\n the aux values to file; everywhere else we use the local vector.\n ' if (self.aux_da is None): return None shape = self.grid.num_cells shape.insert(0, self.num_aux) aux = self.gauxVec.getArray().reshape(shape, order='F') return aux
@property def aux(self): '\n We never communicate aux values; every processor should set its own ghost cell\n values for the aux array. The global aux vector is used only for outputting\n the aux values to file; everywhere else we use the local vector.\n ' if (self.aux_da is None): return None shape = self.grid.num_cells shape.insert(0, self.num_aux) aux = self.gauxVec.getArray().reshape(shape, order='F') return aux<|docstring|>We never communicate aux values; every processor should set its own ghost cell values for the aux array. The global aux vector is used only for outputting the aux values to file; everywhere else we use the local vector.<|endoftext|>
0fd03e72d0ed216f5b33f034c0ef1479e397cf4936c6a102933dbca76c4d145b
def __init__(self, geom, num_eqn, num_aux=0): "\n Here we don't call super because q and aux must be properties in PetClaw\n but should not be properties in PyClaw.\n\n :attributes:\n patch - The patch this state lives on\n " from clawpack.pyclaw import geometry if isinstance(geom, geometry.Patch): self.patch = geom elif isinstance(geom, geometry.Domain): self.patch = geom.patches[0] else: raise Exception('A PetClaw State object must be initialized with\n a PetClaw Patch or Domain object.') self.aux_da = None self.q_da = None self._p_da = None self.gpVec = None self._F_da = None self.gFVec = None self.problem_data = {} '(dict) - Dictionary of global values for this patch, \n ``default = {}``' self.t = 0.0 '(float) - Current time represented on this patch, \n ``default = 0.0``' self.index_capa = (- 1) self.keep_gauges = False '(bool) - Keep gauge values in memory for every time step, \n ``default = False``' self.gauge_data = [] '(list) - List of numpy.ndarray objects. Each element of the list\n stores the values of the corresponding gauge if ``keep_gauges`` is set\n to ``True``' self._init_q_da(num_eqn) if (num_aux > 0): self._init_aux_da(num_aux)
Here we don't call super because q and aux must be properties in PetClaw but should not be properties in PyClaw. :attributes: patch - The patch this state lives on
src/petclaw/state.py
__init__
hadjimy/pyclaw
98
python
def __init__(self, geom, num_eqn, num_aux=0): "\n Here we don't call super because q and aux must be properties in PetClaw\n but should not be properties in PyClaw.\n\n :attributes:\n patch - The patch this state lives on\n " from clawpack.pyclaw import geometry if isinstance(geom, geometry.Patch): self.patch = geom elif isinstance(geom, geometry.Domain): self.patch = geom.patches[0] else: raise Exception('A PetClaw State object must be initialized with\n a PetClaw Patch or Domain object.') self.aux_da = None self.q_da = None self._p_da = None self.gpVec = None self._F_da = None self.gFVec = None self.problem_data = {} '(dict) - Dictionary of global values for this patch, \n ``default = {}``' self.t = 0.0 '(float) - Current time represented on this patch, \n ``default = 0.0``' self.index_capa = (- 1) self.keep_gauges = False '(bool) - Keep gauge values in memory for every time step, \n ``default = False``' self.gauge_data = [] '(list) - List of numpy.ndarray objects. Each element of the list\n stores the values of the corresponding gauge if ``keep_gauges`` is set\n to ``True``' self._init_q_da(num_eqn) if (num_aux > 0): self._init_aux_da(num_aux)
def __init__(self, geom, num_eqn, num_aux=0): "\n Here we don't call super because q and aux must be properties in PetClaw\n but should not be properties in PyClaw.\n\n :attributes:\n patch - The patch this state lives on\n " from clawpack.pyclaw import geometry if isinstance(geom, geometry.Patch): self.patch = geom elif isinstance(geom, geometry.Domain): self.patch = geom.patches[0] else: raise Exception('A PetClaw State object must be initialized with\n a PetClaw Patch or Domain object.') self.aux_da = None self.q_da = None self._p_da = None self.gpVec = None self._F_da = None self.gFVec = None self.problem_data = {} '(dict) - Dictionary of global values for this patch, \n ``default = {}``' self.t = 0.0 '(float) - Current time represented on this patch, \n ``default = 0.0``' self.index_capa = (- 1) self.keep_gauges = False '(bool) - Keep gauge values in memory for every time step, \n ``default = False``' self.gauge_data = [] '(list) - List of numpy.ndarray objects. Each element of the list\n stores the values of the corresponding gauge if ``keep_gauges`` is set\n to ``True``' self._init_q_da(num_eqn) if (num_aux > 0): self._init_aux_da(num_aux)<|docstring|>Here we don't call super because q and aux must be properties in PetClaw but should not be properties in PyClaw. :attributes: patch - The patch this state lives on<|endoftext|>
54ed87a48231cfdd62f14c0b803503e57f645433bdbbaf70f5cc522b24e9bb28
def _init_aux_da(self, num_aux, num_ghost=0): '\n Initializes PETSc DA and global & local Vectors for handling the\n auxiliary array, aux. \n \n Initializes aux_da, gauxVec and _aux_local_vector.\n ' self.aux_da = self._create_DA(num_aux, num_ghost) self.gauxVec = self.aux_da.createGlobalVector() self._aux_local_vector = self.aux_da.createLocalVector()
Initializes PETSc DA and global & local Vectors for handling the auxiliary array, aux. Initializes aux_da, gauxVec and _aux_local_vector.
src/petclaw/state.py
_init_aux_da
hadjimy/pyclaw
98
python
def _init_aux_da(self, num_aux, num_ghost=0): '\n Initializes PETSc DA and global & local Vectors for handling the\n auxiliary array, aux. \n \n Initializes aux_da, gauxVec and _aux_local_vector.\n ' self.aux_da = self._create_DA(num_aux, num_ghost) self.gauxVec = self.aux_da.createGlobalVector() self._aux_local_vector = self.aux_da.createLocalVector()
def _init_aux_da(self, num_aux, num_ghost=0): '\n Initializes PETSc DA and global & local Vectors for handling the\n auxiliary array, aux. \n \n Initializes aux_da, gauxVec and _aux_local_vector.\n ' self.aux_da = self._create_DA(num_aux, num_ghost) self.gauxVec = self.aux_da.createGlobalVector() self._aux_local_vector = self.aux_da.createLocalVector()<|docstring|>Initializes PETSc DA and global & local Vectors for handling the auxiliary array, aux. Initializes aux_da, gauxVec and _aux_local_vector.<|endoftext|>
fb0277ddecf6961a0500b18f20e4d893050e31dd45790a2c2b64b3d60930586f
def _init_q_da(self, num_eqn, num_ghost=0): '\n Initializes PETSc DA and Vecs for handling the solution, q. \n \n Initializes q_da, gqVec and _q_local_vector.\n ' self.q_da = self._create_DA(num_eqn, num_ghost) self.gqVec = self.q_da.createGlobalVector() self._q_local_vector = self.q_da.createLocalVector()
Initializes PETSc DA and Vecs for handling the solution, q. Initializes q_da, gqVec and _q_local_vector.
src/petclaw/state.py
_init_q_da
hadjimy/pyclaw
98
python
def _init_q_da(self, num_eqn, num_ghost=0): '\n Initializes PETSc DA and Vecs for handling the solution, q. \n \n Initializes q_da, gqVec and _q_local_vector.\n ' self.q_da = self._create_DA(num_eqn, num_ghost) self.gqVec = self.q_da.createGlobalVector() self._q_local_vector = self.q_da.createLocalVector()
def _init_q_da(self, num_eqn, num_ghost=0): '\n Initializes PETSc DA and Vecs for handling the solution, q. \n \n Initializes q_da, gqVec and _q_local_vector.\n ' self.q_da = self._create_DA(num_eqn, num_ghost) self.gqVec = self.q_da.createGlobalVector() self._q_local_vector = self.q_da.createLocalVector()<|docstring|>Initializes PETSc DA and Vecs for handling the solution, q. Initializes q_da, gqVec and _q_local_vector.<|endoftext|>
606d4eba699d2b2e67825f218d96a6dc5530717f27e36054a5a890edfad41b2f
def _create_DA(self, dof, num_ghost=0): 'Returns a PETSc DA and associated global Vec.\n Note that no local vector is returned.\n ' from petsc4py import PETSc if hasattr(PETSc.DA, 'PeriodicType'): if (self.num_dim == 1): periodic_type = PETSc.DA.PeriodicType.X elif (self.num_dim == 2): periodic_type = PETSc.DA.PeriodicType.XY elif (self.num_dim == 3): periodic_type = PETSc.DA.PeriodicType.XYZ else: raise Exception('Invalid number of dimensions') DA = PETSc.DA().create(dim=self.num_dim, dof=dof, sizes=self.patch.num_cells_global, periodic_type=periodic_type, stencil_width=num_ghost, comm=PETSc.COMM_WORLD) else: DA = PETSc.DA().create(dim=self.num_dim, dof=dof, sizes=self.patch.num_cells_global, boundary_type=PETSc.DA.BoundaryType.PERIODIC, stencil_width=num_ghost, comm=PETSc.COMM_WORLD) return DA
Returns a PETSc DA and associated global Vec. Note that no local vector is returned.
src/petclaw/state.py
_create_DA
hadjimy/pyclaw
98
python
def _create_DA(self, dof, num_ghost=0): 'Returns a PETSc DA and associated global Vec.\n Note that no local vector is returned.\n ' from petsc4py import PETSc if hasattr(PETSc.DA, 'PeriodicType'): if (self.num_dim == 1): periodic_type = PETSc.DA.PeriodicType.X elif (self.num_dim == 2): periodic_type = PETSc.DA.PeriodicType.XY elif (self.num_dim == 3): periodic_type = PETSc.DA.PeriodicType.XYZ else: raise Exception('Invalid number of dimensions') DA = PETSc.DA().create(dim=self.num_dim, dof=dof, sizes=self.patch.num_cells_global, periodic_type=periodic_type, stencil_width=num_ghost, comm=PETSc.COMM_WORLD) else: DA = PETSc.DA().create(dim=self.num_dim, dof=dof, sizes=self.patch.num_cells_global, boundary_type=PETSc.DA.BoundaryType.PERIODIC, stencil_width=num_ghost, comm=PETSc.COMM_WORLD) return DA
def _create_DA(self, dof, num_ghost=0): 'Returns a PETSc DA and associated global Vec.\n Note that no local vector is returned.\n ' from petsc4py import PETSc if hasattr(PETSc.DA, 'PeriodicType'): if (self.num_dim == 1): periodic_type = PETSc.DA.PeriodicType.X elif (self.num_dim == 2): periodic_type = PETSc.DA.PeriodicType.XY elif (self.num_dim == 3): periodic_type = PETSc.DA.PeriodicType.XYZ else: raise Exception('Invalid number of dimensions') DA = PETSc.DA().create(dim=self.num_dim, dof=dof, sizes=self.patch.num_cells_global, periodic_type=periodic_type, stencil_width=num_ghost, comm=PETSc.COMM_WORLD) else: DA = PETSc.DA().create(dim=self.num_dim, dof=dof, sizes=self.patch.num_cells_global, boundary_type=PETSc.DA.BoundaryType.PERIODIC, stencil_width=num_ghost, comm=PETSc.COMM_WORLD) return DA<|docstring|>Returns a PETSc DA and associated global Vec. Note that no local vector is returned.<|endoftext|>
45af66bd2758cff01a076f51eb2914788cc73dd1265cc72f48c2ced9e3bdd691
def get_qbc_from_q(self, num_ghost, qbc): '\n Returns q with ghost cells attached, by accessing the local vector.\n ' shape = [(n + (2 * num_ghost)) for n in self.grid.num_cells] self.q_da.globalToLocal(self.gqVec, self._q_local_vector) shape.insert(0, self.num_eqn) return self._q_local_vector.getArray().reshape(shape, order='F')
Returns q with ghost cells attached, by accessing the local vector.
src/petclaw/state.py
get_qbc_from_q
hadjimy/pyclaw
98
python
def get_qbc_from_q(self, num_ghost, qbc): '\n \n ' shape = [(n + (2 * num_ghost)) for n in self.grid.num_cells] self.q_da.globalToLocal(self.gqVec, self._q_local_vector) shape.insert(0, self.num_eqn) return self._q_local_vector.getArray().reshape(shape, order='F')
def get_qbc_from_q(self, num_ghost, qbc): '\n \n ' shape = [(n + (2 * num_ghost)) for n in self.grid.num_cells] self.q_da.globalToLocal(self.gqVec, self._q_local_vector) shape.insert(0, self.num_eqn) return self._q_local_vector.getArray().reshape(shape, order='F')<|docstring|>Returns q with ghost cells attached, by accessing the local vector.<|endoftext|>
add78e328914a6d5481524810f249fd4231fdf46eff1864820be6c8ae8d3797f
def get_auxbc_from_aux(self, num_ghost, auxbc): '\n Returns aux with ghost cells attached, by accessing the local vector.\n ' shape = [(n + (2 * num_ghost)) for n in self.grid.num_cells] self.aux_da.globalToLocal(self.gauxVec, self._aux_local_vector) shape.insert(0, self.num_aux) return self._aux_local_vector.getArray().reshape(shape, order='F')
Returns aux with ghost cells attached, by accessing the local vector.
src/petclaw/state.py
get_auxbc_from_aux
hadjimy/pyclaw
98
python
def get_auxbc_from_aux(self, num_ghost, auxbc): '\n \n ' shape = [(n + (2 * num_ghost)) for n in self.grid.num_cells] self.aux_da.globalToLocal(self.gauxVec, self._aux_local_vector) shape.insert(0, self.num_aux) return self._aux_local_vector.getArray().reshape(shape, order='F')
def get_auxbc_from_aux(self, num_ghost, auxbc): '\n \n ' shape = [(n + (2 * num_ghost)) for n in self.grid.num_cells] self.aux_da.globalToLocal(self.gauxVec, self._aux_local_vector) shape.insert(0, self.num_aux) return self._aux_local_vector.getArray().reshape(shape, order='F')<|docstring|>Returns aux with ghost cells attached, by accessing the local vector.<|endoftext|>
4f6429e284146fcd6dab0ee07e28193d787cbe1cb1d2e44d868a9d2ac0d60b76
def set_num_ghost(self, num_ghost): "\n This is a hack to deal with the fact that petsc4py\n doesn't allow us to change the stencil_width (num_ghost).\n\n Instead, we initially create DAs with stencil_width=0.\n Then, in solver.setup(), we call this function to replace\n those DAs with new ones that have the right stencil width.\n\n This could be made more efficient using some PETSc calls,\n but it only happens once so it seems not to be worth it.\n " q0 = self.q.copy() self._init_q_da(self.num_eqn, num_ghost) self.q = q0 if (self.aux is not None): aux0 = self.aux.copy() self._init_aux_da(self.num_aux, num_ghost) self.aux = aux0
This is a hack to deal with the fact that petsc4py doesn't allow us to change the stencil_width (num_ghost). Instead, we initially create DAs with stencil_width=0. Then, in solver.setup(), we call this function to replace those DAs with new ones that have the right stencil width. This could be made more efficient using some PETSc calls, but it only happens once so it seems not to be worth it.
src/petclaw/state.py
set_num_ghost
hadjimy/pyclaw
98
python
def set_num_ghost(self, num_ghost): "\n This is a hack to deal with the fact that petsc4py\n doesn't allow us to change the stencil_width (num_ghost).\n\n Instead, we initially create DAs with stencil_width=0.\n Then, in solver.setup(), we call this function to replace\n those DAs with new ones that have the right stencil width.\n\n This could be made more efficient using some PETSc calls,\n but it only happens once so it seems not to be worth it.\n " q0 = self.q.copy() self._init_q_da(self.num_eqn, num_ghost) self.q = q0 if (self.aux is not None): aux0 = self.aux.copy() self._init_aux_da(self.num_aux, num_ghost) self.aux = aux0
def set_num_ghost(self, num_ghost): "\n This is a hack to deal with the fact that petsc4py\n doesn't allow us to change the stencil_width (num_ghost).\n\n Instead, we initially create DAs with stencil_width=0.\n Then, in solver.setup(), we call this function to replace\n those DAs with new ones that have the right stencil width.\n\n This could be made more efficient using some PETSc calls,\n but it only happens once so it seems not to be worth it.\n " q0 = self.q.copy() self._init_q_da(self.num_eqn, num_ghost) self.q = q0 if (self.aux is not None): aux0 = self.aux.copy() self._init_aux_da(self.num_aux, num_ghost) self.aux = aux0<|docstring|>This is a hack to deal with the fact that petsc4py doesn't allow us to change the stencil_width (num_ghost). Instead, we initially create DAs with stencil_width=0. Then, in solver.setup(), we call this function to replace those DAs with new ones that have the right stencil width. This could be made more efficient using some PETSc calls, but it only happens once so it seems not to be worth it.<|endoftext|>
924ef880c5d94186f6324e38765d85f4a2c6dfea1ecd47280121e71f67400a8c
def get_q_global(self): '\n Returns a copy of the global q array on process 0, otherwise returns None\n ' from petsc4py import PETSc q_natural = self.q_da.createNaturalVec() self.q_da.globalToNatural(self.gqVec, q_natural) (scatter, q0Vec) = PETSc.Scatter.toZero(q_natural) scatter.scatter(q_natural, q0Vec, False, PETSc.Scatter.Mode.FORWARD) rank = PETSc.COMM_WORLD.getRank() if (rank == 0): shape = self.patch.num_cells_global shape.insert(0, self.num_eqn) q0 = q0Vec.getArray().reshape(shape, order='F').copy() else: q0 = None scatter.destroy() q0Vec.destroy() return q0
Returns a copy of the global q array on process 0, otherwise returns None
src/petclaw/state.py
get_q_global
hadjimy/pyclaw
98
python
def get_q_global(self): '\n \n ' from petsc4py import PETSc q_natural = self.q_da.createNaturalVec() self.q_da.globalToNatural(self.gqVec, q_natural) (scatter, q0Vec) = PETSc.Scatter.toZero(q_natural) scatter.scatter(q_natural, q0Vec, False, PETSc.Scatter.Mode.FORWARD) rank = PETSc.COMM_WORLD.getRank() if (rank == 0): shape = self.patch.num_cells_global shape.insert(0, self.num_eqn) q0 = q0Vec.getArray().reshape(shape, order='F').copy() else: q0 = None scatter.destroy() q0Vec.destroy() return q0
def get_q_global(self): '\n \n ' from petsc4py import PETSc q_natural = self.q_da.createNaturalVec() self.q_da.globalToNatural(self.gqVec, q_natural) (scatter, q0Vec) = PETSc.Scatter.toZero(q_natural) scatter.scatter(q_natural, q0Vec, False, PETSc.Scatter.Mode.FORWARD) rank = PETSc.COMM_WORLD.getRank() if (rank == 0): shape = self.patch.num_cells_global shape.insert(0, self.num_eqn) q0 = q0Vec.getArray().reshape(shape, order='F').copy() else: q0 = None scatter.destroy() q0Vec.destroy() return q0<|docstring|>Returns a copy of the global q array on process 0, otherwise returns None<|endoftext|>
c00c7d42dbbeb789dc4fdcc0cb20a7fd2e432b2c7283136072ae76eecf8108da
def get_aux_global(self): '\n Returns a copy of the global aux array on process 0, otherwise returns None\n ' from petsc4py import PETSc aux_natural = self.aux_da.createNaturalVec() self.aux_da.globalToNatural(self.gauxVec, aux_natural) (scatter, aux0Vec) = PETSc.Scatter.toZero(aux_natural) scatter.scatter(aux_natural, aux0Vec, False, PETSc.Scatter.Mode.FORWARD) rank = PETSc.COMM_WORLD.getRank() if (rank == 0): shape = self.patch.num_cells_global shape.insert(0, self.num_aux) aux0 = aux0Vec.getArray().reshape(shape, order='F').copy() else: aux0 = None scatter.destroy() aux0Vec.destroy() return aux0
Returns a copy of the global aux array on process 0, otherwise returns None
src/petclaw/state.py
get_aux_global
hadjimy/pyclaw
98
python
def get_aux_global(self): '\n \n ' from petsc4py import PETSc aux_natural = self.aux_da.createNaturalVec() self.aux_da.globalToNatural(self.gauxVec, aux_natural) (scatter, aux0Vec) = PETSc.Scatter.toZero(aux_natural) scatter.scatter(aux_natural, aux0Vec, False, PETSc.Scatter.Mode.FORWARD) rank = PETSc.COMM_WORLD.getRank() if (rank == 0): shape = self.patch.num_cells_global shape.insert(0, self.num_aux) aux0 = aux0Vec.getArray().reshape(shape, order='F').copy() else: aux0 = None scatter.destroy() aux0Vec.destroy() return aux0
def get_aux_global(self): '\n \n ' from petsc4py import PETSc aux_natural = self.aux_da.createNaturalVec() self.aux_da.globalToNatural(self.gauxVec, aux_natural) (scatter, aux0Vec) = PETSc.Scatter.toZero(aux_natural) scatter.scatter(aux_natural, aux0Vec, False, PETSc.Scatter.Mode.FORWARD) rank = PETSc.COMM_WORLD.getRank() if (rank == 0): shape = self.patch.num_cells_global shape.insert(0, self.num_aux) aux0 = aux0Vec.getArray().reshape(shape, order='F').copy() else: aux0 = None scatter.destroy() aux0Vec.destroy() return aux0<|docstring|>Returns a copy of the global aux array on process 0, otherwise returns None<|endoftext|>
17bbd360fcc6b5f6dfe9a8d5dc3c071ce244dd9094395f271e2f76e10392084c
def __deepcopy__(self, memo={}): '\n Calls the pyclaw deepcopy function, but also copies the number of ghost cells \n ' result = super(State, self).__deepcopy__(memo) result.set_num_ghost(self.q_da.stencil_width) return result
Calls the pyclaw deepcopy function, but also copies the number of ghost cells
src/petclaw/state.py
__deepcopy__
hadjimy/pyclaw
98
python
def __deepcopy__(self, memo={}): '\n \n ' result = super(State, self).__deepcopy__(memo) result.set_num_ghost(self.q_da.stencil_width) return result
def __deepcopy__(self, memo={}): '\n \n ' result = super(State, self).__deepcopy__(memo) result.set_num_ghost(self.q_da.stencil_width) return result<|docstring|>Calls the pyclaw deepcopy function, but also copies the number of ghost cells<|endoftext|>
b08252fdf6bdf5bc62251053ec06c38f446e894940a54ea6f4308ffaf51895e7
def check(self, info): "\n Things to check: mu_cutoff should be between 0.99 and 1.0,\n domain of effective xsc / distribution should match,\n if identical particles distribution should only be for 0->muCutoff, otherwise -1->muCutoff\n distribution should be normalized\n If the cross section goes negative, also check that the pure Coulomb portion is large enough to 'win'\n :return:\n " return [] raise NotImplementedError
Things to check: mu_cutoff should be between 0.99 and 1.0, domain of effective xsc / distribution should match, if identical particles distribution should only be for 0->muCutoff, otherwise -1->muCutoff distribution should be normalized If the cross section goes negative, also check that the pure Coulomb portion is large enough to 'win' :return:
fudge/reactionData/doubleDifferentialCrossSection/chargedParticleElastic/nuclearPlusInterference.py
check
brown170/fudge
14
python
def check(self, info): "\n Things to check: mu_cutoff should be between 0.99 and 1.0,\n domain of effective xsc / distribution should match,\n if identical particles distribution should only be for 0->muCutoff, otherwise -1->muCutoff\n distribution should be normalized\n If the cross section goes negative, also check that the pure Coulomb portion is large enough to 'win'\n :return:\n " return [] raise NotImplementedError
def check(self, info): "\n Things to check: mu_cutoff should be between 0.99 and 1.0,\n domain of effective xsc / distribution should match,\n if identical particles distribution should only be for 0->muCutoff, otherwise -1->muCutoff\n distribution should be normalized\n If the cross section goes negative, also check that the pure Coulomb portion is large enough to 'win'\n :return:\n " return [] raise NotImplementedError<|docstring|>Things to check: mu_cutoff should be between 0.99 and 1.0, domain of effective xsc / distribution should match, if identical particles distribution should only be for 0->muCutoff, otherwise -1->muCutoff distribution should be normalized If the cross section goes negative, also check that the pure Coulomb portion is large enough to 'win' :return:<|endoftext|>
86875235d2405ffeb80d36e99e5045a2b72d31e02fd9dc9b0f083e058b8253dd
def dSigma_dMu(self, energy, accuracy=0.001, muMax=None): '\n Returns d(Sigma)/d(mu) at the specified incident energy.\n\n :param energy: Energy of the projectile.\n :param accuracy: Currently not used. Only need to be compatible with other *dSigma_dMu* methods.\n :param muMax: Slices the upper domain mu to this value.\n\n :return: d(Sigma)/d(mu) at *energy*.\n ' distribution = self.distribution.data.evaluate(energy) if (muMax is not None): distribution = distribution.domainSlice(domainMax=muMax) _dSigma_dMu = (self.crossSection.data.evaluate(energy) * distribution) _dSigma_dMu.axes = self.defaultAxes(self.crossSection.data.domainUnit) return _dSigma_dMu
Returns d(Sigma)/d(mu) at the specified incident energy. :param energy: Energy of the projectile. :param accuracy: Currently not used. Only need to be compatible with other *dSigma_dMu* methods. :param muMax: Slices the upper domain mu to this value. :return: d(Sigma)/d(mu) at *energy*.
fudge/reactionData/doubleDifferentialCrossSection/chargedParticleElastic/nuclearPlusInterference.py
dSigma_dMu
brown170/fudge
14
python
def dSigma_dMu(self, energy, accuracy=0.001, muMax=None): '\n Returns d(Sigma)/d(mu) at the specified incident energy.\n\n :param energy: Energy of the projectile.\n :param accuracy: Currently not used. Only need to be compatible with other *dSigma_dMu* methods.\n :param muMax: Slices the upper domain mu to this value.\n\n :return: d(Sigma)/d(mu) at *energy*.\n ' distribution = self.distribution.data.evaluate(energy) if (muMax is not None): distribution = distribution.domainSlice(domainMax=muMax) _dSigma_dMu = (self.crossSection.data.evaluate(energy) * distribution) _dSigma_dMu.axes = self.defaultAxes(self.crossSection.data.domainUnit) return _dSigma_dMu
def dSigma_dMu(self, energy, accuracy=0.001, muMax=None): '\n Returns d(Sigma)/d(mu) at the specified incident energy.\n\n :param energy: Energy of the projectile.\n :param accuracy: Currently not used. Only need to be compatible with other *dSigma_dMu* methods.\n :param muMax: Slices the upper domain mu to this value.\n\n :return: d(Sigma)/d(mu) at *energy*.\n ' distribution = self.distribution.data.evaluate(energy) if (muMax is not None): distribution = distribution.domainSlice(domainMax=muMax) _dSigma_dMu = (self.crossSection.data.evaluate(energy) * distribution) _dSigma_dMu.axes = self.defaultAxes(self.crossSection.data.domainUnit) return _dSigma_dMu<|docstring|>Returns d(Sigma)/d(mu) at the specified incident energy. :param energy: Energy of the projectile. :param accuracy: Currently not used. Only need to be compatible with other *dSigma_dMu* methods. :param muMax: Slices the upper domain mu to this value. :return: d(Sigma)/d(mu) at *energy*.<|endoftext|>
243a55e4cf3a19e139cee3e6483fdd46bfe1ff3e35d4b4eac806a69cda62d6a2
def evaluate(self, E, mu, phi=0.0): '\n :param E: incident energy\n :param mu: scattering angle cosine\n :return: differential cross section at E,mu (integrated over phi) in b/sr\n ' if self.identicalParticles: mu = abs(mu) if (mu > self.muCutoff): return 0 if (E < self.crossSection.data.domainMin): return 0 if (E > self.crossSection.data.domainMax): raise ValueError(('Attempted to evaluate at %s, outside of domain limit %s %s' % (E, self.crossSection.data.domainMax, self.crossSection.data.domainUnit))) angular = self.distribution.data.evaluate(E) return ((abs(self.crossSection.data.evaluate(E)) * angular.evaluate(mu)) / (2 * math.pi))
:param E: incident energy :param mu: scattering angle cosine :return: differential cross section at E,mu (integrated over phi) in b/sr
fudge/reactionData/doubleDifferentialCrossSection/chargedParticleElastic/nuclearPlusInterference.py
evaluate
brown170/fudge
14
python
def evaluate(self, E, mu, phi=0.0): '\n :param E: incident energy\n :param mu: scattering angle cosine\n :return: differential cross section at E,mu (integrated over phi) in b/sr\n ' if self.identicalParticles: mu = abs(mu) if (mu > self.muCutoff): return 0 if (E < self.crossSection.data.domainMin): return 0 if (E > self.crossSection.data.domainMax): raise ValueError(('Attempted to evaluate at %s, outside of domain limit %s %s' % (E, self.crossSection.data.domainMax, self.crossSection.data.domainUnit))) angular = self.distribution.data.evaluate(E) return ((abs(self.crossSection.data.evaluate(E)) * angular.evaluate(mu)) / (2 * math.pi))
def evaluate(self, E, mu, phi=0.0): '\n :param E: incident energy\n :param mu: scattering angle cosine\n :return: differential cross section at E,mu (integrated over phi) in b/sr\n ' if self.identicalParticles: mu = abs(mu) if (mu > self.muCutoff): return 0 if (E < self.crossSection.data.domainMin): return 0 if (E > self.crossSection.data.domainMax): raise ValueError(('Attempted to evaluate at %s, outside of domain limit %s %s' % (E, self.crossSection.data.domainMax, self.crossSection.data.domainUnit))) angular = self.distribution.data.evaluate(E) return ((abs(self.crossSection.data.evaluate(E)) * angular.evaluate(mu)) / (2 * math.pi))<|docstring|>:param E: incident energy :param mu: scattering angle cosine :return: differential cross section at E,mu (integrated over phi) in b/sr<|endoftext|>
979dd2e27479b7b4ddd8f1f543ae6a595aff4b32cf6e90ee92ac259fb91cec37
@overload(reduce) def numba_reduce(reduce_op, x, axis, keepdims=False) -> np.ndarray: 'Reduce an array along the given axes.\n\n Parameters\n ----------\n reduce_op\n The reduce operation to call for each element of the output array. The input to\n reduce_op is a flattened, contigous array representing the window upon which\n reduce_op operates. It returns a scalar.\n x : np.ndarray\n The array to reduce.\n axis : ArrayLike\n The axis along which to reduce. This can be multiple axes.\n keepdims : bool\n If ``True``, keep the dimensions along which the array was reduced. If ``False``\n squeeze the output array. Currently only ``True`` is supported.\n\n Returns\n -------\n out_array : np.ndarray\n The reduced array.\n\n ' @register_jitable def impl_keepdims(reduce_op, x, axis, keepdims=False): axis = np.atleast_1d(np.asarray(axis)) mask = np.zeros(x.ndim, dtype=np.bool8) mask[axis] = True original_shape = np.array(x.shape) squeezed_shape = original_shape[(~ mask)] new_axes = (- np.arange(1, (axis.size + 1))) x_work = np.moveaxis(x, axis, new_axes) x_work = np.ascontiguousarray(x_work) total_reduce = np.prod(original_shape[axis]) total_keep = np.prod(squeezed_shape) tmp_shape = to_fixed_tuple(np.array((total_keep, total_reduce)), 2) x_work = np.reshape(x_work, tmp_shape) result = np.empty((total_keep,), dtype=x_work.dtype) for idx in range(result.size): result[idx] = reduce_op(x_work[(idx, ...)]) new_shape = original_shape.copy() new_shape[axis] = 1 new_shape_tuple = to_fixed_tuple(new_shape, x.ndim) return np.reshape(result, new_shape_tuple) @register_jitable def impl_dropdims(reduce_op, x, axis, keepdims=False): axis = np.atleast_1d(np.asarray(axis)) if (axis.size > 1): raise NotImplementedError("Numba can't np.squeeze yet.") result = impl_keepdims(reduce_op, x, axis) result = np.moveaxis(result, axis, 0) return result[(0, ...)] if numba.literally(keepdims).literal_value: return impl_keepdims else: return impl_dropdims
Reduce an array along the given axes. Parameters ---------- reduce_op The reduce operation to call for each element of the output array. The input to reduce_op is a flattened, contigous array representing the window upon which reduce_op operates. It returns a scalar. x : np.ndarray The array to reduce. axis : ArrayLike The axis along which to reduce. This can be multiple axes. keepdims : bool If ``True``, keep the dimensions along which the array was reduced. If ``False`` squeeze the output array. Currently only ``True`` is supported. Returns ------- out_array : np.ndarray The reduced array.
skbot/transform/_utils.py
numba_reduce
FirefoxMetzger/ropy
6
python
@overload(reduce) def numba_reduce(reduce_op, x, axis, keepdims=False) -> np.ndarray: 'Reduce an array along the given axes.\n\n Parameters\n ----------\n reduce_op\n The reduce operation to call for each element of the output array. The input to\n reduce_op is a flattened, contigous array representing the window upon which\n reduce_op operates. It returns a scalar.\n x : np.ndarray\n The array to reduce.\n axis : ArrayLike\n The axis along which to reduce. This can be multiple axes.\n keepdims : bool\n If ``True``, keep the dimensions along which the array was reduced. If ``False``\n squeeze the output array. Currently only ``True`` is supported.\n\n Returns\n -------\n out_array : np.ndarray\n The reduced array.\n\n ' @register_jitable def impl_keepdims(reduce_op, x, axis, keepdims=False): axis = np.atleast_1d(np.asarray(axis)) mask = np.zeros(x.ndim, dtype=np.bool8) mask[axis] = True original_shape = np.array(x.shape) squeezed_shape = original_shape[(~ mask)] new_axes = (- np.arange(1, (axis.size + 1))) x_work = np.moveaxis(x, axis, new_axes) x_work = np.ascontiguousarray(x_work) total_reduce = np.prod(original_shape[axis]) total_keep = np.prod(squeezed_shape) tmp_shape = to_fixed_tuple(np.array((total_keep, total_reduce)), 2) x_work = np.reshape(x_work, tmp_shape) result = np.empty((total_keep,), dtype=x_work.dtype) for idx in range(result.size): result[idx] = reduce_op(x_work[(idx, ...)]) new_shape = original_shape.copy() new_shape[axis] = 1 new_shape_tuple = to_fixed_tuple(new_shape, x.ndim) return np.reshape(result, new_shape_tuple) @register_jitable def impl_dropdims(reduce_op, x, axis, keepdims=False): axis = np.atleast_1d(np.asarray(axis)) if (axis.size > 1): raise NotImplementedError("Numba can't np.squeeze yet.") result = impl_keepdims(reduce_op, x, axis) result = np.moveaxis(result, axis, 0) return result[(0, ...)] if numba.literally(keepdims).literal_value: return impl_keepdims else: return impl_dropdims
@overload(reduce) def numba_reduce(reduce_op, x, axis, keepdims=False) -> np.ndarray: 'Reduce an array along the given axes.\n\n Parameters\n ----------\n reduce_op\n The reduce operation to call for each element of the output array. The input to\n reduce_op is a flattened, contigous array representing the window upon which\n reduce_op operates. It returns a scalar.\n x : np.ndarray\n The array to reduce.\n axis : ArrayLike\n The axis along which to reduce. This can be multiple axes.\n keepdims : bool\n If ``True``, keep the dimensions along which the array was reduced. If ``False``\n squeeze the output array. Currently only ``True`` is supported.\n\n Returns\n -------\n out_array : np.ndarray\n The reduced array.\n\n ' @register_jitable def impl_keepdims(reduce_op, x, axis, keepdims=False): axis = np.atleast_1d(np.asarray(axis)) mask = np.zeros(x.ndim, dtype=np.bool8) mask[axis] = True original_shape = np.array(x.shape) squeezed_shape = original_shape[(~ mask)] new_axes = (- np.arange(1, (axis.size + 1))) x_work = np.moveaxis(x, axis, new_axes) x_work = np.ascontiguousarray(x_work) total_reduce = np.prod(original_shape[axis]) total_keep = np.prod(squeezed_shape) tmp_shape = to_fixed_tuple(np.array((total_keep, total_reduce)), 2) x_work = np.reshape(x_work, tmp_shape) result = np.empty((total_keep,), dtype=x_work.dtype) for idx in range(result.size): result[idx] = reduce_op(x_work[(idx, ...)]) new_shape = original_shape.copy() new_shape[axis] = 1 new_shape_tuple = to_fixed_tuple(new_shape, x.ndim) return np.reshape(result, new_shape_tuple) @register_jitable def impl_dropdims(reduce_op, x, axis, keepdims=False): axis = np.atleast_1d(np.asarray(axis)) if (axis.size > 1): raise NotImplementedError("Numba can't np.squeeze yet.") result = impl_keepdims(reduce_op, x, axis) result = np.moveaxis(result, axis, 0) return result[(0, ...)] if numba.literally(keepdims).literal_value: return impl_keepdims else: return impl_dropdims<|docstring|>Reduce an array along the given 
axes. Parameters ---------- reduce_op The reduce operation to call for each element of the output array. The input to reduce_op is a flattened, contigous array representing the window upon which reduce_op operates. It returns a scalar. x : np.ndarray The array to reduce. axis : ArrayLike The axis along which to reduce. This can be multiple axes. keepdims : bool If ``True``, keep the dimensions along which the array was reduced. If ``False`` squeeze the output array. Currently only ``True`` is supported. Returns ------- out_array : np.ndarray The reduced array.<|endoftext|>
74dc81df9b353e24c455a2e537cc1ed2414176e5c1c5e64ac1868f202372fe38
@numba.jit(nopython=True, cache=True) def _vector_project_impl(a: ArrayLike, b: ArrayLike) -> np.ndarray: 'Implementation of vector_project\n\n See vector_project for documentation. This function differs in that it\n assumes that axis=-1.\n\n Notes\n -----\n This function exists to help numba with caching.\n\n ' numerator = reduce(np.sum, (a * b), axis=(- 1), keepdims=True) denominator = reduce(np.sum, (b * b), axis=(- 1), keepdims=True) return ((numerator / denominator) * b)
Implementation of vector_project See vector_project for documentation. This function differs in that it assumes that axis=-1. Notes ----- This function exists to help numba with caching.
skbot/transform/_utils.py
_vector_project_impl
FirefoxMetzger/ropy
6
python
@numba.jit(nopython=True, cache=True) def _vector_project_impl(a: ArrayLike, b: ArrayLike) -> np.ndarray: 'Implementation of vector_project\n\n See vector_project for documentation. This function differs in that it\n assumes that axis=-1.\n\n Notes\n -----\n This function exists to help numba with caching.\n\n ' numerator = reduce(np.sum, (a * b), axis=(- 1), keepdims=True) denominator = reduce(np.sum, (b * b), axis=(- 1), keepdims=True) return ((numerator / denominator) * b)
@numba.jit(nopython=True, cache=True) def _vector_project_impl(a: ArrayLike, b: ArrayLike) -> np.ndarray: 'Implementation of vector_project\n\n See vector_project for documentation. This function differs in that it\n assumes that axis=-1.\n\n Notes\n -----\n This function exists to help numba with caching.\n\n ' numerator = reduce(np.sum, (a * b), axis=(- 1), keepdims=True) denominator = reduce(np.sum, (b * b), axis=(- 1), keepdims=True) return ((numerator / denominator) * b)<|docstring|>Implementation of vector_project See vector_project for documentation. This function differs in that it assumes that axis=-1. Notes ----- This function exists to help numba with caching.<|endoftext|>
573736495314bc2498368f63c667a6f0bd8c80166ea2daa658e9e6dc469e6f49
def vector_project(a: ArrayLike, b: ArrayLike, axis: int=(- 1)) -> np.ndarray: 'Returns the components of each a along each b.\n\n Parameters\n ----------\n a : ArrayLike\n A batch of vectors to be projected.\n b : ArrayLike\n A batch of vectors that are being projected onto.\n axis : int\n The data axis of the batches, i.e., along which axis to compute.\n\n Returns\n -------\n result : ndarray\n A batch of vectors of shape [a.batch_dims, b.batch_dims].\n\n\n Notes\n -----\n The function assumes that a and b are broadcastable.\n\n ' a = np.moveaxis(a, axis, (- 1)) b = np.moveaxis(b, axis, (- 1)) a = np.ascontiguousarray(a).view() b = np.ascontiguousarray(b).view() a.flags.writeable = False b.flags.writeable = False result = _vector_project_impl(a, b) result = np.moveaxis(result, (- 1), axis) return result
Returns the components of each a along each b. Parameters ---------- a : ArrayLike A batch of vectors to be projected. b : ArrayLike A batch of vectors that are being projected onto. axis : int The data axis of the batches, i.e., along which axis to compute. Returns ------- result : ndarray A batch of vectors of shape [a.batch_dims, b.batch_dims]. Notes ----- The function assumes that a and b are broadcastable.
skbot/transform/_utils.py
vector_project
FirefoxMetzger/ropy
6
python
def vector_project(a: ArrayLike, b: ArrayLike, axis: int=(- 1)) -> np.ndarray: 'Returns the components of each a along each b.\n\n Parameters\n ----------\n a : ArrayLike\n A batch of vectors to be projected.\n b : ArrayLike\n A batch of vectors that are being projected onto.\n axis : int\n The data axis of the batches, i.e., along which axis to compute.\n\n Returns\n -------\n result : ndarray\n A batch of vectors of shape [a.batch_dims, b.batch_dims].\n\n\n Notes\n -----\n The function assumes that a and b are broadcastable.\n\n ' a = np.moveaxis(a, axis, (- 1)) b = np.moveaxis(b, axis, (- 1)) a = np.ascontiguousarray(a).view() b = np.ascontiguousarray(b).view() a.flags.writeable = False b.flags.writeable = False result = _vector_project_impl(a, b) result = np.moveaxis(result, (- 1), axis) return result
def vector_project(a: ArrayLike, b: ArrayLike, axis: int=(- 1)) -> np.ndarray: 'Returns the components of each a along each b.\n\n Parameters\n ----------\n a : ArrayLike\n A batch of vectors to be projected.\n b : ArrayLike\n A batch of vectors that are being projected onto.\n axis : int\n The data axis of the batches, i.e., along which axis to compute.\n\n Returns\n -------\n result : ndarray\n A batch of vectors of shape [a.batch_dims, b.batch_dims].\n\n\n Notes\n -----\n The function assumes that a and b are broadcastable.\n\n ' a = np.moveaxis(a, axis, (- 1)) b = np.moveaxis(b, axis, (- 1)) a = np.ascontiguousarray(a).view() b = np.ascontiguousarray(b).view() a.flags.writeable = False b.flags.writeable = False result = _vector_project_impl(a, b) result = np.moveaxis(result, (- 1), axis) return result<|docstring|>Returns the components of each a along each b. Parameters ---------- a : ArrayLike A batch of vectors to be projected. b : ArrayLike A batch of vectors that are being projected onto. axis : int The data axis of the batches, i.e., along which axis to compute. Returns ------- result : ndarray A batch of vectors of shape [a.batch_dims, b.batch_dims]. Notes ----- The function assumes that a and b are broadcastable.<|endoftext|>
3c08aff622b6b920a49f78760fc4c1ba75749695f6e4e6145583c67d6e06a407
def scalar_project(a: ArrayLike, b: ArrayLike, *, axis: int=(- 1), keepdims=False) -> np.ndarray: 'Returns the length of the components of each a along each b.' projected = vector_project(a, b, axis=axis) magnitude = np.linalg.norm(projected, axis=axis, keepdims=keepdims) sign = np.sign(np.sum((projected * b), axis=axis, keepdims=keepdims)) return (sign * magnitude)
Returns the length of the components of each a along each b.
skbot/transform/_utils.py
scalar_project
FirefoxMetzger/ropy
6
python
def scalar_project(a: ArrayLike, b: ArrayLike, *, axis: int=(- 1), keepdims=False) -> np.ndarray: projected = vector_project(a, b, axis=axis) magnitude = np.linalg.norm(projected, axis=axis, keepdims=keepdims) sign = np.sign(np.sum((projected * b), axis=axis, keepdims=keepdims)) return (sign * magnitude)
def scalar_project(a: ArrayLike, b: ArrayLike, *, axis: int=(- 1), keepdims=False) -> np.ndarray: projected = vector_project(a, b, axis=axis) magnitude = np.linalg.norm(projected, axis=axis, keepdims=keepdims) sign = np.sign(np.sum((projected * b), axis=axis, keepdims=keepdims)) return (sign * magnitude)<|docstring|>Returns the length of the components of each a along each b.<|endoftext|>
76eadd1f27737a14e63e7d50b252a889e4e7e72314a877d09e843838a8c4698a
def angle_between(vec_a: ArrayLike, vec_b: ArrayLike, *, axis: int=(- 1), eps=1e-10) -> np.ndarray: 'Computes the angle from a to b (in a right-handed frame)\n\n Notes\n -----\n Implementation is based on this post:\n https://scicomp.stackexchange.com/a/27694\n ' vec_a = np.asarray(vec_a)[(None, :)] vec_b = np.asarray(vec_b)[(None, :)] if (axis >= 0): axis += 1 len_c = np.linalg.norm((vec_a - vec_b), axis=axis) len_a = np.linalg.norm(vec_a, axis=axis) len_b = np.linalg.norm(vec_b, axis=axis) mask = (len_a >= len_b) tmp = np.where(mask, len_a, len_b) np.putmask(len_b, (~ mask), len_a) len_a = tmp mask = (len_c > len_b) mu = np.where(mask, (len_b - (len_a - len_c)), (len_c - (len_a - len_b))) mask = (np.abs(mu) < eps) mu = np.where(mask, 0, mu) numerator = (((len_a - len_b) + len_c) * mu) denominator = ((len_a + (len_b + len_c)) * ((len_a - len_c) + len_b)) mask = (denominator > eps) angle = np.divide(numerator, denominator, where=mask) np.sqrt(angle, out=angle) np.arctan(angle, out=angle) angle *= 2 np.putmask(angle, (~ mask), np.pi) return angle[0]
Computes the angle from a to b (in a right-handed frame) Notes ----- Implementation is based on this post: https://scicomp.stackexchange.com/a/27694
skbot/transform/_utils.py
angle_between
FirefoxMetzger/ropy
6
python
def angle_between(vec_a: ArrayLike, vec_b: ArrayLike, *, axis: int=(- 1), eps=1e-10) -> np.ndarray: 'Computes the angle from a to b (in a right-handed frame)\n\n Notes\n -----\n Implementation is based on this post:\n https://scicomp.stackexchange.com/a/27694\n ' vec_a = np.asarray(vec_a)[(None, :)] vec_b = np.asarray(vec_b)[(None, :)] if (axis >= 0): axis += 1 len_c = np.linalg.norm((vec_a - vec_b), axis=axis) len_a = np.linalg.norm(vec_a, axis=axis) len_b = np.linalg.norm(vec_b, axis=axis) mask = (len_a >= len_b) tmp = np.where(mask, len_a, len_b) np.putmask(len_b, (~ mask), len_a) len_a = tmp mask = (len_c > len_b) mu = np.where(mask, (len_b - (len_a - len_c)), (len_c - (len_a - len_b))) mask = (np.abs(mu) < eps) mu = np.where(mask, 0, mu) numerator = (((len_a - len_b) + len_c) * mu) denominator = ((len_a + (len_b + len_c)) * ((len_a - len_c) + len_b)) mask = (denominator > eps) angle = np.divide(numerator, denominator, where=mask) np.sqrt(angle, out=angle) np.arctan(angle, out=angle) angle *= 2 np.putmask(angle, (~ mask), np.pi) return angle[0]
def angle_between(vec_a: ArrayLike, vec_b: ArrayLike, *, axis: int=(- 1), eps=1e-10) -> np.ndarray: 'Computes the angle from a to b (in a right-handed frame)\n\n Notes\n -----\n Implementation is based on this post:\n https://scicomp.stackexchange.com/a/27694\n ' vec_a = np.asarray(vec_a)[(None, :)] vec_b = np.asarray(vec_b)[(None, :)] if (axis >= 0): axis += 1 len_c = np.linalg.norm((vec_a - vec_b), axis=axis) len_a = np.linalg.norm(vec_a, axis=axis) len_b = np.linalg.norm(vec_b, axis=axis) mask = (len_a >= len_b) tmp = np.where(mask, len_a, len_b) np.putmask(len_b, (~ mask), len_a) len_a = tmp mask = (len_c > len_b) mu = np.where(mask, (len_b - (len_a - len_c)), (len_c - (len_a - len_b))) mask = (np.abs(mu) < eps) mu = np.where(mask, 0, mu) numerator = (((len_a - len_b) + len_c) * mu) denominator = ((len_a + (len_b + len_c)) * ((len_a - len_c) + len_b)) mask = (denominator > eps) angle = np.divide(numerator, denominator, where=mask) np.sqrt(angle, out=angle) np.arctan(angle, out=angle) angle *= 2 np.putmask(angle, (~ mask), np.pi) return angle[0]<|docstring|>Computes the angle from a to b (in a right-handed frame) Notes ----- Implementation is based on this post: https://scicomp.stackexchange.com/a/27694<|endoftext|>
252a200444db85248f225185654fe6cb11be2577d100f5c0a344f765af80af8c
def create_netlist(self): ' Calls all functions related to the generation of the netlist ' self.add_pins() self.determine_tx_mults() self.add_ptx() self.create_ptx()
Calls all functions related to the generation of the netlist
compiler/pgates/pinv.py
create_netlist
bkoppelmann/OpenRAM
43
python
def create_netlist(self): ' ' self.add_pins() self.determine_tx_mults() self.add_ptx() self.create_ptx()
def create_netlist(self): ' ' self.add_pins() self.determine_tx_mults() self.add_ptx() self.create_ptx()<|docstring|>Calls all functions related to the generation of the netlist<|endoftext|>
6f536ec79f90ed4b28a4f64d919b123330e8f0e84419d1d017fc8b9b3e7a58e1
def create_layout(self): ' Calls all functions related to the generation of the layout ' self.setup_layout_constants() self.route_supply_rails() self.place_ptx() self.add_well_contacts() self.extend_wells(self.well_pos) self.connect_rails() self.route_input_gate(self.pmos_inst, self.nmos_inst, self.output_pos.y, 'A', position='farleft') self.route_outputs()
Calls all functions related to the generation of the layout
compiler/pgates/pinv.py
create_layout
bkoppelmann/OpenRAM
43
python
def create_layout(self): ' ' self.setup_layout_constants() self.route_supply_rails() self.place_ptx() self.add_well_contacts() self.extend_wells(self.well_pos) self.connect_rails() self.route_input_gate(self.pmos_inst, self.nmos_inst, self.output_pos.y, 'A', position='farleft') self.route_outputs()
def create_layout(self): ' ' self.setup_layout_constants() self.route_supply_rails() self.place_ptx() self.add_well_contacts() self.extend_wells(self.well_pos) self.connect_rails() self.route_input_gate(self.pmos_inst, self.nmos_inst, self.output_pos.y, 'A', position='farleft') self.route_outputs()<|docstring|>Calls all functions related to the generation of the layout<|endoftext|>
d4ed5c3647b22a66073e4e9d9578883a16c683239317fc5ee1ac98edfb724852
def add_pins(self): ' Adds pins for spice netlist ' pin_list = ['A', 'Z', 'vdd', 'gnd'] dir_list = ['INPUT', 'OUTPUT', 'POWER', 'GROUND'] self.add_pin_list(pin_list, dir_list)
Adds pins for spice netlist
compiler/pgates/pinv.py
add_pins
bkoppelmann/OpenRAM
43
python
def add_pins(self): ' ' pin_list = ['A', 'Z', 'vdd', 'gnd'] dir_list = ['INPUT', 'OUTPUT', 'POWER', 'GROUND'] self.add_pin_list(pin_list, dir_list)
def add_pins(self): ' ' pin_list = ['A', 'Z', 'vdd', 'gnd'] dir_list = ['INPUT', 'OUTPUT', 'POWER', 'GROUND'] self.add_pin_list(pin_list, dir_list)<|docstring|>Adds pins for spice netlist<|endoftext|>
8b334fb41da727bb812228b7ce5054821fef8278a8113cbc661e3a32866f3c5e
def determine_tx_mults(self): '\n Determines the number of fingers needed to achieve the size within\n the height constraint. This may fail if the user has a tight height.\n ' if OPTS.netlist_only: self.tx_mults = 1 self.nmos_width = (self.nmos_size * drc('minwidth_tx')) self.pmos_width = (self.pmos_size * drc('minwidth_tx')) return nmos = factory.create(module_type='ptx', tx_type='nmos') pmos = factory.create(module_type='ptx', width=drc('minwidth_tx'), tx_type='pmos') tx_height = (nmos.poly_height + pmos.poly_height) min_channel = max((contact.poly.width + self.m1_space), (contact.poly.width + (2 * drc('poly_to_active')))) extra_contact_space = max((- nmos.get_pin('D').by()), 0) self.top_bottom_space = max((((0.5 * self.m1_width) + self.m1_space) + extra_contact_space), drc('poly_extend_active'), self.poly_space) total_height = ((tx_height + min_channel) + (2 * self.top_bottom_space)) debug.check((self.height > total_height), 'Cell height {0} too small for simple min height {1}.'.format(self.height, total_height)) tx_height_available = ((self.height - min_channel) - (2 * self.top_bottom_space)) nmos_height_available = ((0.5 * tx_height_available) - (0.5 * drc('poly_to_poly'))) pmos_height_available = ((0.5 * tx_height_available) - (0.5 * drc('poly_to_poly'))) debug.info(2, 'Height avail {0:.4f} PMOS {1:.4f} NMOS {2:.4f}'.format(tx_height_available, nmos_height_available, pmos_height_available)) self.nmos_width = (self.nmos_size * drc('minwidth_tx')) self.pmos_width = (self.pmos_size * drc('minwidth_tx')) nmos_required_mults = max(int(ceil((self.nmos_width / nmos_height_available))), 1) pmos_required_mults = max(int(ceil((self.pmos_width / pmos_height_available))), 1) self.tx_mults = max(nmos_required_mults, pmos_required_mults) self.nmos_width = round_to_grid((self.nmos_width / self.tx_mults)) debug.check((self.nmos_width >= drc('minwidth_tx')), 'Cannot finger NMOS transistors to fit cell height.') self.pmos_width = round_to_grid((self.pmos_width / 
self.tx_mults)) debug.check((self.pmos_width >= drc('minwidth_tx')), 'Cannot finger PMOS transistors to fit cell height.')
Determines the number of fingers needed to achieve the size within the height constraint. This may fail if the user has a tight height.
compiler/pgates/pinv.py
determine_tx_mults
bkoppelmann/OpenRAM
43
python
def determine_tx_mults(self): '\n Determines the number of fingers needed to achieve the size within\n the height constraint. This may fail if the user has a tight height.\n ' if OPTS.netlist_only: self.tx_mults = 1 self.nmos_width = (self.nmos_size * drc('minwidth_tx')) self.pmos_width = (self.pmos_size * drc('minwidth_tx')) return nmos = factory.create(module_type='ptx', tx_type='nmos') pmos = factory.create(module_type='ptx', width=drc('minwidth_tx'), tx_type='pmos') tx_height = (nmos.poly_height + pmos.poly_height) min_channel = max((contact.poly.width + self.m1_space), (contact.poly.width + (2 * drc('poly_to_active')))) extra_contact_space = max((- nmos.get_pin('D').by()), 0) self.top_bottom_space = max((((0.5 * self.m1_width) + self.m1_space) + extra_contact_space), drc('poly_extend_active'), self.poly_space) total_height = ((tx_height + min_channel) + (2 * self.top_bottom_space)) debug.check((self.height > total_height), 'Cell height {0} too small for simple min height {1}.'.format(self.height, total_height)) tx_height_available = ((self.height - min_channel) - (2 * self.top_bottom_space)) nmos_height_available = ((0.5 * tx_height_available) - (0.5 * drc('poly_to_poly'))) pmos_height_available = ((0.5 * tx_height_available) - (0.5 * drc('poly_to_poly'))) debug.info(2, 'Height avail {0:.4f} PMOS {1:.4f} NMOS {2:.4f}'.format(tx_height_available, nmos_height_available, pmos_height_available)) self.nmos_width = (self.nmos_size * drc('minwidth_tx')) self.pmos_width = (self.pmos_size * drc('minwidth_tx')) nmos_required_mults = max(int(ceil((self.nmos_width / nmos_height_available))), 1) pmos_required_mults = max(int(ceil((self.pmos_width / pmos_height_available))), 1) self.tx_mults = max(nmos_required_mults, pmos_required_mults) self.nmos_width = round_to_grid((self.nmos_width / self.tx_mults)) debug.check((self.nmos_width >= drc('minwidth_tx')), 'Cannot finger NMOS transistors to fit cell height.') self.pmos_width = round_to_grid((self.pmos_width / 
self.tx_mults)) debug.check((self.pmos_width >= drc('minwidth_tx')), 'Cannot finger PMOS transistors to fit cell height.')
def determine_tx_mults(self): '\n Determines the number of fingers needed to achieve the size within\n the height constraint. This may fail if the user has a tight height.\n ' if OPTS.netlist_only: self.tx_mults = 1 self.nmos_width = (self.nmos_size * drc('minwidth_tx')) self.pmos_width = (self.pmos_size * drc('minwidth_tx')) return nmos = factory.create(module_type='ptx', tx_type='nmos') pmos = factory.create(module_type='ptx', width=drc('minwidth_tx'), tx_type='pmos') tx_height = (nmos.poly_height + pmos.poly_height) min_channel = max((contact.poly.width + self.m1_space), (contact.poly.width + (2 * drc('poly_to_active')))) extra_contact_space = max((- nmos.get_pin('D').by()), 0) self.top_bottom_space = max((((0.5 * self.m1_width) + self.m1_space) + extra_contact_space), drc('poly_extend_active'), self.poly_space) total_height = ((tx_height + min_channel) + (2 * self.top_bottom_space)) debug.check((self.height > total_height), 'Cell height {0} too small for simple min height {1}.'.format(self.height, total_height)) tx_height_available = ((self.height - min_channel) - (2 * self.top_bottom_space)) nmos_height_available = ((0.5 * tx_height_available) - (0.5 * drc('poly_to_poly'))) pmos_height_available = ((0.5 * tx_height_available) - (0.5 * drc('poly_to_poly'))) debug.info(2, 'Height avail {0:.4f} PMOS {1:.4f} NMOS {2:.4f}'.format(tx_height_available, nmos_height_available, pmos_height_available)) self.nmos_width = (self.nmos_size * drc('minwidth_tx')) self.pmos_width = (self.pmos_size * drc('minwidth_tx')) nmos_required_mults = max(int(ceil((self.nmos_width / nmos_height_available))), 1) pmos_required_mults = max(int(ceil((self.pmos_width / pmos_height_available))), 1) self.tx_mults = max(nmos_required_mults, pmos_required_mults) self.nmos_width = round_to_grid((self.nmos_width / self.tx_mults)) debug.check((self.nmos_width >= drc('minwidth_tx')), 'Cannot finger NMOS transistors to fit cell height.') self.pmos_width = round_to_grid((self.pmos_width / 
self.tx_mults)) debug.check((self.pmos_width >= drc('minwidth_tx')), 'Cannot finger PMOS transistors to fit cell height.')<|docstring|>Determines the number of fingers needed to achieve the size within the height constraint. This may fail if the user has a tight height.<|endoftext|>
5ba9e7429159d12c66ce9998d41919b38d88e4c69343e6247ef9d280dbfe6148
def setup_layout_constants(self): '\n Pre-compute some handy layout parameters.\n ' self.well_width = (((self.pmos.active_width + self.pmos.active_contact.width) + drc('active_to_body_active')) + (2 * drc('well_enclosure_active'))) self.width = self.well_width
Pre-compute some handy layout parameters.
compiler/pgates/pinv.py
setup_layout_constants
bkoppelmann/OpenRAM
43
python
def setup_layout_constants(self): '\n \n ' self.well_width = (((self.pmos.active_width + self.pmos.active_contact.width) + drc('active_to_body_active')) + (2 * drc('well_enclosure_active'))) self.width = self.well_width
def setup_layout_constants(self): '\n \n ' self.well_width = (((self.pmos.active_width + self.pmos.active_contact.width) + drc('active_to_body_active')) + (2 * drc('well_enclosure_active'))) self.width = self.well_width<|docstring|>Pre-compute some handy layout parameters.<|endoftext|>
4baab1bc32b1d06e3e34a351e60ce50811624ae3ecc69393d9cd8e6aa50338fb
def add_ptx(self): ' Create the PMOS and NMOS transistors. ' self.nmos = factory.create(module_type='ptx', width=self.nmos_width, mults=self.tx_mults, tx_type='nmos', connect_poly=True, connect_active=True) self.add_mod(self.nmos) self.pmos = factory.create(module_type='ptx', width=self.pmos_width, mults=self.tx_mults, tx_type='pmos', connect_poly=True, connect_active=True) self.add_mod(self.pmos)
Create the PMOS and NMOS transistors.
compiler/pgates/pinv.py
add_ptx
bkoppelmann/OpenRAM
43
python
def add_ptx(self): ' ' self.nmos = factory.create(module_type='ptx', width=self.nmos_width, mults=self.tx_mults, tx_type='nmos', connect_poly=True, connect_active=True) self.add_mod(self.nmos) self.pmos = factory.create(module_type='ptx', width=self.pmos_width, mults=self.tx_mults, tx_type='pmos', connect_poly=True, connect_active=True) self.add_mod(self.pmos)
def add_ptx(self): ' ' self.nmos = factory.create(module_type='ptx', width=self.nmos_width, mults=self.tx_mults, tx_type='nmos', connect_poly=True, connect_active=True) self.add_mod(self.nmos) self.pmos = factory.create(module_type='ptx', width=self.pmos_width, mults=self.tx_mults, tx_type='pmos', connect_poly=True, connect_active=True) self.add_mod(self.pmos)<|docstring|>Create the PMOS and NMOS transistors.<|endoftext|>
672001ad485092bbaf577eae1e21c18330d65095b804641c26707cb0560e6b5a
def route_supply_rails(self): ' Add vdd/gnd rails to the top and bottom. ' self.add_layout_pin_rect_center(text='gnd', layer='metal1', offset=vector((0.5 * self.width), 0), width=self.width) self.add_layout_pin_rect_center(text='vdd', layer='metal1', offset=vector((0.5 * self.width), self.height), width=self.width)
Add vdd/gnd rails to the top and bottom.
compiler/pgates/pinv.py
route_supply_rails
bkoppelmann/OpenRAM
43
python
def route_supply_rails(self): ' ' self.add_layout_pin_rect_center(text='gnd', layer='metal1', offset=vector((0.5 * self.width), 0), width=self.width) self.add_layout_pin_rect_center(text='vdd', layer='metal1', offset=vector((0.5 * self.width), self.height), width=self.width)
def route_supply_rails(self): ' ' self.add_layout_pin_rect_center(text='gnd', layer='metal1', offset=vector((0.5 * self.width), 0), width=self.width) self.add_layout_pin_rect_center(text='vdd', layer='metal1', offset=vector((0.5 * self.width), self.height), width=self.width)<|docstring|>Add vdd/gnd rails to the top and bottom.<|endoftext|>
a23871b277b17c81ea3ab5a6fef9fdbd742dceede28d1720ac92950dcae87058
def create_ptx(self): '\n Create the PMOS and NMOS netlist.\n ' self.pmos_inst = self.add_inst(name='pinv_pmos', mod=self.pmos) self.connect_inst(['Z', 'A', 'vdd', 'vdd']) self.nmos_inst = self.add_inst(name='pinv_nmos', mod=self.nmos) self.connect_inst(['Z', 'A', 'gnd', 'gnd'])
Create the PMOS and NMOS netlist.
compiler/pgates/pinv.py
create_ptx
bkoppelmann/OpenRAM
43
python
def create_ptx(self): '\n \n ' self.pmos_inst = self.add_inst(name='pinv_pmos', mod=self.pmos) self.connect_inst(['Z', 'A', 'vdd', 'vdd']) self.nmos_inst = self.add_inst(name='pinv_nmos', mod=self.nmos) self.connect_inst(['Z', 'A', 'gnd', 'gnd'])
def create_ptx(self): '\n \n ' self.pmos_inst = self.add_inst(name='pinv_pmos', mod=self.pmos) self.connect_inst(['Z', 'A', 'vdd', 'vdd']) self.nmos_inst = self.add_inst(name='pinv_nmos', mod=self.nmos) self.connect_inst(['Z', 'A', 'gnd', 'gnd'])<|docstring|>Create the PMOS and NMOS netlist.<|endoftext|>
ae6d65621b8057ee202ff4605fd4c0131383127c079924ef45c0d2b587dd7042
def place_ptx(self): '\n Place PMOS and NMOS to the layout at the upper-most and lowest position\n to provide maximum routing in channel\n ' self.pmos_pos = (self.pmos.active_offset.scale(1, 0) + vector(0, ((self.height - self.pmos.active_height) - self.top_bottom_space))) self.pmos_inst.place(self.pmos_pos) self.nmos_pos = (self.nmos.active_offset.scale(1, 0) + vector(0, self.top_bottom_space)) self.nmos_inst.place(self.nmos_pos) pmos_drain_pos = self.pmos_inst.get_pin('D').ll() nmos_drain_pos = self.nmos_inst.get_pin('D').ul() self.output_pos = vector(0, (0.5 * (pmos_drain_pos.y + nmos_drain_pos.y))) self.well_pos = vector(0, self.nmos_inst.uy())
Place PMOS and NMOS to the layout at the upper-most and lowest position to provide maximum routing in channel
compiler/pgates/pinv.py
place_ptx
bkoppelmann/OpenRAM
43
python
def place_ptx(self): '\n Place PMOS and NMOS to the layout at the upper-most and lowest position\n to provide maximum routing in channel\n ' self.pmos_pos = (self.pmos.active_offset.scale(1, 0) + vector(0, ((self.height - self.pmos.active_height) - self.top_bottom_space))) self.pmos_inst.place(self.pmos_pos) self.nmos_pos = (self.nmos.active_offset.scale(1, 0) + vector(0, self.top_bottom_space)) self.nmos_inst.place(self.nmos_pos) pmos_drain_pos = self.pmos_inst.get_pin('D').ll() nmos_drain_pos = self.nmos_inst.get_pin('D').ul() self.output_pos = vector(0, (0.5 * (pmos_drain_pos.y + nmos_drain_pos.y))) self.well_pos = vector(0, self.nmos_inst.uy())
def place_ptx(self): '\n Place PMOS and NMOS to the layout at the upper-most and lowest position\n to provide maximum routing in channel\n ' self.pmos_pos = (self.pmos.active_offset.scale(1, 0) + vector(0, ((self.height - self.pmos.active_height) - self.top_bottom_space))) self.pmos_inst.place(self.pmos_pos) self.nmos_pos = (self.nmos.active_offset.scale(1, 0) + vector(0, self.top_bottom_space)) self.nmos_inst.place(self.nmos_pos) pmos_drain_pos = self.pmos_inst.get_pin('D').ll() nmos_drain_pos = self.nmos_inst.get_pin('D').ul() self.output_pos = vector(0, (0.5 * (pmos_drain_pos.y + nmos_drain_pos.y))) self.well_pos = vector(0, self.nmos_inst.uy())<|docstring|>Place PMOS and NMOS to the layout at the upper-most and lowest position to provide maximum routing in channel<|endoftext|>
3d53cd397739b262006db68417c32a6015cfcd2acc2c8cc4e2d9992f6afe91aa
def route_outputs(self): '\n Route the output (drains) together.\n Optionally, routes output to edge.\n ' nmos_drain_pin = self.nmos_inst.get_pin('D') pmos_drain_pin = self.pmos_inst.get_pin('D') nmos_drain_pos = nmos_drain_pin.bc() pmos_drain_pos = vector(nmos_drain_pos.x, pmos_drain_pin.uc().y) self.add_path('metal1', [nmos_drain_pos, pmos_drain_pos]) mid_drain_offset = vector(nmos_drain_pos.x, self.output_pos.y) if self.route_output: output_offset = (mid_drain_offset.scale(0, 1) + vector(self.width, 0)) self.add_layout_pin_segment_center(text='Z', layer='metal1', start=mid_drain_offset, end=output_offset) else: self.add_layout_pin_rect_center(text='Z', layer='metal1', offset=(mid_drain_offset + vector((0.5 * self.m1_width), 0)))
Route the output (drains) together. Optionally, routes output to edge.
compiler/pgates/pinv.py
route_outputs
bkoppelmann/OpenRAM
43
python
def route_outputs(self): '\n Route the output (drains) together.\n Optionally, routes output to edge.\n ' nmos_drain_pin = self.nmos_inst.get_pin('D') pmos_drain_pin = self.pmos_inst.get_pin('D') nmos_drain_pos = nmos_drain_pin.bc() pmos_drain_pos = vector(nmos_drain_pos.x, pmos_drain_pin.uc().y) self.add_path('metal1', [nmos_drain_pos, pmos_drain_pos]) mid_drain_offset = vector(nmos_drain_pos.x, self.output_pos.y) if self.route_output: output_offset = (mid_drain_offset.scale(0, 1) + vector(self.width, 0)) self.add_layout_pin_segment_center(text='Z', layer='metal1', start=mid_drain_offset, end=output_offset) else: self.add_layout_pin_rect_center(text='Z', layer='metal1', offset=(mid_drain_offset + vector((0.5 * self.m1_width), 0)))
def route_outputs(self): '\n Route the output (drains) together.\n Optionally, routes output to edge.\n ' nmos_drain_pin = self.nmos_inst.get_pin('D') pmos_drain_pin = self.pmos_inst.get_pin('D') nmos_drain_pos = nmos_drain_pin.bc() pmos_drain_pos = vector(nmos_drain_pos.x, pmos_drain_pin.uc().y) self.add_path('metal1', [nmos_drain_pos, pmos_drain_pos]) mid_drain_offset = vector(nmos_drain_pos.x, self.output_pos.y) if self.route_output: output_offset = (mid_drain_offset.scale(0, 1) + vector(self.width, 0)) self.add_layout_pin_segment_center(text='Z', layer='metal1', start=mid_drain_offset, end=output_offset) else: self.add_layout_pin_rect_center(text='Z', layer='metal1', offset=(mid_drain_offset + vector((0.5 * self.m1_width), 0)))<|docstring|>Route the output (drains) together. Optionally, routes output to edge.<|endoftext|>
2df0d58ff5458e07b61c123d90ab69ea25ae5b84c9c58b4fc2f8623502eae3f2
def add_well_contacts(self): ' Add n/p well taps to the layout and connect to supplies ' self.add_nwell_contact(self.pmos, self.pmos_pos) self.add_pwell_contact(self.nmos, self.nmos_pos)
Add n/p well taps to the layout and connect to supplies
compiler/pgates/pinv.py
add_well_contacts
bkoppelmann/OpenRAM
43
python
def add_well_contacts(self): ' ' self.add_nwell_contact(self.pmos, self.pmos_pos) self.add_pwell_contact(self.nmos, self.nmos_pos)
def add_well_contacts(self): ' ' self.add_nwell_contact(self.pmos, self.pmos_pos) self.add_pwell_contact(self.nmos, self.nmos_pos)<|docstring|>Add n/p well taps to the layout and connect to supplies<|endoftext|>
862746fd90fb43f388e61f07acf68d380f6934a0a5147418ee2439f634db6016
def connect_rails(self): ' Connect the nmos and pmos to its respective power rails ' self.connect_pin_to_rail(self.nmos_inst, 'S', 'gnd') self.connect_pin_to_rail(self.pmos_inst, 'S', 'vdd')
Connect the nmos and pmos to its respective power rails
compiler/pgates/pinv.py
connect_rails
bkoppelmann/OpenRAM
43
python
def connect_rails(self): ' ' self.connect_pin_to_rail(self.nmos_inst, 'S', 'gnd') self.connect_pin_to_rail(self.pmos_inst, 'S', 'vdd')
def connect_rails(self): ' ' self.connect_pin_to_rail(self.nmos_inst, 'S', 'gnd') self.connect_pin_to_rail(self.pmos_inst, 'S', 'vdd')<|docstring|>Connect the nmos and pmos to its respective power rails<|endoftext|>
b71b80e66f5844876066813e45cc2d57b520a566bc452458354c670fb539960c
def analytical_power(self, corner, load): 'Returns dynamic and leakage power. Results in nW' c_eff = self.calculate_effective_capacitance(load) freq = spice['default_event_frequency'] power_dyn = self.calc_dynamic_power(corner, c_eff, freq) power_leak = spice['inv_leakage'] total_power = self.return_power(power_dyn, power_leak) return total_power
Returns dynamic and leakage power. Results in nW
compiler/pgates/pinv.py
analytical_power
bkoppelmann/OpenRAM
43
python
def analytical_power(self, corner, load): c_eff = self.calculate_effective_capacitance(load) freq = spice['default_event_frequency'] power_dyn = self.calc_dynamic_power(corner, c_eff, freq) power_leak = spice['inv_leakage'] total_power = self.return_power(power_dyn, power_leak) return total_power
def analytical_power(self, corner, load): c_eff = self.calculate_effective_capacitance(load) freq = spice['default_event_frequency'] power_dyn = self.calc_dynamic_power(corner, c_eff, freq) power_leak = spice['inv_leakage'] total_power = self.return_power(power_dyn, power_leak) return total_power<|docstring|>Returns dynamic and leakage power. Results in nW<|endoftext|>
ec8c704d1e2afd745fcdeb9fdb058745e6b3d8d2fb9ba3e03592a2b5909e37dc
def calculate_effective_capacitance(self, load): 'Computes effective capacitance. Results in fF' c_load = load c_para = (spice['min_tx_drain_c'] * (self.nmos_size / parameter['min_tx_size'])) transition_prob = 0.5 return (transition_prob * (c_load + c_para))
Computes effective capacitance. Results in fF
compiler/pgates/pinv.py
calculate_effective_capacitance
bkoppelmann/OpenRAM
43
python
def calculate_effective_capacitance(self, load): c_load = load c_para = (spice['min_tx_drain_c'] * (self.nmos_size / parameter['min_tx_size'])) transition_prob = 0.5 return (transition_prob * (c_load + c_para))
def calculate_effective_capacitance(self, load): c_load = load c_para = (spice['min_tx_drain_c'] * (self.nmos_size / parameter['min_tx_size'])) transition_prob = 0.5 return (transition_prob * (c_load + c_para))<|docstring|>Computes effective capacitance. Results in fF<|endoftext|>
4fe262aa95455d736b6e4168fc1b2369a6061c331b7dd4d445680d2d565ecc6b
def input_load(self): '\n Return the capacitance of the gate connection in generic capacitive\n units relative to the minimum width of a transistor\n ' return (self.nmos_size + self.pmos_size)
Return the capacitance of the gate connection in generic capacitive units relative to the minimum width of a transistor
compiler/pgates/pinv.py
input_load
bkoppelmann/OpenRAM
43
python
def input_load(self): '\n Return the capacitance of the gate connection in generic capacitive\n units relative to the minimum width of a transistor\n ' return (self.nmos_size + self.pmos_size)
def input_load(self): '\n Return the capacitance of the gate connection in generic capacitive\n units relative to the minimum width of a transistor\n ' return (self.nmos_size + self.pmos_size)<|docstring|>Return the capacitance of the gate connection in generic capacitive units relative to the minimum width of a transistor<|endoftext|>
04b40b93186ef203d02ca1a7ef2370cd8a80a656eb78990f5d51c4d073814370
def get_stage_effort(self, cout, inp_is_rise=True): '\n Returns an object representing the parameters for delay in tau units.\n Optional is_rise refers to the input direction rise/fall.\n Input inverted by this stage.\n ' parasitic_delay = 1 return logical_effort.logical_effort(self.name, self.size, self.input_load(), cout, parasitic_delay, (not inp_is_rise))
Returns an object representing the parameters for delay in tau units. Optional is_rise refers to the input direction rise/fall. Input inverted by this stage.
compiler/pgates/pinv.py
get_stage_effort
bkoppelmann/OpenRAM
43
python
def get_stage_effort(self, cout, inp_is_rise=True): '\n Returns an object representing the parameters for delay in tau units.\n Optional is_rise refers to the input direction rise/fall.\n Input inverted by this stage.\n ' parasitic_delay = 1 return logical_effort.logical_effort(self.name, self.size, self.input_load(), cout, parasitic_delay, (not inp_is_rise))
def get_stage_effort(self, cout, inp_is_rise=True): '\n Returns an object representing the parameters for delay in tau units.\n Optional is_rise refers to the input direction rise/fall.\n Input inverted by this stage.\n ' parasitic_delay = 1 return logical_effort.logical_effort(self.name, self.size, self.input_load(), cout, parasitic_delay, (not inp_is_rise))<|docstring|>Returns an object representing the parameters for delay in tau units. Optional is_rise refers to the input direction rise/fall. Input inverted by this stage.<|endoftext|>
abc62848fd64eac0d59682c4396c3af03d9becf92c23fdb2b1cf42d326b2a1de
def build_graph(self, graph, inst_name, port_nets): '\n Adds edges based on inputs/outputs.\n Overrides base class function.\n ' self.add_graph_edges(graph, port_nets)
Adds edges based on inputs/outputs. Overrides base class function.
compiler/pgates/pinv.py
build_graph
bkoppelmann/OpenRAM
43
python
def build_graph(self, graph, inst_name, port_nets): '\n Adds edges based on inputs/outputs.\n Overrides base class function.\n ' self.add_graph_edges(graph, port_nets)
def build_graph(self, graph, inst_name, port_nets): '\n Adds edges based on inputs/outputs.\n Overrides base class function.\n ' self.add_graph_edges(graph, port_nets)<|docstring|>Adds edges based on inputs/outputs. Overrides base class function.<|endoftext|>
f47c55d22c93a6b88e73250ab33c6b8d9ae5f5cdaad36fbac8f89a27912e28d8
def canWinNim(self, n): '\n :type n: int\n :rtype: bool\n ' return ((n % 4) != 0)
:type n: int :rtype: bool
cs15211/NimGame.py
canWinNim
JulyKikuAkita/PythonPrac
1
python
def canWinNim(self, n): '\n :type n: int\n :rtype: bool\n ' return ((n % 4) != 0)
def canWinNim(self, n): '\n :type n: int\n :rtype: bool\n ' return ((n % 4) != 0)<|docstring|>:type n: int :rtype: bool<|endoftext|>
c4023c0a06413da6a1a9805a9f53f91ba5bb1d08e6171db9cd1aaa73caf9196c
def run(model: Text, endpoints: Text, connector: Text=None, credentials: Text=None, **kwargs: Dict): 'Runs a Rasa model.\n\n Args:\n model: Path to model archive.\n endpoints: Path to endpoints file.\n connector: Connector which should be use (overwrites `credentials`\n field).\n credentials: Path to channel credentials file.\n **kwargs: Additional arguments which are passed to\n `rasa.core.run.serve_application`.\n\n ' import rasa.core.run import rasa.nlu.run from rasa.core.utils import AvailableEndpoints model_path = get_model(model) if (not model_path): logger.error('No model found. Train a model before running the server using `rasa train`.') return (core_path, nlu_path) = get_model_subdirectories(model_path) _endpoints = AvailableEndpoints.read_endpoints(endpoints) if ((not connector) and (not credentials)): channel = 'cmdline' logger.info('No chat connector configured, falling back to the command line. Use `rasa configure channel` to connectthe bot to e.g. facebook messenger.') else: channel = connector if os.path.exists(core_path): kwargs = minimal_kwargs(kwargs, rasa.core.run.serve_application) rasa.core.run.serve_application(core_path, nlu_path, channel=channel, credentials=credentials, endpoints=_endpoints, **kwargs) elif os.path.exists(nlu_path): rasa.nlu.run.run_cmdline(nlu_path) shutil.rmtree(model_path)
Runs a Rasa model. Args: model: Path to model archive. endpoints: Path to endpoints file. connector: Connector which should be use (overwrites `credentials` field). credentials: Path to channel credentials file. **kwargs: Additional arguments which are passed to `rasa.core.run.serve_application`.
rasa/run.py
run
anoop2503/rasa
1
python
def run(model: Text, endpoints: Text, connector: Text=None, credentials: Text=None, **kwargs: Dict): 'Runs a Rasa model.\n\n Args:\n model: Path to model archive.\n endpoints: Path to endpoints file.\n connector: Connector which should be use (overwrites `credentials`\n field).\n credentials: Path to channel credentials file.\n **kwargs: Additional arguments which are passed to\n `rasa.core.run.serve_application`.\n\n ' import rasa.core.run import rasa.nlu.run from rasa.core.utils import AvailableEndpoints model_path = get_model(model) if (not model_path): logger.error('No model found. Train a model before running the server using `rasa train`.') return (core_path, nlu_path) = get_model_subdirectories(model_path) _endpoints = AvailableEndpoints.read_endpoints(endpoints) if ((not connector) and (not credentials)): channel = 'cmdline' logger.info('No chat connector configured, falling back to the command line. Use `rasa configure channel` to connectthe bot to e.g. facebook messenger.') else: channel = connector if os.path.exists(core_path): kwargs = minimal_kwargs(kwargs, rasa.core.run.serve_application) rasa.core.run.serve_application(core_path, nlu_path, channel=channel, credentials=credentials, endpoints=_endpoints, **kwargs) elif os.path.exists(nlu_path): rasa.nlu.run.run_cmdline(nlu_path) shutil.rmtree(model_path)
def run(model: Text, endpoints: Text, connector: Text=None, credentials: Text=None, **kwargs: Dict): 'Runs a Rasa model.\n\n Args:\n model: Path to model archive.\n endpoints: Path to endpoints file.\n connector: Connector which should be use (overwrites `credentials`\n field).\n credentials: Path to channel credentials file.\n **kwargs: Additional arguments which are passed to\n `rasa.core.run.serve_application`.\n\n ' import rasa.core.run import rasa.nlu.run from rasa.core.utils import AvailableEndpoints model_path = get_model(model) if (not model_path): logger.error('No model found. Train a model before running the server using `rasa train`.') return (core_path, nlu_path) = get_model_subdirectories(model_path) _endpoints = AvailableEndpoints.read_endpoints(endpoints) if ((not connector) and (not credentials)): channel = 'cmdline' logger.info('No chat connector configured, falling back to the command line. Use `rasa configure channel` to connectthe bot to e.g. facebook messenger.') else: channel = connector if os.path.exists(core_path): kwargs = minimal_kwargs(kwargs, rasa.core.run.serve_application) rasa.core.run.serve_application(core_path, nlu_path, channel=channel, credentials=credentials, endpoints=_endpoints, **kwargs) elif os.path.exists(nlu_path): rasa.nlu.run.run_cmdline(nlu_path) shutil.rmtree(model_path)<|docstring|>Runs a Rasa model. Args: model: Path to model archive. endpoints: Path to endpoints file. connector: Connector which should be use (overwrites `credentials` field). credentials: Path to channel credentials file. **kwargs: Additional arguments which are passed to `rasa.core.run.serve_application`.<|endoftext|>
1ef43a9aa4e912eb1277d684275e796ff4b5cc1bcf89c949b5336ddf3309afe2
def _download_and_index_cqa(dl_manager, name): 'Downloads CQA and returns it, indexed by id, for joining with Cos-E.' downloaded_files = dl_manager.download_and_extract({'cqa_train': (_CQA_V1_11_URL_TRAIN if (name == 'v1.11') else _CQA_V1_0_URL_TRAIN), 'cqa_dev': (_CQA_V1_11_URL_DEV if (name == 'v1.11') else _CQA_V1_0_URL_DEV), 'cqa_test': (_CQA_V1_11_URL_TEST if (name == 'v1.11') else _CQA_V1_0_URL_TEST)}) cqa_splits = ['cqa_train', 'cqa_dev'] cqa_complete = [] for split in cqa_splits: with open(downloaded_files[split], encoding='utf-8') as f: for (_, line) in enumerate(f): d = json.loads(line) cqa_complete.append(d) cqa_indexed = {} for d in cqa_complete: cqa_indexed[d['id']] = d return cqa_indexed
Downloads CQA and returns it, indexed by id, for joining with Cos-E.
datasets/cos_e/cos_e.py
_download_and_index_cqa
patrickvonplaten/datasets-1
10,608
python
def _download_and_index_cqa(dl_manager, name): downloaded_files = dl_manager.download_and_extract({'cqa_train': (_CQA_V1_11_URL_TRAIN if (name == 'v1.11') else _CQA_V1_0_URL_TRAIN), 'cqa_dev': (_CQA_V1_11_URL_DEV if (name == 'v1.11') else _CQA_V1_0_URL_DEV), 'cqa_test': (_CQA_V1_11_URL_TEST if (name == 'v1.11') else _CQA_V1_0_URL_TEST)}) cqa_splits = ['cqa_train', 'cqa_dev'] cqa_complete = [] for split in cqa_splits: with open(downloaded_files[split], encoding='utf-8') as f: for (_, line) in enumerate(f): d = json.loads(line) cqa_complete.append(d) cqa_indexed = {} for d in cqa_complete: cqa_indexed[d['id']] = d return cqa_indexed
def _download_and_index_cqa(dl_manager, name): downloaded_files = dl_manager.download_and_extract({'cqa_train': (_CQA_V1_11_URL_TRAIN if (name == 'v1.11') else _CQA_V1_0_URL_TRAIN), 'cqa_dev': (_CQA_V1_11_URL_DEV if (name == 'v1.11') else _CQA_V1_0_URL_DEV), 'cqa_test': (_CQA_V1_11_URL_TEST if (name == 'v1.11') else _CQA_V1_0_URL_TEST)}) cqa_splits = ['cqa_train', 'cqa_dev'] cqa_complete = [] for split in cqa_splits: with open(downloaded_files[split], encoding='utf-8') as f: for (_, line) in enumerate(f): d = json.loads(line) cqa_complete.append(d) cqa_indexed = {} for d in cqa_complete: cqa_indexed[d['id']] = d return cqa_indexed<|docstring|>Downloads CQA and returns it, indexed by id, for joining with Cos-E.<|endoftext|>
c716d558b01e744307a341b51694766154b4029c4816fd45ced566f76a2ea4f9
def _get_choices_and_answer(cqa): 'Returns choices and the answer from a cqa example.' choices = [] answer_key = cqa['answerKey'] answer = None for choice in cqa['question']['choices']: choices.append(choice['text']) if (answer_key == choice['label']): answer = choice['text'] return (choices, answer)
Returns choices and the answer from a cqa example.
datasets/cos_e/cos_e.py
_get_choices_and_answer
patrickvonplaten/datasets-1
10,608
python
def _get_choices_and_answer(cqa): choices = [] answer_key = cqa['answerKey'] answer = None for choice in cqa['question']['choices']: choices.append(choice['text']) if (answer_key == choice['label']): answer = choice['text'] return (choices, answer)
def _get_choices_and_answer(cqa): choices = [] answer_key = cqa['answerKey'] answer = None for choice in cqa['question']['choices']: choices.append(choice['text']) if (answer_key == choice['label']): answer = choice['text'] return (choices, answer)<|docstring|>Returns choices and the answer from a cqa example.<|endoftext|>
a156c8cc152940c3f4f00510ad3b6f4f42268cffbd0efb76363c949c19ccea18
def __init__(self, **kwargs): '\n\n Args:\n **kwargs: keyword arguments forwarded to super.\n ' super(CosEConfig, self).__init__(**kwargs)
Args: **kwargs: keyword arguments forwarded to super.
datasets/cos_e/cos_e.py
__init__
patrickvonplaten/datasets-1
10,608
python
def __init__(self, **kwargs): '\n\n Args:\n **kwargs: keyword arguments forwarded to super.\n ' super(CosEConfig, self).__init__(**kwargs)
def __init__(self, **kwargs): '\n\n Args:\n **kwargs: keyword arguments forwarded to super.\n ' super(CosEConfig, self).__init__(**kwargs)<|docstring|>Args: **kwargs: keyword arguments forwarded to super.<|endoftext|>
2473c37756c7448216363486eb0f1051880572bfe83390ec424cc262f3a043ae
def _split_generators(self, dl_manager): 'Returns SplitGenerators.' cqa_indexed = _download_and_index_cqa(dl_manager, self.config.name) if (self.config.name == 'v1.11'): files = dl_manager.download_and_extract({'dev': [(_COS_E_URL + 'v1.11/cose_dev_v1.11_processed.jsonl')], 'train': [(_COS_E_URL + 'v1.11/cose_train_v1.11_processed.jsonl')]}) elif (self.config.name == 'v1.0'): files = dl_manager.download_and_extract({'dev': [(_COS_E_URL + 'v1.0/cose_dev_v1.0_processed.jsonl')], 'train': [(_COS_E_URL + 'v1.0/cose_train_v1.0_processed.jsonl')]}) else: raise ValueError('Unknown config name') return [datasets.SplitGenerator(name=datasets.Split.TRAIN, gen_kwargs={'files': files['train'], 'cqa_indexed': cqa_indexed}), datasets.SplitGenerator(name=datasets.Split.VALIDATION, gen_kwargs={'files': files['dev'], 'cqa_indexed': cqa_indexed})]
Returns SplitGenerators.
datasets/cos_e/cos_e.py
_split_generators
patrickvonplaten/datasets-1
10,608
python
def _split_generators(self, dl_manager): cqa_indexed = _download_and_index_cqa(dl_manager, self.config.name) if (self.config.name == 'v1.11'): files = dl_manager.download_and_extract({'dev': [(_COS_E_URL + 'v1.11/cose_dev_v1.11_processed.jsonl')], 'train': [(_COS_E_URL + 'v1.11/cose_train_v1.11_processed.jsonl')]}) elif (self.config.name == 'v1.0'): files = dl_manager.download_and_extract({'dev': [(_COS_E_URL + 'v1.0/cose_dev_v1.0_processed.jsonl')], 'train': [(_COS_E_URL + 'v1.0/cose_train_v1.0_processed.jsonl')]}) else: raise ValueError('Unknown config name') return [datasets.SplitGenerator(name=datasets.Split.TRAIN, gen_kwargs={'files': files['train'], 'cqa_indexed': cqa_indexed}), datasets.SplitGenerator(name=datasets.Split.VALIDATION, gen_kwargs={'files': files['dev'], 'cqa_indexed': cqa_indexed})]
def _split_generators(self, dl_manager): cqa_indexed = _download_and_index_cqa(dl_manager, self.config.name) if (self.config.name == 'v1.11'): files = dl_manager.download_and_extract({'dev': [(_COS_E_URL + 'v1.11/cose_dev_v1.11_processed.jsonl')], 'train': [(_COS_E_URL + 'v1.11/cose_train_v1.11_processed.jsonl')]}) elif (self.config.name == 'v1.0'): files = dl_manager.download_and_extract({'dev': [(_COS_E_URL + 'v1.0/cose_dev_v1.0_processed.jsonl')], 'train': [(_COS_E_URL + 'v1.0/cose_train_v1.0_processed.jsonl')]}) else: raise ValueError('Unknown config name') return [datasets.SplitGenerator(name=datasets.Split.TRAIN, gen_kwargs={'files': files['train'], 'cqa_indexed': cqa_indexed}), datasets.SplitGenerator(name=datasets.Split.VALIDATION, gen_kwargs={'files': files['dev'], 'cqa_indexed': cqa_indexed})]<|docstring|>Returns SplitGenerators.<|endoftext|>
26678a646017df27722953631e1980f1b0d115721dd7f570a03c31b11cdb034a
def _generate_examples(self, files, **kwargs): 'Yields examples.' cqa_indexed = kwargs['cqa_indexed'] for filepath in files: with open(filepath, encoding='utf-8') as f: for line in f: cos = json.loads(line) cqa = cqa_indexed[cos['id']] (choices, answer) = _get_choices_and_answer(cqa) (yield (cos['id'], {'id': cos['id'], 'question': cqa['question']['stem'], 'choices': choices, 'answer': answer, 'abstractive_explanation': cos['explanation']['open-ended'], 'extractive_explanation': cos['explanation']['selected']}))
Yields examples.
datasets/cos_e/cos_e.py
_generate_examples
patrickvonplaten/datasets-1
10,608
python
def _generate_examples(self, files, **kwargs): cqa_indexed = kwargs['cqa_indexed'] for filepath in files: with open(filepath, encoding='utf-8') as f: for line in f: cos = json.loads(line) cqa = cqa_indexed[cos['id']] (choices, answer) = _get_choices_and_answer(cqa) (yield (cos['id'], {'id': cos['id'], 'question': cqa['question']['stem'], 'choices': choices, 'answer': answer, 'abstractive_explanation': cos['explanation']['open-ended'], 'extractive_explanation': cos['explanation']['selected']}))
def _generate_examples(self, files, **kwargs): cqa_indexed = kwargs['cqa_indexed'] for filepath in files: with open(filepath, encoding='utf-8') as f: for line in f: cos = json.loads(line) cqa = cqa_indexed[cos['id']] (choices, answer) = _get_choices_and_answer(cqa) (yield (cos['id'], {'id': cos['id'], 'question': cqa['question']['stem'], 'choices': choices, 'answer': answer, 'abstractive_explanation': cos['explanation']['open-ended'], 'extractive_explanation': cos['explanation']['selected']}))<|docstring|>Yields examples.<|endoftext|>
d1eea9b6a28435243b14e2f42979cf71374707c2db2d257cbc296c9f247d0f82
def __init__(self, *args, **kwargs): "\n eg: \n ConfigWrapper(\n `function_name`='function_name', \n `a`=el_a, \n `b`=el_b, \n `c`=el_c, \n `d`=el_d, \n `el_1`, `el_2`, `el_3`\n )\n " self.function_name: str = None self.config_dict: Dict = {} self.config_list: List = [] self.function_name = (kwargs.get('function_name') if kwargs.get('function_name', False) else None) assert (self.function_name is not None), f"function_name must be passed as kwargs [eg. function_name='function_name' ]" for (k, v) in kwargs.items(): self.config_dict[k] = v for el in args: self.config_list.append(el)
eg: ConfigWrapper( `function_name`='function_name', `a`=el_a, `b`=el_b, `c`=el_c, `d`=el_d, `el_1`, `el_2`, `el_3` )
src/thesis/utils/cache.py
__init__
emanuelevivoli/2021-Master-Thesis-UNIFI
1
python
def __init__(self, *args, **kwargs): "\n eg: \n ConfigWrapper(\n `function_name`='function_name', \n `a`=el_a, \n `b`=el_b, \n `c`=el_c, \n `d`=el_d, \n `el_1`, `el_2`, `el_3`\n )\n " self.function_name: str = None self.config_dict: Dict = {} self.config_list: List = [] self.function_name = (kwargs.get('function_name') if kwargs.get('function_name', False) else None) assert (self.function_name is not None), f"function_name must be passed as kwargs [eg. function_name='function_name' ]" for (k, v) in kwargs.items(): self.config_dict[k] = v for el in args: self.config_list.append(el)
def __init__(self, *args, **kwargs): "\n eg: \n ConfigWrapper(\n `function_name`='function_name', \n `a`=el_a, \n `b`=el_b, \n `c`=el_c, \n `d`=el_d, \n `el_1`, `el_2`, `el_3`\n )\n " self.function_name: str = None self.config_dict: Dict = {} self.config_list: List = [] self.function_name = (kwargs.get('function_name') if kwargs.get('function_name', False) else None) assert (self.function_name is not None), f"function_name must be passed as kwargs [eg. function_name='function_name' ]" for (k, v) in kwargs.items(): self.config_dict[k] = v for el in args: self.config_list.append(el)<|docstring|>eg: ConfigWrapper( `function_name`='function_name', `a`=el_a, `b`=el_b, `c`=el_c, `d`=el_d, `el_1`, `el_2`, `el_3` )<|endoftext|>
8ff16916067f0fbd4ba6bd6dec8ea04121d805acb5e84b6f22e341fdc5f01a01
@classmethod async def add_global_group_cool_down_event(cls, group_id: int, stop_at: datetime, description: str=None) -> Result.IntResult: '\n :return:\n result = 0: Success\n result = -1: Error\n ' async_session = BaseDB().get_async_session() async with async_session() as session: try: async with session.begin(): try: session_result = (await session.execute(select(CoolDownEvent).where((CoolDownEvent.event_type == cls.global_group_type)).where((CoolDownEvent.group_id == group_id)))) exist_event = session_result.scalar_one() exist_event.stop_at = stop_at exist_event.description = description exist_event.updated_at = datetime.now() result = Result.IntResult(error=False, info='Success upgraded', result=0) except NoResultFound: new_event = CoolDownEvent(event_type=cls.global_group_type, group_id=group_id, stop_at=stop_at, description=description, created_at=datetime.now()) session.add(new_event) result = Result.IntResult(error=False, info='Success added', result=0) (await session.commit()) except MultipleResultsFound: (await session.rollback()) result = Result.IntResult(error=True, info='MultipleResultsFound', result=(- 1)) except Exception as e: (await session.rollback()) result = Result.IntResult(error=True, info=repr(e), result=(- 1)) return result
:return: result = 0: Success result = -1: Error
omega_miya/database/model/cooldown.py
add_global_group_cool_down_event
rinrini001/omega-miya
120
python
@classmethod async def add_global_group_cool_down_event(cls, group_id: int, stop_at: datetime, description: str=None) -> Result.IntResult: '\n :return:\n result = 0: Success\n result = -1: Error\n ' async_session = BaseDB().get_async_session() async with async_session() as session: try: async with session.begin(): try: session_result = (await session.execute(select(CoolDownEvent).where((CoolDownEvent.event_type == cls.global_group_type)).where((CoolDownEvent.group_id == group_id)))) exist_event = session_result.scalar_one() exist_event.stop_at = stop_at exist_event.description = description exist_event.updated_at = datetime.now() result = Result.IntResult(error=False, info='Success upgraded', result=0) except NoResultFound: new_event = CoolDownEvent(event_type=cls.global_group_type, group_id=group_id, stop_at=stop_at, description=description, created_at=datetime.now()) session.add(new_event) result = Result.IntResult(error=False, info='Success added', result=0) (await session.commit()) except MultipleResultsFound: (await session.rollback()) result = Result.IntResult(error=True, info='MultipleResultsFound', result=(- 1)) except Exception as e: (await session.rollback()) result = Result.IntResult(error=True, info=repr(e), result=(- 1)) return result
@classmethod async def add_global_group_cool_down_event(cls, group_id: int, stop_at: datetime, description: str=None) -> Result.IntResult: '\n :return:\n result = 0: Success\n result = -1: Error\n ' async_session = BaseDB().get_async_session() async with async_session() as session: try: async with session.begin(): try: session_result = (await session.execute(select(CoolDownEvent).where((CoolDownEvent.event_type == cls.global_group_type)).where((CoolDownEvent.group_id == group_id)))) exist_event = session_result.scalar_one() exist_event.stop_at = stop_at exist_event.description = description exist_event.updated_at = datetime.now() result = Result.IntResult(error=False, info='Success upgraded', result=0) except NoResultFound: new_event = CoolDownEvent(event_type=cls.global_group_type, group_id=group_id, stop_at=stop_at, description=description, created_at=datetime.now()) session.add(new_event) result = Result.IntResult(error=False, info='Success added', result=0) (await session.commit()) except MultipleResultsFound: (await session.rollback()) result = Result.IntResult(error=True, info='MultipleResultsFound', result=(- 1)) except Exception as e: (await session.rollback()) result = Result.IntResult(error=True, info=repr(e), result=(- 1)) return result<|docstring|>:return: result = 0: Success result = -1: Error<|endoftext|>
62ee746ff2478af182815d5678589d249ff63adf9dc2eff329daeeb0326069ff
@classmethod async def check_global_group_cool_down_event(cls, group_id: int) -> Result.IntResult: '\n :return:\n result = 2: Success with CoolDown Event expired\n result = 1: Success with CoolDown Event exist\n result = 0: Success with CoolDown Event not found\n result = -1: Error\n ' async_session = BaseDB().get_async_session() async with async_session() as session: async with session.begin(): try: session_result = (await session.execute(select(CoolDownEvent).where((CoolDownEvent.event_type == cls.global_group_type)).where((CoolDownEvent.group_id == group_id)))) event = session_result.scalar_one() stop_at = event.stop_at if (datetime.now() > stop_at): result = Result.IntResult(error=False, info='Success, CoolDown expired', result=2) else: result = Result.IntResult(error=False, info=f'CoolDown until: {stop_at}', result=1) except NoResultFound: result = Result.IntResult(error=False, info='NoResultFound', result=0) except MultipleResultsFound: result = Result.IntResult(error=True, info='MultipleResultsFound', result=(- 1)) except Exception as e: result = Result.IntResult(error=True, info=repr(e), result=(- 1)) return result
:return: result = 2: Success with CoolDown Event expired result = 1: Success with CoolDown Event exist result = 0: Success with CoolDown Event not found result = -1: Error
omega_miya/database/model/cooldown.py
check_global_group_cool_down_event
rinrini001/omega-miya
120
python
@classmethod async def check_global_group_cool_down_event(cls, group_id: int) -> Result.IntResult: '\n :return:\n result = 2: Success with CoolDown Event expired\n result = 1: Success with CoolDown Event exist\n result = 0: Success with CoolDown Event not found\n result = -1: Error\n ' async_session = BaseDB().get_async_session() async with async_session() as session: async with session.begin(): try: session_result = (await session.execute(select(CoolDownEvent).where((CoolDownEvent.event_type == cls.global_group_type)).where((CoolDownEvent.group_id == group_id)))) event = session_result.scalar_one() stop_at = event.stop_at if (datetime.now() > stop_at): result = Result.IntResult(error=False, info='Success, CoolDown expired', result=2) else: result = Result.IntResult(error=False, info=f'CoolDown until: {stop_at}', result=1) except NoResultFound: result = Result.IntResult(error=False, info='NoResultFound', result=0) except MultipleResultsFound: result = Result.IntResult(error=True, info='MultipleResultsFound', result=(- 1)) except Exception as e: result = Result.IntResult(error=True, info=repr(e), result=(- 1)) return result
@classmethod async def check_global_group_cool_down_event(cls, group_id: int) -> Result.IntResult: '\n :return:\n result = 2: Success with CoolDown Event expired\n result = 1: Success with CoolDown Event exist\n result = 0: Success with CoolDown Event not found\n result = -1: Error\n ' async_session = BaseDB().get_async_session() async with async_session() as session: async with session.begin(): try: session_result = (await session.execute(select(CoolDownEvent).where((CoolDownEvent.event_type == cls.global_group_type)).where((CoolDownEvent.group_id == group_id)))) event = session_result.scalar_one() stop_at = event.stop_at if (datetime.now() > stop_at): result = Result.IntResult(error=False, info='Success, CoolDown expired', result=2) else: result = Result.IntResult(error=False, info=f'CoolDown until: {stop_at}', result=1) except NoResultFound: result = Result.IntResult(error=False, info='NoResultFound', result=0) except MultipleResultsFound: result = Result.IntResult(error=True, info='MultipleResultsFound', result=(- 1)) except Exception as e: result = Result.IntResult(error=True, info=repr(e), result=(- 1)) return result<|docstring|>:return: result = 2: Success with CoolDown Event expired result = 1: Success with CoolDown Event exist result = 0: Success with CoolDown Event not found result = -1: Error<|endoftext|>
20c141ccb33ceaa5d55a68b537bd0bc210bdf37806fdada84f734d47eb09c121
@classmethod async def add_global_user_cool_down_event(cls, user_id: int, stop_at: datetime, description: str=None) -> Result.IntResult: '\n :return:\n result = 0: Success\n result = -1: Error\n ' async_session = BaseDB().get_async_session() async with async_session() as session: try: async with session.begin(): try: session_result = (await session.execute(select(CoolDownEvent).where((CoolDownEvent.event_type == cls.global_user_type)).where((CoolDownEvent.user_id == user_id)))) exist_event = session_result.scalar_one() exist_event.stop_at = stop_at exist_event.description = description exist_event.updated_at = datetime.now() result = Result.IntResult(error=False, info='Success upgraded', result=0) except NoResultFound: new_event = CoolDownEvent(event_type=cls.global_user_type, user_id=user_id, stop_at=stop_at, description=description, created_at=datetime.now()) session.add(new_event) result = Result.IntResult(error=False, info='Success added', result=0) (await session.commit()) except MultipleResultsFound: (await session.rollback()) result = Result.IntResult(error=True, info='MultipleResultsFound', result=(- 1)) except Exception as e: (await session.rollback()) result = Result.IntResult(error=True, info=repr(e), result=(- 1)) return result
:return: result = 0: Success result = -1: Error
omega_miya/database/model/cooldown.py
add_global_user_cool_down_event
rinrini001/omega-miya
120
python
@classmethod async def add_global_user_cool_down_event(cls, user_id: int, stop_at: datetime, description: str=None) -> Result.IntResult: '\n :return:\n result = 0: Success\n result = -1: Error\n ' async_session = BaseDB().get_async_session() async with async_session() as session: try: async with session.begin(): try: session_result = (await session.execute(select(CoolDownEvent).where((CoolDownEvent.event_type == cls.global_user_type)).where((CoolDownEvent.user_id == user_id)))) exist_event = session_result.scalar_one() exist_event.stop_at = stop_at exist_event.description = description exist_event.updated_at = datetime.now() result = Result.IntResult(error=False, info='Success upgraded', result=0) except NoResultFound: new_event = CoolDownEvent(event_type=cls.global_user_type, user_id=user_id, stop_at=stop_at, description=description, created_at=datetime.now()) session.add(new_event) result = Result.IntResult(error=False, info='Success added', result=0) (await session.commit()) except MultipleResultsFound: (await session.rollback()) result = Result.IntResult(error=True, info='MultipleResultsFound', result=(- 1)) except Exception as e: (await session.rollback()) result = Result.IntResult(error=True, info=repr(e), result=(- 1)) return result
@classmethod async def add_global_user_cool_down_event(cls, user_id: int, stop_at: datetime, description: str=None) -> Result.IntResult: '\n :return:\n result = 0: Success\n result = -1: Error\n ' async_session = BaseDB().get_async_session() async with async_session() as session: try: async with session.begin(): try: session_result = (await session.execute(select(CoolDownEvent).where((CoolDownEvent.event_type == cls.global_user_type)).where((CoolDownEvent.user_id == user_id)))) exist_event = session_result.scalar_one() exist_event.stop_at = stop_at exist_event.description = description exist_event.updated_at = datetime.now() result = Result.IntResult(error=False, info='Success upgraded', result=0) except NoResultFound: new_event = CoolDownEvent(event_type=cls.global_user_type, user_id=user_id, stop_at=stop_at, description=description, created_at=datetime.now()) session.add(new_event) result = Result.IntResult(error=False, info='Success added', result=0) (await session.commit()) except MultipleResultsFound: (await session.rollback()) result = Result.IntResult(error=True, info='MultipleResultsFound', result=(- 1)) except Exception as e: (await session.rollback()) result = Result.IntResult(error=True, info=repr(e), result=(- 1)) return result<|docstring|>:return: result = 0: Success result = -1: Error<|endoftext|>
b652490ba33fd997fcf2be55a099dd7089ad974f4c5eecc79b79d22fce2350a1
@classmethod async def check_global_user_cool_down_event(cls, user_id: int) -> Result.IntResult: '\n :return:\n result = 2: Success with CoolDown Event expired\n result = 1: Success with CoolDown Event exist\n result = 0: Success with CoolDown Event not found\n result = -1: Error\n ' async_session = BaseDB().get_async_session() async with async_session() as session: async with session.begin(): try: session_result = (await session.execute(select(CoolDownEvent).where((CoolDownEvent.event_type == cls.global_user_type)).where((CoolDownEvent.user_id == user_id)))) event = session_result.scalar_one() stop_at = event.stop_at if (datetime.now() > stop_at): result = Result.IntResult(error=False, info='Success, CoolDown expired', result=2) else: result = Result.IntResult(error=False, info=f'CoolDown until: {stop_at}', result=1) except NoResultFound: result = Result.IntResult(error=False, info='NoResultFound', result=0) except MultipleResultsFound: result = Result.IntResult(error=True, info='MultipleResultsFound', result=(- 1)) except Exception as e: result = Result.IntResult(error=True, info=repr(e), result=(- 1)) return result
:return: result = 2: Success with CoolDown Event expired result = 1: Success with CoolDown Event exist result = 0: Success with CoolDown Event not found result = -1: Error
omega_miya/database/model/cooldown.py
check_global_user_cool_down_event
rinrini001/omega-miya
120
python
@classmethod async def check_global_user_cool_down_event(cls, user_id: int) -> Result.IntResult: '\n :return:\n result = 2: Success with CoolDown Event expired\n result = 1: Success with CoolDown Event exist\n result = 0: Success with CoolDown Event not found\n result = -1: Error\n ' async_session = BaseDB().get_async_session() async with async_session() as session: async with session.begin(): try: session_result = (await session.execute(select(CoolDownEvent).where((CoolDownEvent.event_type == cls.global_user_type)).where((CoolDownEvent.user_id == user_id)))) event = session_result.scalar_one() stop_at = event.stop_at if (datetime.now() > stop_at): result = Result.IntResult(error=False, info='Success, CoolDown expired', result=2) else: result = Result.IntResult(error=False, info=f'CoolDown until: {stop_at}', result=1) except NoResultFound: result = Result.IntResult(error=False, info='NoResultFound', result=0) except MultipleResultsFound: result = Result.IntResult(error=True, info='MultipleResultsFound', result=(- 1)) except Exception as e: result = Result.IntResult(error=True, info=repr(e), result=(- 1)) return result
@classmethod async def check_global_user_cool_down_event(cls, user_id: int) -> Result.IntResult: '\n :return:\n result = 2: Success with CoolDown Event expired\n result = 1: Success with CoolDown Event exist\n result = 0: Success with CoolDown Event not found\n result = -1: Error\n ' async_session = BaseDB().get_async_session() async with async_session() as session: async with session.begin(): try: session_result = (await session.execute(select(CoolDownEvent).where((CoolDownEvent.event_type == cls.global_user_type)).where((CoolDownEvent.user_id == user_id)))) event = session_result.scalar_one() stop_at = event.stop_at if (datetime.now() > stop_at): result = Result.IntResult(error=False, info='Success, CoolDown expired', result=2) else: result = Result.IntResult(error=False, info=f'CoolDown until: {stop_at}', result=1) except NoResultFound: result = Result.IntResult(error=False, info='NoResultFound', result=0) except MultipleResultsFound: result = Result.IntResult(error=True, info='MultipleResultsFound', result=(- 1)) except Exception as e: result = Result.IntResult(error=True, info=repr(e), result=(- 1)) return result<|docstring|>:return: result = 2: Success with CoolDown Event expired result = 1: Success with CoolDown Event exist result = 0: Success with CoolDown Event not found result = -1: Error<|endoftext|>
8d31644b05b94100b9a5c896706d6d5f8ca7b05fb3e28298f88e8f3b9db05606
@classmethod async def add_group_cool_down_event(cls, plugin: str, group_id: int, stop_at: datetime, description: str=None) -> Result.IntResult: '\n :return:\n result = 0: Success\n result = -1: Error\n ' async_session = BaseDB().get_async_session() async with async_session() as session: try: async with session.begin(): try: session_result = (await session.execute(select(CoolDownEvent).where((CoolDownEvent.event_type == cls.group_type)).where((CoolDownEvent.plugin == plugin)).where((CoolDownEvent.group_id == group_id)))) exist_event = session_result.scalar_one() exist_event.stop_at = stop_at exist_event.description = description exist_event.updated_at = datetime.now() result = Result.IntResult(error=False, info='Success upgraded', result=0) except NoResultFound: new_event = CoolDownEvent(event_type=cls.group_type, plugin=plugin, group_id=group_id, stop_at=stop_at, description=description, created_at=datetime.now()) session.add(new_event) result = Result.IntResult(error=False, info='Success added', result=0) (await session.commit()) except MultipleResultsFound: (await session.rollback()) result = Result.IntResult(error=True, info='MultipleResultsFound', result=(- 1)) except Exception as e: (await session.rollback()) result = Result.IntResult(error=True, info=repr(e), result=(- 1)) return result
:return: result = 0: Success result = -1: Error
omega_miya/database/model/cooldown.py
add_group_cool_down_event
rinrini001/omega-miya
120
python
@classmethod async def add_group_cool_down_event(cls, plugin: str, group_id: int, stop_at: datetime, description: str=None) -> Result.IntResult: '\n :return:\n result = 0: Success\n result = -1: Error\n ' async_session = BaseDB().get_async_session() async with async_session() as session: try: async with session.begin(): try: session_result = (await session.execute(select(CoolDownEvent).where((CoolDownEvent.event_type == cls.group_type)).where((CoolDownEvent.plugin == plugin)).where((CoolDownEvent.group_id == group_id)))) exist_event = session_result.scalar_one() exist_event.stop_at = stop_at exist_event.description = description exist_event.updated_at = datetime.now() result = Result.IntResult(error=False, info='Success upgraded', result=0) except NoResultFound: new_event = CoolDownEvent(event_type=cls.group_type, plugin=plugin, group_id=group_id, stop_at=stop_at, description=description, created_at=datetime.now()) session.add(new_event) result = Result.IntResult(error=False, info='Success added', result=0) (await session.commit()) except MultipleResultsFound: (await session.rollback()) result = Result.IntResult(error=True, info='MultipleResultsFound', result=(- 1)) except Exception as e: (await session.rollback()) result = Result.IntResult(error=True, info=repr(e), result=(- 1)) return result
@classmethod async def add_group_cool_down_event(cls, plugin: str, group_id: int, stop_at: datetime, description: str=None) -> Result.IntResult: '\n :return:\n result = 0: Success\n result = -1: Error\n ' async_session = BaseDB().get_async_session() async with async_session() as session: try: async with session.begin(): try: session_result = (await session.execute(select(CoolDownEvent).where((CoolDownEvent.event_type == cls.group_type)).where((CoolDownEvent.plugin == plugin)).where((CoolDownEvent.group_id == group_id)))) exist_event = session_result.scalar_one() exist_event.stop_at = stop_at exist_event.description = description exist_event.updated_at = datetime.now() result = Result.IntResult(error=False, info='Success upgraded', result=0) except NoResultFound: new_event = CoolDownEvent(event_type=cls.group_type, plugin=plugin, group_id=group_id, stop_at=stop_at, description=description, created_at=datetime.now()) session.add(new_event) result = Result.IntResult(error=False, info='Success added', result=0) (await session.commit()) except MultipleResultsFound: (await session.rollback()) result = Result.IntResult(error=True, info='MultipleResultsFound', result=(- 1)) except Exception as e: (await session.rollback()) result = Result.IntResult(error=True, info=repr(e), result=(- 1)) return result<|docstring|>:return: result = 0: Success result = -1: Error<|endoftext|>
cad2f514d514e8df966ad2b8a9ea3d71aea807a5cd8de6d0770596dc43f87f43
@classmethod async def check_group_cool_down_event(cls, plugin: str, group_id: int) -> Result.IntResult: '\n :return:\n result = 2: Success with CoolDown Event expired\n result = 1: Success with CoolDown Event exist\n result = 0: Success with CoolDown Event not found\n result = -1: Error\n ' async_session = BaseDB().get_async_session() async with async_session() as session: async with session.begin(): try: session_result = (await session.execute(select(CoolDownEvent).where((CoolDownEvent.event_type == cls.group_type)).where((CoolDownEvent.plugin == plugin)).where((CoolDownEvent.group_id == group_id)))) event = session_result.scalar_one() stop_at = event.stop_at if (datetime.now() > stop_at): result = Result.IntResult(error=False, info='Success, CoolDown expired', result=2) else: result = Result.IntResult(error=False, info=f'CoolDown until: {stop_at}', result=1) except NoResultFound: result = Result.IntResult(error=False, info='NoResultFound', result=0) except MultipleResultsFound: result = Result.IntResult(error=True, info='MultipleResultsFound', result=(- 1)) except Exception as e: result = Result.IntResult(error=True, info=repr(e), result=(- 1)) return result
:return: result = 2: Success with CoolDown Event expired result = 1: Success with CoolDown Event exist result = 0: Success with CoolDown Event not found result = -1: Error
omega_miya/database/model/cooldown.py
check_group_cool_down_event
rinrini001/omega-miya
120
python
@classmethod async def check_group_cool_down_event(cls, plugin: str, group_id: int) -> Result.IntResult: '\n :return:\n result = 2: Success with CoolDown Event expired\n result = 1: Success with CoolDown Event exist\n result = 0: Success with CoolDown Event not found\n result = -1: Error\n ' async_session = BaseDB().get_async_session() async with async_session() as session: async with session.begin(): try: session_result = (await session.execute(select(CoolDownEvent).where((CoolDownEvent.event_type == cls.group_type)).where((CoolDownEvent.plugin == plugin)).where((CoolDownEvent.group_id == group_id)))) event = session_result.scalar_one() stop_at = event.stop_at if (datetime.now() > stop_at): result = Result.IntResult(error=False, info='Success, CoolDown expired', result=2) else: result = Result.IntResult(error=False, info=f'CoolDown until: {stop_at}', result=1) except NoResultFound: result = Result.IntResult(error=False, info='NoResultFound', result=0) except MultipleResultsFound: result = Result.IntResult(error=True, info='MultipleResultsFound', result=(- 1)) except Exception as e: result = Result.IntResult(error=True, info=repr(e), result=(- 1)) return result
@classmethod async def check_group_cool_down_event(cls, plugin: str, group_id: int) -> Result.IntResult: '\n :return:\n result = 2: Success with CoolDown Event expired\n result = 1: Success with CoolDown Event exist\n result = 0: Success with CoolDown Event not found\n result = -1: Error\n ' async_session = BaseDB().get_async_session() async with async_session() as session: async with session.begin(): try: session_result = (await session.execute(select(CoolDownEvent).where((CoolDownEvent.event_type == cls.group_type)).where((CoolDownEvent.plugin == plugin)).where((CoolDownEvent.group_id == group_id)))) event = session_result.scalar_one() stop_at = event.stop_at if (datetime.now() > stop_at): result = Result.IntResult(error=False, info='Success, CoolDown expired', result=2) else: result = Result.IntResult(error=False, info=f'CoolDown until: {stop_at}', result=1) except NoResultFound: result = Result.IntResult(error=False, info='NoResultFound', result=0) except MultipleResultsFound: result = Result.IntResult(error=True, info='MultipleResultsFound', result=(- 1)) except Exception as e: result = Result.IntResult(error=True, info=repr(e), result=(- 1)) return result<|docstring|>:return: result = 2: Success with CoolDown Event expired result = 1: Success with CoolDown Event exist result = 0: Success with CoolDown Event not found result = -1: Error<|endoftext|>
23bbf0abf48b1cd0067c1d5dda33dd6b75191eabf42a47c0e443677b88883e89
@classmethod async def add_user_cool_down_event(cls, plugin: str, user_id: int, stop_at: datetime, description: str=None) -> Result.IntResult: '\n :return:\n result = 0: Success\n result = -1: Error\n ' async_session = BaseDB().get_async_session() async with async_session() as session: try: async with session.begin(): try: session_result = (await session.execute(select(CoolDownEvent).where((CoolDownEvent.event_type == cls.user_type)).where((CoolDownEvent.plugin == plugin)).where((CoolDownEvent.user_id == user_id)))) exist_event = session_result.scalar_one() exist_event.stop_at = stop_at exist_event.description = description exist_event.updated_at = datetime.now() result = Result.IntResult(error=False, info='Success upgraded', result=0) except NoResultFound: new_event = CoolDownEvent(event_type=cls.user_type, plugin=plugin, user_id=user_id, stop_at=stop_at, description=description, created_at=datetime.now()) session.add(new_event) result = Result.IntResult(error=False, info='Success added', result=0) (await session.commit()) except MultipleResultsFound: (await session.rollback()) result = Result.IntResult(error=True, info='MultipleResultsFound', result=(- 1)) except Exception as e: (await session.rollback()) result = Result.IntResult(error=True, info=repr(e), result=(- 1)) return result
:return: result = 0: Success result = -1: Error
omega_miya/database/model/cooldown.py
add_user_cool_down_event
rinrini001/omega-miya
120
python
@classmethod async def add_user_cool_down_event(cls, plugin: str, user_id: int, stop_at: datetime, description: str=None) -> Result.IntResult: '\n :return:\n result = 0: Success\n result = -1: Error\n ' async_session = BaseDB().get_async_session() async with async_session() as session: try: async with session.begin(): try: session_result = (await session.execute(select(CoolDownEvent).where((CoolDownEvent.event_type == cls.user_type)).where((CoolDownEvent.plugin == plugin)).where((CoolDownEvent.user_id == user_id)))) exist_event = session_result.scalar_one() exist_event.stop_at = stop_at exist_event.description = description exist_event.updated_at = datetime.now() result = Result.IntResult(error=False, info='Success upgraded', result=0) except NoResultFound: new_event = CoolDownEvent(event_type=cls.user_type, plugin=plugin, user_id=user_id, stop_at=stop_at, description=description, created_at=datetime.now()) session.add(new_event) result = Result.IntResult(error=False, info='Success added', result=0) (await session.commit()) except MultipleResultsFound: (await session.rollback()) result = Result.IntResult(error=True, info='MultipleResultsFound', result=(- 1)) except Exception as e: (await session.rollback()) result = Result.IntResult(error=True, info=repr(e), result=(- 1)) return result
@classmethod async def add_user_cool_down_event(cls, plugin: str, user_id: int, stop_at: datetime, description: str=None) -> Result.IntResult: '\n :return:\n result = 0: Success\n result = -1: Error\n ' async_session = BaseDB().get_async_session() async with async_session() as session: try: async with session.begin(): try: session_result = (await session.execute(select(CoolDownEvent).where((CoolDownEvent.event_type == cls.user_type)).where((CoolDownEvent.plugin == plugin)).where((CoolDownEvent.user_id == user_id)))) exist_event = session_result.scalar_one() exist_event.stop_at = stop_at exist_event.description = description exist_event.updated_at = datetime.now() result = Result.IntResult(error=False, info='Success upgraded', result=0) except NoResultFound: new_event = CoolDownEvent(event_type=cls.user_type, plugin=plugin, user_id=user_id, stop_at=stop_at, description=description, created_at=datetime.now()) session.add(new_event) result = Result.IntResult(error=False, info='Success added', result=0) (await session.commit()) except MultipleResultsFound: (await session.rollback()) result = Result.IntResult(error=True, info='MultipleResultsFound', result=(- 1)) except Exception as e: (await session.rollback()) result = Result.IntResult(error=True, info=repr(e), result=(- 1)) return result<|docstring|>:return: result = 0: Success result = -1: Error<|endoftext|>
db7b09e68aa26b42d76322c8488fa68f626a31672d2a3441ddcf05a5e711e966
@classmethod async def check_user_cool_down_event(cls, plugin: str, user_id: int) -> Result.IntResult: '\n :return:\n result = 2: Success with CoolDown Event expired\n result = 1: Success with CoolDown Event exist\n result = 0: Success with CoolDown Event not found\n result = -1: Error\n ' async_session = BaseDB().get_async_session() async with async_session() as session: async with session.begin(): try: session_result = (await session.execute(select(CoolDownEvent).where((CoolDownEvent.event_type == cls.user_type)).where((CoolDownEvent.plugin == plugin)).where((CoolDownEvent.user_id == user_id)))) event = session_result.scalar_one() stop_at = event.stop_at if (datetime.now() > stop_at): result = Result.IntResult(error=False, info='Success, CoolDown expired', result=2) else: result = Result.IntResult(error=False, info=f'CoolDown until: {stop_at}', result=1) except NoResultFound: result = Result.IntResult(error=False, info='NoResultFound', result=0) except MultipleResultsFound: result = Result.IntResult(error=True, info='MultipleResultsFound', result=(- 1)) except Exception as e: result = Result.IntResult(error=True, info=repr(e), result=(- 1)) return result
:return: result = 2: Success with CoolDown Event expired result = 1: Success with CoolDown Event exist result = 0: Success with CoolDown Event not found result = -1: Error
omega_miya/database/model/cooldown.py
check_user_cool_down_event
rinrini001/omega-miya
120
python
@classmethod async def check_user_cool_down_event(cls, plugin: str, user_id: int) -> Result.IntResult: '\n :return:\n result = 2: Success with CoolDown Event expired\n result = 1: Success with CoolDown Event exist\n result = 0: Success with CoolDown Event not found\n result = -1: Error\n ' async_session = BaseDB().get_async_session() async with async_session() as session: async with session.begin(): try: session_result = (await session.execute(select(CoolDownEvent).where((CoolDownEvent.event_type == cls.user_type)).where((CoolDownEvent.plugin == plugin)).where((CoolDownEvent.user_id == user_id)))) event = session_result.scalar_one() stop_at = event.stop_at if (datetime.now() > stop_at): result = Result.IntResult(error=False, info='Success, CoolDown expired', result=2) else: result = Result.IntResult(error=False, info=f'CoolDown until: {stop_at}', result=1) except NoResultFound: result = Result.IntResult(error=False, info='NoResultFound', result=0) except MultipleResultsFound: result = Result.IntResult(error=True, info='MultipleResultsFound', result=(- 1)) except Exception as e: result = Result.IntResult(error=True, info=repr(e), result=(- 1)) return result
@classmethod async def check_user_cool_down_event(cls, plugin: str, user_id: int) -> Result.IntResult: '\n :return:\n result = 2: Success with CoolDown Event expired\n result = 1: Success with CoolDown Event exist\n result = 0: Success with CoolDown Event not found\n result = -1: Error\n ' async_session = BaseDB().get_async_session() async with async_session() as session: async with session.begin(): try: session_result = (await session.execute(select(CoolDownEvent).where((CoolDownEvent.event_type == cls.user_type)).where((CoolDownEvent.plugin == plugin)).where((CoolDownEvent.user_id == user_id)))) event = session_result.scalar_one() stop_at = event.stop_at if (datetime.now() > stop_at): result = Result.IntResult(error=False, info='Success, CoolDown expired', result=2) else: result = Result.IntResult(error=False, info=f'CoolDown until: {stop_at}', result=1) except NoResultFound: result = Result.IntResult(error=False, info='NoResultFound', result=0) except MultipleResultsFound: result = Result.IntResult(error=True, info='MultipleResultsFound', result=(- 1)) except Exception as e: result = Result.IntResult(error=True, info=repr(e), result=(- 1)) return result<|docstring|>:return: result = 2: Success with CoolDown Event expired result = 1: Success with CoolDown Event exist result = 0: Success with CoolDown Event not found result = -1: Error<|endoftext|>
b4b093d9e156dd49962b5349468d1f058a5ec5e6db86539339fad7b7b859edcd
def initiate(self): '\n Initiates the database driver classes\n :return: None\n ' if (self.driver == 'mysql'): from .mysql.connection import MysqlConnection from .mysql.queryRunner import MysqlQueryRunner from .mysql.queryBuilder import MysqlQueryBuilder from .mysql.tableBuilder import MysqlTableBuilder __ = MysqlConnection(self.credentials) self.connection = __.make() self.execute = MysqlQueryRunner(self.connection) self.builder = MysqlQueryBuilder() self.tableScheme = MysqlTableBuilder else: raise ValueError('Database driver not found.') pass
Initiates the database driver classes :return: None
lib/FluentDB/core/FluentDB.py
initiate
olavoasantos/FluentDB
2
python
def initiate(self): '\n Initiates the database driver classes\n :return: None\n ' if (self.driver == 'mysql'): from .mysql.connection import MysqlConnection from .mysql.queryRunner import MysqlQueryRunner from .mysql.queryBuilder import MysqlQueryBuilder from .mysql.tableBuilder import MysqlTableBuilder __ = MysqlConnection(self.credentials) self.connection = __.make() self.execute = MysqlQueryRunner(self.connection) self.builder = MysqlQueryBuilder() self.tableScheme = MysqlTableBuilder else: raise ValueError('Database driver not found.') pass
def initiate(self): '\n Initiates the database driver classes\n :return: None\n ' if (self.driver == 'mysql'): from .mysql.connection import MysqlConnection from .mysql.queryRunner import MysqlQueryRunner from .mysql.queryBuilder import MysqlQueryBuilder from .mysql.tableBuilder import MysqlTableBuilder __ = MysqlConnection(self.credentials) self.connection = __.make() self.execute = MysqlQueryRunner(self.connection) self.builder = MysqlQueryBuilder() self.tableScheme = MysqlTableBuilder else: raise ValueError('Database driver not found.') pass<|docstring|>Initiates the database driver classes :return: None<|endoftext|>
cbe0f6b275b6b5ff49c1d27bc458a8c716057a636223503f55cdf60a332a782a
def get(self, returnData=False): '\n Builds and executes query to fetch data from database.\n :return: MySQL query results\n ' if self.query: query = self.query else: query = self.builder.build() if returnData: result = self.execute.all(query) else: result = [] for row in self.execute.all(query): result.append(self.initializeModel(row)) return result
Builds and executes query to fetch data from database. :return: MySQL query results
lib/FluentDB/core/FluentDB.py
get
olavoasantos/FluentDB
2
python
def get(self, returnData=False): '\n Builds and executes query to fetch data from database.\n :return: MySQL query results\n ' if self.query: query = self.query else: query = self.builder.build() if returnData: result = self.execute.all(query) else: result = [] for row in self.execute.all(query): result.append(self.initializeModel(row)) return result
def get(self, returnData=False): '\n Builds and executes query to fetch data from database.\n :return: MySQL query results\n ' if self.query: query = self.query else: query = self.builder.build() if returnData: result = self.execute.all(query) else: result = [] for row in self.execute.all(query): result.append(self.initializeModel(row)) return result<|docstring|>Builds and executes query to fetch data from database. :return: MySQL query results<|endoftext|>
5e9ab42a2a5a27aa61ea74ed53086ae76abea4fea78d6d890fa17f587a25ed58
def raw(self, query): '\n Sometimes you may need to use a raw expression in a query.\n :param query: STR MySQL expression\n :return: self\n ' self.query = query return self
Sometimes you may need to use a raw expression in a query. :param query: STR MySQL expression :return: self
lib/FluentDB/core/FluentDB.py
raw
olavoasantos/FluentDB
2
python
def raw(self, query): '\n Sometimes you may need to use a raw expression in a query.\n :param query: STR MySQL expression\n :return: self\n ' self.query = query return self
def raw(self, query): '\n Sometimes you may need to use a raw expression in a query.\n :param query: STR MySQL expression\n :return: self\n ' self.query = query return self<|docstring|>Sometimes you may need to use a raw expression in a query. :param query: STR MySQL expression :return: self<|endoftext|>
ac901ac9d2a0ea06a9feb4a7a72a3e126be6336405ce1c43d78cf37df5909851
def limitBy(self, limit, offset=None): '\n Sets the limit and offset for a query.\n :param limit: INT Limit of rows to be fetched\n :param offset: INT Offset of the first row to return\n :return: self\n ' self.builder.limitBy(limit, offset) return self
Sets the limit and offset for a query. :param limit: INT Limit of rows to be fetched :param offset: INT Offset of the first row to return :return: self
lib/FluentDB/core/FluentDB.py
limitBy
olavoasantos/FluentDB
2
python
def limitBy(self, limit, offset=None): '\n Sets the limit and offset for a query.\n :param limit: INT Limit of rows to be fetched\n :param offset: INT Offset of the first row to return\n :return: self\n ' self.builder.limitBy(limit, offset) return self
def limitBy(self, limit, offset=None): '\n Sets the limit and offset for a query.\n :param limit: INT Limit of rows to be fetched\n :param offset: INT Offset of the first row to return\n :return: self\n ' self.builder.limitBy(limit, offset) return self<|docstring|>Sets the limit and offset for a query. :param limit: INT Limit of rows to be fetched :param offset: INT Offset of the first row to return :return: self<|endoftext|>
6c16d4e7f2fc6c638e8074fa9b32bfb1f478c8e26892753e0ad3a79f8c80a4a2
def orderBy(self, column, order='ASC'): '\n Sort the result of the query by a given column.\n :param column: STR Column name\n :param order: STR Direction of the sort and may be either (ASC | DESC)\n :return: self\n ' self.builder.orderBy(column, order) return self
Sort the result of the query by a given column. :param column: STR Column name :param order: STR Direction of the sort and may be either (ASC | DESC) :return: self
lib/FluentDB/core/FluentDB.py
orderBy
olavoasantos/FluentDB
2
python
def orderBy(self, column, order='ASC'): '\n Sort the result of the query by a given column.\n :param column: STR Column name\n :param order: STR Direction of the sort and may be either (ASC | DESC)\n :return: self\n ' self.builder.orderBy(column, order) return self
def orderBy(self, column, order='ASC'): '\n Sort the result of the query by a given column.\n :param column: STR Column name\n :param order: STR Direction of the sort and may be either (ASC | DESC)\n :return: self\n ' self.builder.orderBy(column, order) return self<|docstring|>Sort the result of the query by a given column. :param column: STR Column name :param order: STR Direction of the sort and may be either (ASC | DESC) :return: self<|endoftext|>
e31025ef8372a09d020215d711ebac5aa144b0c8e24b9ba0932aca3e47797c93
def where(self, *args): "\n Add where clauses to the query.\n :param args: Single arg Adds a clause to search for an entry with a given id (e.g. .where(3) -> WHERE id=3)\n :arg1 INT Sets id\n Two args Adds a clause to search for an entry with a given column and a given value (e.g. .where('name', 'Newton') -> WHERE name='Newton')\n :arg1 STR Column name\n :arg2 STR/INT Value\n Three args Adds a clause to search for an entry with a given column, an operator and a given value (e.g. .where('name', '<>', 'Newton') -> WHERE name<>'Newton')\n :arg1 STR Column name\n :arg2 STR MySQL operator\n :arg3 STR/INT Value\n :return: self\n " self.builder.where(*args) return self
Add where clauses to the query. :param args: Single arg Adds a clause to search for an entry with a given id (e.g. .where(3) -> WHERE id=3) :arg1 INT Sets id Two args Adds a clause to search for an entry with a given column and a given value (e.g. .where('name', 'Newton') -> WHERE name='Newton') :arg1 STR Column name :arg2 STR/INT Value Three args Adds a clause to search for an entry with a given column, an operator and a given value (e.g. .where('name', '<>', 'Newton') -> WHERE name<>'Newton') :arg1 STR Column name :arg2 STR MySQL operator :arg3 STR/INT Value :return: self
lib/FluentDB/core/FluentDB.py
where
olavoasantos/FluentDB
2
python
def where(self, *args): "\n Add where clauses to the query.\n :param args: Single arg Adds a clause to search for an entry with a given id (e.g. .where(3) -> WHERE id=3)\n :arg1 INT Sets id\n Two args Adds a clause to search for an entry with a given column and a given value (e.g. .where('name', 'Newton') -> WHERE name='Newton')\n :arg1 STR Column name\n :arg2 STR/INT Value\n Three args Adds a clause to search for an entry with a given column, an operator and a given value (e.g. .where('name', '<>', 'Newton') -> WHERE name<>'Newton')\n :arg1 STR Column name\n :arg2 STR MySQL operator\n :arg3 STR/INT Value\n :return: self\n " self.builder.where(*args) return self
def where(self, *args): "\n Add where clauses to the query.\n :param args: Single arg Adds a clause to search for an entry with a given id (e.g. .where(3) -> WHERE id=3)\n :arg1 INT Sets id\n Two args Adds a clause to search for an entry with a given column and a given value (e.g. .where('name', 'Newton') -> WHERE name='Newton')\n :arg1 STR Column name\n :arg2 STR/INT Value\n Three args Adds a clause to search for an entry with a given column, an operator and a given value (e.g. .where('name', '<>', 'Newton') -> WHERE name<>'Newton')\n :arg1 STR Column name\n :arg2 STR MySQL operator\n :arg3 STR/INT Value\n :return: self\n " self.builder.where(*args) return self<|docstring|>Add where clauses to the query. :param args: Single arg Adds a clause to search for an entry with a given id (e.g. .where(3) -> WHERE id=3) :arg1 INT Sets id Two args Adds a clause to search for an entry with a given column and a given value (e.g. .where('name', 'Newton') -> WHERE name='Newton') :arg1 STR Column name :arg2 STR/INT Value Three args Adds a clause to search for an entry with a given column, an operator and a given value (e.g. .where('name', '<>', 'Newton') -> WHERE name<>'Newton') :arg1 STR Column name :arg2 STR MySQL operator :arg3 STR/INT Value :return: self<|endoftext|>
23ca2707fe806cff39f227c5737368f5d591a4407e2b565a0beece48a76b80df
def table(self, name): '\n Sets the table name to the query.\n :param table: STR Table name\n :return: self\n ' self.Table = name self.builder.table(name) return self
Sets the table name to the query. :param table: STR Table name :return: self
lib/FluentDB/core/FluentDB.py
table
olavoasantos/FluentDB
2
python
def table(self, name): '\n Sets the table name to the query.\n :param table: STR Table name\n :return: self\n ' self.Table = name self.builder.table(name) return self
def table(self, name): '\n Sets the table name to the query.\n :param table: STR Table name\n :return: self\n ' self.Table = name self.builder.table(name) return self<|docstring|>Sets the table name to the query. :param table: STR Table name :return: self<|endoftext|>
fc5cd379e237d02611448e39dcdc92e05d7b1d32963aec2c1b12d1ea760214ff
def select(self, *columns): '\n Specify a custom select clause for the query.\n :param columns: STR Multiple arguments each with a column name (defaults to "*")\n :return: self\n ' self.builder.select(*columns) return self
Specify a custom select clause for the query. :param columns: STR Multiple arguments each with a column name (defaults to "*") :return: self
lib/FluentDB/core/FluentDB.py
select
olavoasantos/FluentDB
2
python
def select(self, *columns): '\n Specify a custom select clause for the query.\n :param columns: STR Multiple arguments each with a column name (defaults to "*")\n :return: self\n ' self.builder.select(*columns) return self
def select(self, *columns): '\n Specify a custom select clause for the query.\n :param columns: STR Multiple arguments each with a column name (defaults to "*")\n :return: self\n ' self.builder.select(*columns) return self<|docstring|>Specify a custom select clause for the query. :param columns: STR Multiple arguments each with a column name (defaults to "*") :return: self<|endoftext|>
9c30e6ab85ac2c540e4d186b3529aca6e67ad327a7f6b7ef7a5d289b4c05353d
def fibonacci_spheres(samples=1, v=1, randomize=True, r=0): 'Produces pseudo-evenly distributed points on the surface\n of a sphere\n\n Optional arguments give the number of points to return, volume\n of the sphere and to randomize the points initial positions.\n\n returns 3D coordinates of specified number of points\n ' radius = (sphere_vol_to_r(v) - r) rnd = (1 if (randomize is False) else np.random.randint(10)) points = [] offset = (2.0 / samples) increment = (np.pi * (3.0 - np.sqrt(5.0))) for i in range(samples): y = (((i * offset) - 1) + (offset / 2)) r = np.sqrt((1 - pow(y, 2))) phi = (((i + rnd) % samples) * increment) x = (np.cos(phi) * r) z = (np.sin(phi) * r) z = (z * radius) y = (y * radius) x = (x * radius) points.append(np.array([x, y, z])) return points
Produces pseudo-evenly distributed points on the surface of a sphere Optional arguments give the number of points to return, volume of the sphere and to randomize the points initial positions. returns 3D coordinates of specified number of points
PyEscape/escape_points.py
fibonacci_spheres
philippeitis/NarrowEscapeSimulator
0
python
def fibonacci_spheres(samples=1, v=1, randomize=True, r=0): 'Produces pseudo-evenly distributed points on the surface\n of a sphere\n\n Optional arguments give the number of points to return, volume\n of the sphere and to randomize the points initial positions.\n\n returns 3D coordinates of specified number of points\n ' radius = (sphere_vol_to_r(v) - r) rnd = (1 if (randomize is False) else np.random.randint(10)) points = [] offset = (2.0 / samples) increment = (np.pi * (3.0 - np.sqrt(5.0))) for i in range(samples): y = (((i * offset) - 1) + (offset / 2)) r = np.sqrt((1 - pow(y, 2))) phi = (((i + rnd) % samples) * increment) x = (np.cos(phi) * r) z = (np.sin(phi) * r) z = (z * radius) y = (y * radius) x = (x * radius) points.append(np.array([x, y, z])) return points
def fibonacci_spheres(samples=1, v=1, randomize=True, r=0): 'Produces pseudo-evenly distributed points on the surface\n of a sphere\n\n Optional arguments give the number of points to return, volume\n of the sphere and to randomize the points initial positions.\n\n returns 3D coordinates of specified number of points\n ' radius = (sphere_vol_to_r(v) - r) rnd = (1 if (randomize is False) else np.random.randint(10)) points = [] offset = (2.0 / samples) increment = (np.pi * (3.0 - np.sqrt(5.0))) for i in range(samples): y = (((i * offset) - 1) + (offset / 2)) r = np.sqrt((1 - pow(y, 2))) phi = (((i + rnd) % samples) * increment) x = (np.cos(phi) * r) z = (np.sin(phi) * r) z = (z * radius) y = (y * radius) x = (x * radius) points.append(np.array([x, y, z])) return points<|docstring|>Produces pseudo-evenly distributed points on the surface of a sphere Optional arguments give the number of points to return, volume of the sphere and to randomize the points initial positions. returns 3D coordinates of specified number of points<|endoftext|>
7b8f28d2c0fb6fc70c1559d899a1f4a595b05868e5ecc16c7c5a31b34f09cefb
def points_on_cube_surface(samples, r=1): 'Gives a random distribution of points on a cube surface\n\n A number of samples and an optional cube radius can be given\n\n returns a series of points randomly distributed on surface of cube\n ' points = [] r = np.cbrt(r) for i in range(samples): p = ((np.random.random(3) * (r / 2)) * np.random.choice([(- 1), 1])) dim = np.random.choice([0, 1, 2]) p[dim] = ((r / 2) * np.random.choice([(- 1), 1])) points.append(p) return points
Gives a random distribution of points on a cube surface A number of samples and an optional cube radius can be given returns a series of points randomly distributed on surface of cube
PyEscape/escape_points.py
points_on_cube_surface
philippeitis/NarrowEscapeSimulator
0
python
def points_on_cube_surface(samples, r=1): 'Gives a random distribution of points on a cube surface\n\n A number of samples and an optional cube radius can be given\n\n returns a series of points randomly distributed on surface of cube\n ' points = [] r = np.cbrt(r) for i in range(samples): p = ((np.random.random(3) * (r / 2)) * np.random.choice([(- 1), 1])) dim = np.random.choice([0, 1, 2]) p[dim] = ((r / 2) * np.random.choice([(- 1), 1])) points.append(p) return points
def points_on_cube_surface(samples, r=1): 'Gives a random distribution of points on a cube surface\n\n A number of samples and an optional cube radius can be given\n\n returns a series of points randomly distributed on surface of cube\n ' points = [] r = np.cbrt(r) for i in range(samples): p = ((np.random.random(3) * (r / 2)) * np.random.choice([(- 1), 1])) dim = np.random.choice([0, 1, 2]) p[dim] = ((r / 2) * np.random.choice([(- 1), 1])) points.append(p) return points<|docstring|>Gives a random distribution of points on a cube surface A number of samples and an optional cube radius can be given returns a series of points randomly distributed on surface of cube<|endoftext|>
0d0e55fdf543b45ac231b0f58bfffd3583e93cdd9899a961000c35874170414a
def init(url, token, user_id, force=False, course_ids=None): 'initializes working tree: creates local .mdt/config, with chosen courses' ms = MoodleSession(moodle_url=url, token=token) wrapped = wrappers.CourseListResponse(ms.core_enrol_get_users_courses(user_id)) courses = list(wrapped) courses.sort(key=(lambda course: course.full_name)) saved_data = [] if ((course_ids is None) or force): choices = interaction.input_choices_from_list(courses, '\n choose courses, seperate with space: ') if (len(choices) == 0): print('nothing chosen.') raise SystemExit(0) chosen_courses = [courses[c] for c in choices] print(('using:\n' + ' '.join([str(c) for c in chosen_courses]))) course_ids = [c.id for c in chosen_courses] saved_data = [c for c in wrapped.raw if (c['id'] in course_ids)] try: wt = WorkTree(init=True, force=force) except FileExistsError: print('already initialized') raise SystemExit(1) wt.courses = saved_data wt.write_local_config(('courseids = ' + str(course_ids))) sync(url=url, token=token)
initializes working tree: creates local .mdt/config, with chosen courses
commands/webservice/init.py
init
einsweniger/mdt
9
python
def init(url, token, user_id, force=False, course_ids=None): ms = MoodleSession(moodle_url=url, token=token) wrapped = wrappers.CourseListResponse(ms.core_enrol_get_users_courses(user_id)) courses = list(wrapped) courses.sort(key=(lambda course: course.full_name)) saved_data = [] if ((course_ids is None) or force): choices = interaction.input_choices_from_list(courses, '\n choose courses, seperate with space: ') if (len(choices) == 0): print('nothing chosen.') raise SystemExit(0) chosen_courses = [courses[c] for c in choices] print(('using:\n' + ' '.join([str(c) for c in chosen_courses]))) course_ids = [c.id for c in chosen_courses] saved_data = [c for c in wrapped.raw if (c['id'] in course_ids)] try: wt = WorkTree(init=True, force=force) except FileExistsError: print('already initialized') raise SystemExit(1) wt.courses = saved_data wt.write_local_config(('courseids = ' + str(course_ids))) sync(url=url, token=token)
def init(url, token, user_id, force=False, course_ids=None): ms = MoodleSession(moodle_url=url, token=token) wrapped = wrappers.CourseListResponse(ms.core_enrol_get_users_courses(user_id)) courses = list(wrapped) courses.sort(key=(lambda course: course.full_name)) saved_data = [] if ((course_ids is None) or force): choices = interaction.input_choices_from_list(courses, '\n choose courses, seperate with space: ') if (len(choices) == 0): print('nothing chosen.') raise SystemExit(0) chosen_courses = [courses[c] for c in choices] print(('using:\n' + ' '.join([str(c) for c in chosen_courses]))) course_ids = [c.id for c in chosen_courses] saved_data = [c for c in wrapped.raw if (c['id'] in course_ids)] try: wt = WorkTree(init=True, force=force) except FileExistsError: print('already initialized') raise SystemExit(1) wt.courses = saved_data wt.write_local_config(('courseids = ' + str(course_ids))) sync(url=url, token=token)<|docstring|>initializes working tree: creates local .mdt/config, with chosen courses<|endoftext|>
73b7b561979c10d0e25efdb6a880d5ad7d432d06f996b48f1648205c6535ef8c
def KnoelliaFlava(directed: bool = False, verbose: int = 2,
                  cache_path: str = 'graphs/string',
                  **additional_graph_kwargs: Dict) -> EnsmallenGraph:
    """Return a new instance of the Knoellia flava graph.

    The graph is automatically retrieved from the STRING repository.

    Parameters
    ----------
    directed: bool = False
        Whether to load the graph as directed or undirected.
    verbose: int = 2
        Whether to show loading bars during retrieval and building.
    cache_path: str = "graphs/string"
        Where to store the downloaded graph files.
    additional_graph_kwargs: Dict
        Additional keyword arguments forwarded to the graph loader.

    Returns
    -------
    Instance of the Knoellia flava graph.

    References
    ----------
    Szklarczyk et al., "STRING v11: protein-protein association networks
    with increased coverage, supporting functional discovery in genome-wide
    experimental datasets", Nucleic Acids Research, 2019.
    """
    # Delegate download, caching and construction to the generic loader,
    # then call the returned factory to build the graph object.
    loader = AutomaticallyRetrievedGraph(
        graph_name='KnoelliaFlava',
        dataset='string',
        directed=directed,
        verbose=verbose,
        cache_path=cache_path,
        additional_graph_kwargs=additional_graph_kwargs,
    )
    return loader()
Return new instance of the Knoellia flava graph. The graph is automatically retrieved from the STRING repository. Parameters ------------------- directed: bool = False, Wether to load the graph as directed or undirected. By default false. verbose: int = 2, Wether to show loading bars during the retrieval and building of the graph. cache_path: str = "graphs", Where to store the downloaded graphs. additional_graph_kwargs: Dict, Additional graph kwargs. Returns ----------------------- Instace of Knoellia flava graph. Report --------------------- At the time of rendering these methods (please see datetime below), the graph had the following characteristics: Datetime: 2021-02-03 22:20:22.123680 The undirected graph Knoellia flava has 3342 nodes and 316069 weighted edges, of which none are self-loops. The graph is dense as it has a density of 0.05661 and has 14 connected components, where the component with most nodes has 3313 nodes and the component with the least nodes has 2 nodes. The graph median node degree is 161, the mean node degree is 189.15, and the node degree mode is 2. The top 5 most central nodes are 1385518.N798_14485 (degree 1118), 1385518.N798_05830 (degree 1018), 1385518.N798_00720 (degree 1004), 1385518.N798_15355 (degree 995) and 1385518.N798_09470 (degree 980). 
References --------------------- Please cite the following if you use the data: @article{szklarczyk2019string, title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets}, author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others}, journal={Nucleic acids research}, volume={47}, number={D1}, pages={D607--D613}, year={2019}, publisher={Oxford University Press} } Usage example ---------------------- The usage of this graph is relatively straightforward: .. code:: python # First import the function to retrieve the graph from the datasets from ensmallen_graph.datasets.string import KnoelliaFlava # Then load the graph graph = KnoelliaFlava() # Finally, you can do anything with it, for instance, compute its report: print(graph) # If you need to run a link prediction task with validation, # you can split the graph using a connected holdout as follows: train_graph, validation_graph = graph.connected_holdout( # You can use an 80/20 split the holdout, for example. train_size=0.8, # The random state is used to reproduce the holdout. random_state=42, # Wether to show a loading bar. verbose=True ) # Remember that, if you need, you can enable the memory-time trade-offs: train_graph.enable( vector_sources=True, vector_destinations=True, vector_outbounds=True ) # Consider using the methods made available in the Embiggen package # to run graph embedding or link prediction tasks.
bindings/python/ensmallen_graph/datasets/string/knoelliaflava.py
KnoelliaFlava
caufieldjh/ensmallen_graph
0
python
def KnoelliaFlava(directed: bool=False, verbose: int=2, cache_path: str='graphs/string', **additional_graph_kwargs: Dict) -> EnsmallenGraph: 'Return new instance of the Knoellia flava graph.\n\n The graph is automatically retrieved from the STRING repository. \n\n\t\n\n Parameters\n -------------------\n directed: bool = False,\n Wether to load the graph as directed or undirected.\n By default false.\n verbose: int = 2,\n Wether to show loading bars during the retrieval and building\n of the graph.\n cache_path: str = "graphs",\n Where to store the downloaded graphs.\n additional_graph_kwargs: Dict,\n Additional graph kwargs.\n\n Returns\n -----------------------\n Instace of Knoellia flava graph.\n\n\tReport\n\t---------------------\n\tAt the time of rendering these methods (please see datetime below), the graph\n\thad the following characteristics:\n\t\n\tDatetime: 2021-02-03 22:20:22.123680\n\t\n\tThe undirected graph Knoellia flava has 3342 nodes and 316069 weighted\n\tedges, of which none are self-loops. The graph is dense as it has a density\n\tof 0.05661 and has 14 connected components, where the component with most\n\tnodes has 3313 nodes and the component with the least nodes has 2 nodes.\n\tThe graph median node degree is 161, the mean node degree is 189.15, and\n\tthe node degree mode is 2. 
The top 5 most central nodes are 1385518.N798_14485\n\t(degree 1118), 1385518.N798_05830 (degree 1018), 1385518.N798_00720 (degree\n\t1004), 1385518.N798_15355 (degree 995) and 1385518.N798_09470 (degree 980).\n\t\n\n\tReferences\n\t---------------------\n\tPlease cite the following if you use the data:\n\t\n\t@article{szklarczyk2019string,\n\t title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},\n\t author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},\n\t journal={Nucleic acids research},\n\t volume={47},\n\t number={D1},\n\t pages={D607--D613},\n\t year={2019},\n\t publisher={Oxford University Press}\n\t}\n\t\n\n\tUsage example\n\t----------------------\n\tThe usage of this graph is relatively straightforward:\n\t\n\t.. code:: python\n\t\n\t # First import the function to retrieve the graph from the datasets\n\t from ensmallen_graph.datasets.string import KnoelliaFlava\n\t\n\t # Then load the graph\n\t graph = KnoelliaFlava()\n\t\n\t # Finally, you can do anything with it, for instance, compute its report:\n\t print(graph)\n\t\n\t # If you need to run a link prediction task with validation,\n\t # you can split the graph using a connected holdout as follows:\n\t train_graph, validation_graph = graph.connected_holdout(\n\t # You can use an 80/20 split the holdout, for example.\n\t train_size=0.8,\n\t # The random state is used to reproduce the holdout.\n\t random_state=42,\n\t # Wether to show a loading bar.\n\t verbose=True\n\t )\n\t\n\t # Remember that, if you need, you can enable the memory-time trade-offs:\n\t train_graph.enable(\n\t vector_sources=True,\n\t vector_destinations=True,\n\t vector_outbounds=True\n\t )\n\t\n\t # Consider using the methods made available in the Embiggen package\n\t # to 
run graph embedding or link prediction tasks.\n ' return AutomaticallyRetrievedGraph(graph_name='KnoelliaFlava', dataset='string', directed=directed, verbose=verbose, cache_path=cache_path, additional_graph_kwargs=additional_graph_kwargs)()
def KnoelliaFlava(directed: bool=False, verbose: int=2, cache_path: str='graphs/string', **additional_graph_kwargs: Dict) -> EnsmallenGraph: 'Return new instance of the Knoellia flava graph.\n\n The graph is automatically retrieved from the STRING repository. \n\n\t\n\n Parameters\n -------------------\n directed: bool = False,\n Wether to load the graph as directed or undirected.\n By default false.\n verbose: int = 2,\n Wether to show loading bars during the retrieval and building\n of the graph.\n cache_path: str = "graphs",\n Where to store the downloaded graphs.\n additional_graph_kwargs: Dict,\n Additional graph kwargs.\n\n Returns\n -----------------------\n Instace of Knoellia flava graph.\n\n\tReport\n\t---------------------\n\tAt the time of rendering these methods (please see datetime below), the graph\n\thad the following characteristics:\n\t\n\tDatetime: 2021-02-03 22:20:22.123680\n\t\n\tThe undirected graph Knoellia flava has 3342 nodes and 316069 weighted\n\tedges, of which none are self-loops. The graph is dense as it has a density\n\tof 0.05661 and has 14 connected components, where the component with most\n\tnodes has 3313 nodes and the component with the least nodes has 2 nodes.\n\tThe graph median node degree is 161, the mean node degree is 189.15, and\n\tthe node degree mode is 2. 
The top 5 most central nodes are 1385518.N798_14485\n\t(degree 1118), 1385518.N798_05830 (degree 1018), 1385518.N798_00720 (degree\n\t1004), 1385518.N798_15355 (degree 995) and 1385518.N798_09470 (degree 980).\n\t\n\n\tReferences\n\t---------------------\n\tPlease cite the following if you use the data:\n\t\n\t@article{szklarczyk2019string,\n\t title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},\n\t author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},\n\t journal={Nucleic acids research},\n\t volume={47},\n\t number={D1},\n\t pages={D607--D613},\n\t year={2019},\n\t publisher={Oxford University Press}\n\t}\n\t\n\n\tUsage example\n\t----------------------\n\tThe usage of this graph is relatively straightforward:\n\t\n\t.. code:: python\n\t\n\t # First import the function to retrieve the graph from the datasets\n\t from ensmallen_graph.datasets.string import KnoelliaFlava\n\t\n\t # Then load the graph\n\t graph = KnoelliaFlava()\n\t\n\t # Finally, you can do anything with it, for instance, compute its report:\n\t print(graph)\n\t\n\t # If you need to run a link prediction task with validation,\n\t # you can split the graph using a connected holdout as follows:\n\t train_graph, validation_graph = graph.connected_holdout(\n\t # You can use an 80/20 split the holdout, for example.\n\t train_size=0.8,\n\t # The random state is used to reproduce the holdout.\n\t random_state=42,\n\t # Wether to show a loading bar.\n\t verbose=True\n\t )\n\t\n\t # Remember that, if you need, you can enable the memory-time trade-offs:\n\t train_graph.enable(\n\t vector_sources=True,\n\t vector_destinations=True,\n\t vector_outbounds=True\n\t )\n\t\n\t # Consider using the methods made available in the Embiggen package\n\t # to 
run graph embedding or link prediction tasks.\n ' return AutomaticallyRetrievedGraph(graph_name='KnoelliaFlava', dataset='string', directed=directed, verbose=verbose, cache_path=cache_path, additional_graph_kwargs=additional_graph_kwargs)()<|docstring|>Return new instance of the Knoellia flava graph. The graph is automatically retrieved from the STRING repository. Parameters ------------------- directed: bool = False, Wether to load the graph as directed or undirected. By default false. verbose: int = 2, Wether to show loading bars during the retrieval and building of the graph. cache_path: str = "graphs", Where to store the downloaded graphs. additional_graph_kwargs: Dict, Additional graph kwargs. Returns ----------------------- Instace of Knoellia flava graph. Report --------------------- At the time of rendering these methods (please see datetime below), the graph had the following characteristics: Datetime: 2021-02-03 22:20:22.123680 The undirected graph Knoellia flava has 3342 nodes and 316069 weighted edges, of which none are self-loops. The graph is dense as it has a density of 0.05661 and has 14 connected components, where the component with most nodes has 3313 nodes and the component with the least nodes has 2 nodes. The graph median node degree is 161, the mean node degree is 189.15, and the node degree mode is 2. The top 5 most central nodes are 1385518.N798_14485 (degree 1118), 1385518.N798_05830 (degree 1018), 1385518.N798_00720 (degree 1004), 1385518.N798_15355 (degree 995) and 1385518.N798_09470 (degree 980). 
References --------------------- Please cite the following if you use the data: @article{szklarczyk2019string, title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets}, author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others}, journal={Nucleic acids research}, volume={47}, number={D1}, pages={D607--D613}, year={2019}, publisher={Oxford University Press} } Usage example ---------------------- The usage of this graph is relatively straightforward: .. code:: python # First import the function to retrieve the graph from the datasets from ensmallen_graph.datasets.string import KnoelliaFlava # Then load the graph graph = KnoelliaFlava() # Finally, you can do anything with it, for instance, compute its report: print(graph) # If you need to run a link prediction task with validation, # you can split the graph using a connected holdout as follows: train_graph, validation_graph = graph.connected_holdout( # You can use an 80/20 split the holdout, for example. train_size=0.8, # The random state is used to reproduce the holdout. random_state=42, # Wether to show a loading bar. verbose=True ) # Remember that, if you need, you can enable the memory-time trade-offs: train_graph.enable( vector_sources=True, vector_destinations=True, vector_outbounds=True ) # Consider using the methods made available in the Embiggen package # to run graph embedding or link prediction tasks.<|endoftext|>
97f3fdaa37d34c8b4193c78216ae5ac97f9fa3a923e7df683b58c17a6f1c215d
def get_client(self):
    """Return the underlying boto3/ibm_boto3 S3 client instance.

    :return: the S3 client this backend was configured with
    """
    return self.s3_client
Get ibm_boto3 client. :return: ibm_boto3 client
lithops/storage/backends/ceph/ceph.py
get_client
ayalaraanan/lithops
55
python
def get_client(self): '\n Get ibm_boto3 client.\n :return: ibm_boto3 client\n ' return self.s3_client
def get_client(self): '\n Get ibm_boto3 client.\n :return: ibm_boto3 client\n ' return self.s3_client<|docstring|>Get ibm_boto3 client. :return: ibm_boto3 client<|endoftext|>
4b11e7adcb2f6334b32f288384496841432c8db90990fd9f843e4104686904a0
def put_object(self, bucket_name, key, data):
    """Put an object in Ceph. Overrides the object if the key already exists.

    :param bucket_name: bucket to write into
    :param key: key of the object.
    :param data: data of the object
    :type data: str/bytes
    :return: True (also when the backend returned a non-200 status — see note)
    :raises StorageNoSuchKeyError: on a 'NoSuchKey' client error
    """
    retries = 0
    status = None
    # Retry loop: only read timeouts are retried (at most OBJ_REQ_RETRIES
    # times); any other outcome sets `status` and ends the loop.
    while (status is None):
        try:
            res = self.s3_client.put_object(Bucket=bucket_name, Key=key, Body=data)
            status = ('OK' if (res['ResponseMetadata']['HTTPStatusCode'] == 200) else 'Error')
            try:
                logger.debug('PUT Object {} - Size: {} - {}'.format(key, sizeof_fmt(len(data)), status))
            except Exception:
                # len()/sizeof_fmt can fail for bodies without a size
                # (e.g. streams) — fall back to logging without the size.
                logger.debug('PUT Object {} {}'.format(key, status))
        except botocore.exceptions.ClientError as e:
            if (e.response['Error']['Code'] == 'NoSuchKey'):
                raise StorageNoSuchKeyError(bucket_name, key)
            else:
                raise e
        except botocore.exceptions.ReadTimeoutError as e:
            if (retries == OBJ_REQ_RETRIES):
                raise e
            logger.debug('PUT Object timeout. Retrying request')
            retries += 1
    # NOTE(review): True is returned even when status == 'Error'
    # (non-200 response) — confirm callers do not rely on this flag.
    return True
Put an object in COS. Override the object if the key already exists. :param key: key of the object. :param data: data of the object :type data: str/bytes :return: None
lithops/storage/backends/ceph/ceph.py
put_object
ayalaraanan/lithops
55
python
def put_object(self, bucket_name, key, data): '\n Put an object in COS. Override the object if the key already exists.\n :param key: key of the object.\n :param data: data of the object\n :type data: str/bytes\n :return: None\n ' retries = 0 status = None while (status is None): try: res = self.s3_client.put_object(Bucket=bucket_name, Key=key, Body=data) status = ('OK' if (res['ResponseMetadata']['HTTPStatusCode'] == 200) else 'Error') try: logger.debug('PUT Object {} - Size: {} - {}'.format(key, sizeof_fmt(len(data)), status)) except Exception: logger.debug('PUT Object {} {}'.format(key, status)) except botocore.exceptions.ClientError as e: if (e.response['Error']['Code'] == 'NoSuchKey'): raise StorageNoSuchKeyError(bucket_name, key) else: raise e except botocore.exceptions.ReadTimeoutError as e: if (retries == OBJ_REQ_RETRIES): raise e logger.debug('PUT Object timeout. Retrying request') retries += 1 return True
def put_object(self, bucket_name, key, data): '\n Put an object in COS. Override the object if the key already exists.\n :param key: key of the object.\n :param data: data of the object\n :type data: str/bytes\n :return: None\n ' retries = 0 status = None while (status is None): try: res = self.s3_client.put_object(Bucket=bucket_name, Key=key, Body=data) status = ('OK' if (res['ResponseMetadata']['HTTPStatusCode'] == 200) else 'Error') try: logger.debug('PUT Object {} - Size: {} - {}'.format(key, sizeof_fmt(len(data)), status)) except Exception: logger.debug('PUT Object {} {}'.format(key, status)) except botocore.exceptions.ClientError as e: if (e.response['Error']['Code'] == 'NoSuchKey'): raise StorageNoSuchKeyError(bucket_name, key) else: raise e except botocore.exceptions.ReadTimeoutError as e: if (retries == OBJ_REQ_RETRIES): raise e logger.debug('PUT Object timeout. Retrying request') retries += 1 return True<|docstring|>Put an object in COS. Override the object if the key already exists. :param key: key of the object. :param data: data of the object :type data: str/bytes :return: None<|endoftext|>
359e08c3f4c851ee72c19183ad0ae156c75403a14dc120713e69785fbe920168
def get_object(self, bucket_name, key, stream=False, extra_get_args={}):
    """Get an object from Ceph by key.

    Throws StorageNoSuchKeyError if the given key does not exist.

    :param bucket_name: bucket to read from
    :param key: key of the object
    :param stream: when True, return the streaming body instead of bytes
    :param extra_get_args: extra kwargs forwarded to the client get_object
        call (e.g. Range).  NOTE(review): mutable default — safe only while
        it is never mutated; confirm.
    :return: streaming body when ``stream`` is True, otherwise the object bytes
    :rtype: str/bytes
    """
    data = None
    retries = 0
    # Retry loop: only read timeouts are retried (at most OBJ_REQ_RETRIES
    # times); success or any other error ends the loop.
    while (data is None):
        try:
            r = self.s3_client.get_object(Bucket=bucket_name, Key=key, **extra_get_args)
            if stream:
                # Hand back the raw streaming body; the caller reads/closes it.
                data = r['Body']
            else:
                data = r['Body'].read()
        except botocore.exceptions.ClientError as e:
            if (e.response['Error']['Code'] == 'NoSuchKey'):
                raise StorageNoSuchKeyError(bucket_name, key)
            else:
                raise e
        except botocore.exceptions.ReadTimeoutError as e:
            if (retries == OBJ_REQ_RETRIES):
                raise e
            logger.debug('GET Object timeout. Retrying request')
            retries += 1
    return data
Get object from Ceph with a key. Throws StorageNoSuchKeyError if the given key does not exist. :param key: key of the object :return: Data of the object :rtype: str/bytes
lithops/storage/backends/ceph/ceph.py
get_object
ayalaraanan/lithops
55
python
def get_object(self, bucket_name, key, stream=False, extra_get_args={}): '\n Get object from Ceph with a key. Throws StorageNoSuchKeyError if the given key does not exist.\n :param key: key of the object\n :return: Data of the object\n :rtype: str/bytes\n ' data = None retries = 0 while (data is None): try: r = self.s3_client.get_object(Bucket=bucket_name, Key=key, **extra_get_args) if stream: data = r['Body'] else: data = r['Body'].read() except botocore.exceptions.ClientError as e: if (e.response['Error']['Code'] == 'NoSuchKey'): raise StorageNoSuchKeyError(bucket_name, key) else: raise e except botocore.exceptions.ReadTimeoutError as e: if (retries == OBJ_REQ_RETRIES): raise e logger.debug('GET Object timeout. Retrying request') retries += 1 return data
def get_object(self, bucket_name, key, stream=False, extra_get_args={}): '\n Get object from Ceph with a key. Throws StorageNoSuchKeyError if the given key does not exist.\n :param key: key of the object\n :return: Data of the object\n :rtype: str/bytes\n ' data = None retries = 0 while (data is None): try: r = self.s3_client.get_object(Bucket=bucket_name, Key=key, **extra_get_args) if stream: data = r['Body'] else: data = r['Body'].read() except botocore.exceptions.ClientError as e: if (e.response['Error']['Code'] == 'NoSuchKey'): raise StorageNoSuchKeyError(bucket_name, key) else: raise e except botocore.exceptions.ReadTimeoutError as e: if (retries == OBJ_REQ_RETRIES): raise e logger.debug('GET Object timeout. Retrying request') retries += 1 return data<|docstring|>Get object from Ceph with a key. Throws StorageNoSuchKeyError if the given key does not exist. :param key: key of the object :return: Data of the object :rtype: str/bytes<|endoftext|>
a539a7a53f215185d0051dd656701fe51dc6de48b78f68eb90985daa460272d0
def upload_file(self, file_name, bucket, key=None, extra_args={}):
    """Upload a local file to an S3 bucket.

    :param file_name: File to upload
    :param bucket: Bucket to upload to
    :param key: S3 object name. If not specified then file_name is used
    :param extra_args: extra arguments forwarded as ExtraArgs to the client
    :return: True if file was uploaded, else False
    """
    if (key is None):
        # Default the object key to the file's basename.
        key = os.path.basename(file_name)
    try:
        self.s3_client.upload_file(file_name, bucket, key, ExtraArgs=extra_args)
    except botocore.exceptions.ClientError as e:
        # Use the module-level logger for consistency with the other
        # methods (previously this called the root `logging` module).
        logger.error(e)
        return False
    return True
Upload a file to an S3 bucket :param file_name: File to upload :param bucket: Bucket to upload to :param key: S3 object name. If not specified then file_name is used :return: True if file was uploaded, else False
lithops/storage/backends/ceph/ceph.py
upload_file
ayalaraanan/lithops
55
python
def upload_file(self, file_name, bucket, key=None, extra_args={}): 'Upload a file to an S3 bucket\n\n :param file_name: File to upload\n :param bucket: Bucket to upload to\n :param key: S3 object name. If not specified then file_name is used\n :return: True if file was uploaded, else False\n ' if (key is None): key = os.path.basename(file_name) try: self.s3_client.upload_file(file_name, bucket, key, ExtraArgs=extra_args) except botocore.exceptions.ClientError as e: logging.error(e) return False return True
def upload_file(self, file_name, bucket, key=None, extra_args={}): 'Upload a file to an S3 bucket\n\n :param file_name: File to upload\n :param bucket: Bucket to upload to\n :param key: S3 object name. If not specified then file_name is used\n :return: True if file was uploaded, else False\n ' if (key is None): key = os.path.basename(file_name) try: self.s3_client.upload_file(file_name, bucket, key, ExtraArgs=extra_args) except botocore.exceptions.ClientError as e: logging.error(e) return False return True<|docstring|>Upload a file to an S3 bucket :param file_name: File to upload :param bucket: Bucket to upload to :param key: S3 object name. If not specified then file_name is used :return: True if file was uploaded, else False<|endoftext|>
3d31612cbd005558ac8f15980e460750fdfb800ef30a96a41da39c4e50d41239
def download_file(self, bucket, key, file_name=None, extra_args={}):
    """Download a file from an S3 bucket to the local filesystem.

    :param bucket: Bucket to download from
    :param key: S3 object name. If file_name is not specified it is also
        used as the local file path
    :param file_name: local destination path; defaults to the object key
    :param extra_args: extra arguments forwarded as ExtraArgs to the client
    :return: True if file was downloaded, else False
    """
    if (file_name is None):
        # Mirror the object key as the local path.
        file_name = key
    try:
        self.s3_client.download_file(bucket, key, file_name, ExtraArgs=extra_args)
    except botocore.exceptions.ClientError as e:
        # Use the module-level logger for consistency with the other
        # methods (previously this called the root `logging` module).
        logger.error(e)
        return False
    return True
Download a file from an S3 bucket :param bucket: Bucket to download from :param key: S3 object name. If not specified then file_name is used :param file_name: File to upload :return: True if file was downloaded, else False
lithops/storage/backends/ceph/ceph.py
download_file
ayalaraanan/lithops
55
python
def download_file(self, bucket, key, file_name=None, extra_args={}): 'Download a file from an S3 bucket\n\n :param bucket: Bucket to download from\n :param key: S3 object name. If not specified then file_name is used\n :param file_name: File to upload\n :return: True if file was downloaded, else False\n ' if (file_name is None): file_name = key try: self.s3_client.download_file(bucket, key, file_name, ExtraArgs=extra_args) except botocore.exceptions.ClientError as e: logging.error(e) return False return True
def download_file(self, bucket, key, file_name=None, extra_args={}): 'Download a file from an S3 bucket\n\n :param bucket: Bucket to download from\n :param key: S3 object name. If not specified then file_name is used\n :param file_name: File to upload\n :return: True if file was downloaded, else False\n ' if (file_name is None): file_name = key try: self.s3_client.download_file(bucket, key, file_name, ExtraArgs=extra_args) except botocore.exceptions.ClientError as e: logging.error(e) return False return True<|docstring|>Download a file from an S3 bucket :param bucket: Bucket to download from :param key: S3 object name. If not specified then file_name is used :param file_name: File to upload :return: True if file was downloaded, else False<|endoftext|>
58e76e7cdcd4b1815233037f6c24daddcf59d2c99ca2a691e55e9282c4282903
def head_object(self, bucket_name, key):
    """Head an object in Ceph by key.

    Throws StorageNoSuchKeyError if the given key does not exist.

    :param bucket_name: bucket containing the object
    :param key: key of the object
    :return: the HTTP headers of the HEAD response (object metadata),
        not the object data itself
    :rtype: dict
    """
    metadata = None
    retries = 0
    # Retry loop: only read timeouts are retried (at most OBJ_REQ_RETRIES
    # times); success or any other error ends the loop.
    while (metadata is None):
        try:
            metadata = self.s3_client.head_object(Bucket=bucket_name, Key=key)
        except botocore.exceptions.ClientError as e:
            # HEAD reports a missing key as HTTP 404 rather than 'NoSuchKey'.
            if (e.response['Error']['Code'] == '404'):
                raise StorageNoSuchKeyError(bucket_name, key)
            else:
                raise e
        except botocore.exceptions.ReadTimeoutError as e:
            if (retries == OBJ_REQ_RETRIES):
                raise e
            logger.debug('HEAD Object timeout. Retrying request')
            retries += 1
    return metadata['ResponseMetadata']['HTTPHeaders']
Head object from Ceph with a key. Throws StorageNoSuchKeyError if the given key does not exist. :param key: key of the object :return: Data of the object :rtype: str/bytes
lithops/storage/backends/ceph/ceph.py
head_object
ayalaraanan/lithops
55
python
def head_object(self, bucket_name, key): '\n Head object from Ceph with a key. Throws StorageNoSuchKeyError if the given key does not exist.\n :param key: key of the object\n :return: Data of the object\n :rtype: str/bytes\n ' metadata = None retries = 0 while (metadata is None): try: metadata = self.s3_client.head_object(Bucket=bucket_name, Key=key) except botocore.exceptions.ClientError as e: if (e.response['Error']['Code'] == '404'): raise StorageNoSuchKeyError(bucket_name, key) else: raise e except botocore.exceptions.ReadTimeoutError as e: if (retries == OBJ_REQ_RETRIES): raise e logger.debug('HEAD Object timeout. Retrying request') retries += 1 return metadata['ResponseMetadata']['HTTPHeaders']
def head_object(self, bucket_name, key): '\n Head object from Ceph with a key. Throws StorageNoSuchKeyError if the given key does not exist.\n :param key: key of the object\n :return: Data of the object\n :rtype: str/bytes\n ' metadata = None retries = 0 while (metadata is None): try: metadata = self.s3_client.head_object(Bucket=bucket_name, Key=key) except botocore.exceptions.ClientError as e: if (e.response['Error']['Code'] == '404'): raise StorageNoSuchKeyError(bucket_name, key) else: raise e except botocore.exceptions.ReadTimeoutError as e: if (retries == OBJ_REQ_RETRIES): raise e logger.debug('HEAD Object timeout. Retrying request') retries += 1 return metadata['ResponseMetadata']['HTTPHeaders']<|docstring|>Head object from Ceph with a key. Throws StorageNoSuchKeyError if the given key does not exist. :param key: key of the object :return: Data of the object :rtype: str/bytes<|endoftext|>
bb1621234b746ce914042ec75f4f2c740bbe1daf48e31688433b39685e78c83e
def delete_object(self, bucket_name, key):
    """Delete a single object from storage.

    :param bucket_name: bucket name
    :param key: data key
    :return: the raw client response for the delete call
    """
    response = self.s3_client.delete_object(Bucket=bucket_name, Key=key)
    return response
Delete an object from storage. :param bucket: bucket name :param key: data key
lithops/storage/backends/ceph/ceph.py
delete_object
ayalaraanan/lithops
55
python
def delete_object(self, bucket_name, key): '\n Delete an object from storage.\n :param bucket: bucket name\n :param key: data key\n ' return self.s3_client.delete_object(Bucket=bucket_name, Key=key)
def delete_object(self, bucket_name, key): '\n Delete an object from storage.\n :param bucket: bucket name\n :param key: data key\n ' return self.s3_client.delete_object(Bucket=bucket_name, Key=key)<|docstring|>Delete an object from storage. :param bucket: bucket name :param key: data key<|endoftext|>
209823c3f0f8f32d454f600dd95687ffb053944f342963dd0ae9d7cc32edbea4
def delete_objects(self, bucket_name, key_list):
    """Delete a list of objects from storage.

    Keys are removed in batches of at most 1000, the S3 API limit for a
    single delete_objects request.

    :param bucket_name: bucket name
    :param key_list: list of keys
    :return: list with one raw client response per batch
    """
    batch_size = 1000
    responses = []
    for start in range(0, len(key_list), batch_size):
        batch = key_list[start:(start + batch_size)]
        payload = {'Objects': [{'Key': name} for name in batch]}
        responses.append(self.s3_client.delete_objects(Bucket=bucket_name, Delete=payload))
    return responses
Delete a list of objects from storage. :param bucket: bucket name :param key_list: list of keys
lithops/storage/backends/ceph/ceph.py
delete_objects
ayalaraanan/lithops
55
python
def delete_objects(self, bucket_name, key_list): '\n Delete a list of objects from storage.\n :param bucket: bucket name\n :param key_list: list of keys\n ' result = [] max_keys_num = 1000 for i in range(0, len(key_list), max_keys_num): delete_keys = {'Objects': []} delete_keys['Objects'] = [{'Key': k} for k in key_list[i:(i + max_keys_num)]] result.append(self.s3_client.delete_objects(Bucket=bucket_name, Delete=delete_keys)) return result
def delete_objects(self, bucket_name, key_list): '\n Delete a list of objects from storage.\n :param bucket: bucket name\n :param key_list: list of keys\n ' result = [] max_keys_num = 1000 for i in range(0, len(key_list), max_keys_num): delete_keys = {'Objects': []} delete_keys['Objects'] = [{'Key': k} for k in key_list[i:(i + max_keys_num)]] result.append(self.s3_client.delete_objects(Bucket=bucket_name, Delete=delete_keys)) return result<|docstring|>Delete a list of objects from storage. :param bucket: bucket name :param key_list: list of keys<|endoftext|>
9edc6755d020be5a05d42517d25721616e7b5df98035d902671683e898dc6972
def head_bucket(self, bucket_name): '\n Head bucket from Ceph with a name. Throws StorageNoSuchKeyError if the given bucket does not exist.\n :param bucket_name: name of the bucket\n :return: Metadata of the bucket\n :rtype: str/bytes\n ' try: return self.s3_client.head_bucket(Bucket=bucket_name) except botocore.exceptions.ClientError as e: if (e.response['Error']['Code'] == '404'): raise StorageNoSuchKeyError(bucket_name, '') else: raise e
Head bucket from Ceph with a name. Throws StorageNoSuchKeyError if the given bucket does not exist. :param bucket_name: name of the bucket :return: Metadata of the bucket :rtype: str/bytes
lithops/storage/backends/ceph/ceph.py
head_bucket
ayalaraanan/lithops
55
python
def head_bucket(self, bucket_name): '\n Head bucket from Ceph with a name. Throws StorageNoSuchKeyError if the given bucket does not exist.\n :param bucket_name: name of the bucket\n :return: Metadata of the bucket\n :rtype: str/bytes\n ' try: return self.s3_client.head_bucket(Bucket=bucket_name) except botocore.exceptions.ClientError as e: if (e.response['Error']['Code'] == '404'): raise StorageNoSuchKeyError(bucket_name, '') else: raise e
def head_bucket(self, bucket_name): '\n Head bucket from Ceph with a name. Throws StorageNoSuchKeyError if the given bucket does not exist.\n :param bucket_name: name of the bucket\n :return: Metadata of the bucket\n :rtype: str/bytes\n ' try: return self.s3_client.head_bucket(Bucket=bucket_name) except botocore.exceptions.ClientError as e: if (e.response['Error']['Code'] == '404'): raise StorageNoSuchKeyError(bucket_name, '') else: raise e<|docstring|>Head bucket from Ceph with a name. Throws StorageNoSuchKeyError if the given bucket does not exist. :param bucket_name: name of the bucket :return: Metadata of the bucket :rtype: str/bytes<|endoftext|>
7f26319207ddc30c0a726c77bf2ddc2e66bca9252a3ed2470eac8198d3317ed5
def list_objects(self, bucket_name, prefix=None): '\n Return a list of objects for the given bucket and prefix.\n :param bucket_name: Name of the bucket.\n :param prefix: Prefix to filter object names.\n :return: List of objects in bucket that match the given prefix.\n :rtype: list of str\n ' try: prefix = ('' if (prefix is None) else prefix) paginator = self.s3_client.get_paginator('list_objects_v2') page_iterator = paginator.paginate(Bucket=bucket_name, Prefix=prefix) object_list = [] for page in page_iterator: if ('Contents' in page): for item in page['Contents']: object_list.append(item) return object_list except botocore.exceptions.ClientError as e: if (e.response['Error']['Code'] == '404'): raise StorageNoSuchKeyError(bucket_name, ('' if (prefix is None) else prefix)) else: raise e
Return a list of objects for the given bucket and prefix. :param bucket_name: Name of the bucket. :param prefix: Prefix to filter object names. :return: List of objects in bucket that match the given prefix. :rtype: list of str
lithops/storage/backends/ceph/ceph.py
list_objects
ayalaraanan/lithops
55
python
def list_objects(self, bucket_name, prefix=None): '\n Return a list of objects for the given bucket and prefix.\n :param bucket_name: Name of the bucket.\n :param prefix: Prefix to filter object names.\n :return: List of objects in bucket that match the given prefix.\n :rtype: list of str\n ' try: prefix = ('' if (prefix is None) else prefix) paginator = self.s3_client.get_paginator('list_objects_v2') page_iterator = paginator.paginate(Bucket=bucket_name, Prefix=prefix) object_list = [] for page in page_iterator: if ('Contents' in page): for item in page['Contents']: object_list.append(item) return object_list except botocore.exceptions.ClientError as e: if (e.response['Error']['Code'] == '404'): raise StorageNoSuchKeyError(bucket_name, ('' if (prefix is None) else prefix)) else: raise e
def list_objects(self, bucket_name, prefix=None): '\n Return a list of objects for the given bucket and prefix.\n :param bucket_name: Name of the bucket.\n :param prefix: Prefix to filter object names.\n :return: List of objects in bucket that match the given prefix.\n :rtype: list of str\n ' try: prefix = ('' if (prefix is None) else prefix) paginator = self.s3_client.get_paginator('list_objects_v2') page_iterator = paginator.paginate(Bucket=bucket_name, Prefix=prefix) object_list = [] for page in page_iterator: if ('Contents' in page): for item in page['Contents']: object_list.append(item) return object_list except botocore.exceptions.ClientError as e: if (e.response['Error']['Code'] == '404'): raise StorageNoSuchKeyError(bucket_name, ('' if (prefix is None) else prefix)) else: raise e<|docstring|>Return a list of objects for the given bucket and prefix. :param bucket_name: Name of the bucket. :param prefix: Prefix to filter object names. :return: List of objects in bucket that match the given prefix. :rtype: list of str<|endoftext|>
cbb0f3aa53afbfa9cd0899c5785da32f7a56d0fa09f3768ebe6362b985b878c0
def list_keys(self, bucket_name, prefix=None): '\n Return a list of keys for the given prefix.\n :param bucket_name: Name of the bucket.\n :param prefix: Prefix to filter object names.\n :return: List of keys in bucket that match the given prefix.\n :rtype: list of str\n ' try: prefix = ('' if (prefix is None) else prefix) paginator = self.s3_client.get_paginator('list_objects_v2') page_iterator = paginator.paginate(Bucket=bucket_name, Prefix=prefix) key_list = [] for page in page_iterator: if ('Contents' in page): for item in page['Contents']: key_list.append(item['Key']) return key_list except botocore.exceptions.ClientError as e: if (e.response['Error']['Code'] == '404'): raise StorageNoSuchKeyError(bucket_name, prefix) else: raise e
Return a list of keys for the given prefix. :param bucket_name: Name of the bucket. :param prefix: Prefix to filter object names. :return: List of keys in bucket that match the given prefix. :rtype: list of str
lithops/storage/backends/ceph/ceph.py
list_keys
ayalaraanan/lithops
55
python
def list_keys(self, bucket_name, prefix=None): '\n Return a list of keys for the given prefix.\n :param bucket_name: Name of the bucket.\n :param prefix: Prefix to filter object names.\n :return: List of keys in bucket that match the given prefix.\n :rtype: list of str\n ' try: prefix = ('' if (prefix is None) else prefix) paginator = self.s3_client.get_paginator('list_objects_v2') page_iterator = paginator.paginate(Bucket=bucket_name, Prefix=prefix) key_list = [] for page in page_iterator: if ('Contents' in page): for item in page['Contents']: key_list.append(item['Key']) return key_list except botocore.exceptions.ClientError as e: if (e.response['Error']['Code'] == '404'): raise StorageNoSuchKeyError(bucket_name, prefix) else: raise e
def list_keys(self, bucket_name, prefix=None): '\n Return a list of keys for the given prefix.\n :param bucket_name: Name of the bucket.\n :param prefix: Prefix to filter object names.\n :return: List of keys in bucket that match the given prefix.\n :rtype: list of str\n ' try: prefix = ('' if (prefix is None) else prefix) paginator = self.s3_client.get_paginator('list_objects_v2') page_iterator = paginator.paginate(Bucket=bucket_name, Prefix=prefix) key_list = [] for page in page_iterator: if ('Contents' in page): for item in page['Contents']: key_list.append(item['Key']) return key_list except botocore.exceptions.ClientError as e: if (e.response['Error']['Code'] == '404'): raise StorageNoSuchKeyError(bucket_name, prefix) else: raise e<|docstring|>Return a list of keys for the given prefix. :param bucket_name: Name of the bucket. :param prefix: Prefix to filter object names. :return: List of keys in bucket that match the given prefix. :rtype: list of str<|endoftext|>
cca5bf781d801476e3b907d78e7898a72e812b45927cec4db05f2bb98471417c
def whitelister_element_rules(): 'Doc string.' return attribute_rules
Doc string.
tvof/core/wagtail_hooks.py
whitelister_element_rules
kingsdigitallab/tvof-django
0
python
def whitelister_element_rules(): return attribute_rules
def whitelister_element_rules(): return attribute_rules<|docstring|>Doc string.<|endoftext|>
e80bb5f589f8f61679c4a8b874bea650d5a1b4c754175eaa1dcef9e6cb35069e
@abc.abstractmethod def get_valor_imposto(self): ' Aplica taxa de imposto sobre um determinado valor do objeto ' pass
Aplica taxa de imposto sobre um determinado valor do objeto
orientacao_objetos_exercicios/exercicio4/tributavel.py
get_valor_imposto
montalvas/python
0
python
@abc.abstractmethod def get_valor_imposto(self): ' ' pass
@abc.abstractmethod def get_valor_imposto(self): ' ' pass<|docstring|>Aplica taxa de imposto sobre um determinado valor do objeto<|endoftext|>
4e4385ff83f5783417aa25d25673400f7658ffcfd74cb2bd3e37ccb66b02210e
@click.command(short_help='Clip a raster to given bounds.') @click.argument('files', nargs=(- 1), type=click.Path(), required=True, metavar='INPUT OUTPUT') @options.output_opt @options.bounds_opt @click.option('--like', type=click.Path(exists=True), help='Raster dataset to use as a template for bounds') @format_opt @options.creation_options @click.pass_context def clip(ctx, files, output, bounds, like, driver, creation_options): 'Clips a raster using bounds input directly or from a template raster.\n\n \x08\n $ rio clip input.tif output.tif --bounds xmin ymin xmax ymax\n $ rio clip input.tif output.tif --like template.tif\n\n If using --bounds, values must be in coordinate reference system of input.\n If using --like, bounds will automatically be transformed to match the\n coordinate reference system of the input.\n\n It can also be combined to read bounds of a feature dataset using Fiona:\n\n \x08\n $ rio clip input.tif output.tif --bounds $(fio info features.shp --bounds)\n\n ' from rasterio.warp import transform_bounds verbosity = ((ctx.obj and ctx.obj.get('verbosity')) or 1) with rasterio.Env(CPL_DEBUG=(verbosity > 2)): (output, files) = resolve_inout(files=files, output=output) input = files[0] with rasterio.open(input) as src: if bounds: if disjoint_bounds(bounds, src.bounds): raise click.BadParameter('must overlap the extent of the input raster', param='--bounds', param_hint='--bounds') elif like: with rasterio.open(like) as template_ds: bounds = template_ds.bounds if (template_ds.crs != src.crs): bounds = transform_bounds(template_ds.crs, src.crs, *bounds) if disjoint_bounds(bounds, src.bounds): raise click.BadParameter('must overlap the extent of the input raster', param='--like', param_hint='--like') else: raise click.UsageError('--bounds or --like required') window = src.window(*bounds) out_kwargs = src.meta.copy() out_kwargs.update({'driver': driver, 'height': (window[0][1] - window[0][0]), 'width': (window[1][1] - window[1][0]), 'transform': 
src.window_transform(window)}) out_kwargs.update(**creation_options) with rasterio.open(output, 'w', **out_kwargs) as out: out.write(src.read(window=window))
Clips a raster using bounds input directly or from a template raster.  $ rio clip input.tif output.tif --bounds xmin ymin xmax ymax $ rio clip input.tif output.tif --like template.tif If using --bounds, values must be in coordinate reference system of input. If using --like, bounds will automatically be transformed to match the coordinate reference system of the input. It can also be combined to read bounds of a feature dataset using Fiona:  $ rio clip input.tif output.tif --bounds $(fio info features.shp --bounds)
Rasterio_osgeo_shapely_PIL_pyproj_numpy/source/rasterio/rio/clip.py
clip
saidie/lambda-packs
1
python
@click.command(short_help='Clip a raster to given bounds.') @click.argument('files', nargs=(- 1), type=click.Path(), required=True, metavar='INPUT OUTPUT') @options.output_opt @options.bounds_opt @click.option('--like', type=click.Path(exists=True), help='Raster dataset to use as a template for bounds') @format_opt @options.creation_options @click.pass_context def clip(ctx, files, output, bounds, like, driver, creation_options): 'Clips a raster using bounds input directly or from a template raster.\n\n \x08\n $ rio clip input.tif output.tif --bounds xmin ymin xmax ymax\n $ rio clip input.tif output.tif --like template.tif\n\n If using --bounds, values must be in coordinate reference system of input.\n If using --like, bounds will automatically be transformed to match the\n coordinate reference system of the input.\n\n It can also be combined to read bounds of a feature dataset using Fiona:\n\n \x08\n $ rio clip input.tif output.tif --bounds $(fio info features.shp --bounds)\n\n ' from rasterio.warp import transform_bounds verbosity = ((ctx.obj and ctx.obj.get('verbosity')) or 1) with rasterio.Env(CPL_DEBUG=(verbosity > 2)): (output, files) = resolve_inout(files=files, output=output) input = files[0] with rasterio.open(input) as src: if bounds: if disjoint_bounds(bounds, src.bounds): raise click.BadParameter('must overlap the extent of the input raster', param='--bounds', param_hint='--bounds') elif like: with rasterio.open(like) as template_ds: bounds = template_ds.bounds if (template_ds.crs != src.crs): bounds = transform_bounds(template_ds.crs, src.crs, *bounds) if disjoint_bounds(bounds, src.bounds): raise click.BadParameter('must overlap the extent of the input raster', param='--like', param_hint='--like') else: raise click.UsageError('--bounds or --like required') window = src.window(*bounds) out_kwargs = src.meta.copy() out_kwargs.update({'driver': driver, 'height': (window[0][1] - window[0][0]), 'width': (window[1][1] - window[1][0]), 'transform': 
src.window_transform(window)}) out_kwargs.update(**creation_options) with rasterio.open(output, 'w', **out_kwargs) as out: out.write(src.read(window=window))
@click.command(short_help='Clip a raster to given bounds.') @click.argument('files', nargs=(- 1), type=click.Path(), required=True, metavar='INPUT OUTPUT') @options.output_opt @options.bounds_opt @click.option('--like', type=click.Path(exists=True), help='Raster dataset to use as a template for bounds') @format_opt @options.creation_options @click.pass_context def clip(ctx, files, output, bounds, like, driver, creation_options): 'Clips a raster using bounds input directly or from a template raster.\n\n \x08\n $ rio clip input.tif output.tif --bounds xmin ymin xmax ymax\n $ rio clip input.tif output.tif --like template.tif\n\n If using --bounds, values must be in coordinate reference system of input.\n If using --like, bounds will automatically be transformed to match the\n coordinate reference system of the input.\n\n It can also be combined to read bounds of a feature dataset using Fiona:\n\n \x08\n $ rio clip input.tif output.tif --bounds $(fio info features.shp --bounds)\n\n ' from rasterio.warp import transform_bounds verbosity = ((ctx.obj and ctx.obj.get('verbosity')) or 1) with rasterio.Env(CPL_DEBUG=(verbosity > 2)): (output, files) = resolve_inout(files=files, output=output) input = files[0] with rasterio.open(input) as src: if bounds: if disjoint_bounds(bounds, src.bounds): raise click.BadParameter('must overlap the extent of the input raster', param='--bounds', param_hint='--bounds') elif like: with rasterio.open(like) as template_ds: bounds = template_ds.bounds if (template_ds.crs != src.crs): bounds = transform_bounds(template_ds.crs, src.crs, *bounds) if disjoint_bounds(bounds, src.bounds): raise click.BadParameter('must overlap the extent of the input raster', param='--like', param_hint='--like') else: raise click.UsageError('--bounds or --like required') window = src.window(*bounds) out_kwargs = src.meta.copy() out_kwargs.update({'driver': driver, 'height': (window[0][1] - window[0][0]), 'width': (window[1][1] - window[1][0]), 'transform': 
src.window_transform(window)}) out_kwargs.update(**creation_options) with rasterio.open(output, 'w', **out_kwargs) as out: out.write(src.read(window=window))<|docstring|>Clips a raster using bounds input directly or from a template raster.  $ rio clip input.tif output.tif --bounds xmin ymin xmax ymax $ rio clip input.tif output.tif --like template.tif If using --bounds, values must be in coordinate reference system of input. If using --like, bounds will automatically be transformed to match the coordinate reference system of the input. It can also be combined to read bounds of a feature dataset using Fiona:  $ rio clip input.tif output.tif --bounds $(fio info features.shp --bounds)<|endoftext|>
7e9728146b7ceeffbe687c29ff55c76aaacaa86746a288c57b256753472a65e8
def check_clickhouse_version(version): 'Compare ClickHouse version.' def check(test): if (getattr(test.context, 'clickhouse_version', None) is None): return False clickhouse_version = pkg_version.parse(str(test.context.clickhouse_version)) if version.startswith('=='): return (clickhouse_version == pkg_version.parse(str(version.split('==', 1)[(- 1)]))) elif version.startswith('>='): return (clickhouse_version >= pkg_version.parse(str(version.split('>=', 1)[(- 1)]))) elif version.startswith('<='): return (clickhouse_version <= pkg_version.parse(str(version.split('<=', 1)[(- 1)]))) elif version.startswith('='): return (clickhouse_version == pkg_version.parse(str(version.split('=', 1)[(- 1)]))) elif version.startswith('>'): return (clickhouse_version > pkg_version.parse(str(version.split('>', 1)[(- 1)]))) elif version.startswith('<'): return (clickhouse_version < pkg_version.parse(str(version.split('<', 1)[(- 1)]))) else: return (clickhouse_version == pkg_version.parse(str(version))) return check
Compare ClickHouse version.
tests/testflows/helpers/common.py
check_clickhouse_version
psyoblade/ClickHouse
1
python
def check_clickhouse_version(version): def check(test): if (getattr(test.context, 'clickhouse_version', None) is None): return False clickhouse_version = pkg_version.parse(str(test.context.clickhouse_version)) if version.startswith('=='): return (clickhouse_version == pkg_version.parse(str(version.split('==', 1)[(- 1)]))) elif version.startswith('>='): return (clickhouse_version >= pkg_version.parse(str(version.split('>=', 1)[(- 1)]))) elif version.startswith('<='): return (clickhouse_version <= pkg_version.parse(str(version.split('<=', 1)[(- 1)]))) elif version.startswith('='): return (clickhouse_version == pkg_version.parse(str(version.split('=', 1)[(- 1)]))) elif version.startswith('>'): return (clickhouse_version > pkg_version.parse(str(version.split('>', 1)[(- 1)]))) elif version.startswith('<'): return (clickhouse_version < pkg_version.parse(str(version.split('<', 1)[(- 1)]))) else: return (clickhouse_version == pkg_version.parse(str(version))) return check
def check_clickhouse_version(version): def check(test): if (getattr(test.context, 'clickhouse_version', None) is None): return False clickhouse_version = pkg_version.parse(str(test.context.clickhouse_version)) if version.startswith('=='): return (clickhouse_version == pkg_version.parse(str(version.split('==', 1)[(- 1)]))) elif version.startswith('>='): return (clickhouse_version >= pkg_version.parse(str(version.split('>=', 1)[(- 1)]))) elif version.startswith('<='): return (clickhouse_version <= pkg_version.parse(str(version.split('<=', 1)[(- 1)]))) elif version.startswith('='): return (clickhouse_version == pkg_version.parse(str(version.split('=', 1)[(- 1)]))) elif version.startswith('>'): return (clickhouse_version > pkg_version.parse(str(version.split('>', 1)[(- 1)]))) elif version.startswith('<'): return (clickhouse_version < pkg_version.parse(str(version.split('<', 1)[(- 1)]))) else: return (clickhouse_version == pkg_version.parse(str(version))) return check<|docstring|>Compare ClickHouse version.<|endoftext|>
03b6bb68b2acdd491b0b1e22067a087b0acacbe2285f135c97683e1158e61724
@TestStep(Given) def instrument_clickhouse_server_log(self, node=None, test=None, clickhouse_server_log='/var/log/clickhouse-server/clickhouse-server.log', always_dump=False): 'Instrument clickhouse-server.log for the current test (default)\n by adding start and end messages that include test name to log\n of the specified node. If we are in the debug mode and the test\n fails then dump the messages from the log for this test.\n\n :param always_dump: always dump clickhouse log after test, default: `False`\n ' if (test is None): test = current() if (node is None): node = self.context.node with By('getting current log size'): cmd = node.command(f'stat --format=%s {clickhouse_server_log}') if (cmd.output == f"stat: cannot stat '{clickhouse_server_log}': No such file or directory"): start_logsize = 0 else: start_logsize = cmd.output.split(' ')[0].strip() try: with And('adding test name start message to the clickhouse-server.log'): node.command(f'echo -e "\n-- start: {test.name} --\n" >> {clickhouse_server_log}') (yield) finally: if (test.terminating is True): return with Finally('adding test name end message to the clickhouse-server.log', flags=TE): node.command(f'echo -e "\n-- end: {test.name} --\n" >> {clickhouse_server_log}') with And('getting current log size at the end of the test'): cmd = node.command(f'stat --format=%s {clickhouse_server_log}') end_logsize = cmd.output.split(' ')[0].strip() dump_log = (always_dump or (settings.debug and (not self.parent.result))) if dump_log: with Then('dumping clickhouse-server.log for this test'): node.command(f'tail -c +{start_logsize} {clickhouse_server_log} | head -c {(int(end_logsize) - int(start_logsize))}')
Instrument clickhouse-server.log for the current test (default) by adding start and end messages that include test name to log of the specified node. If we are in the debug mode and the test fails then dump the messages from the log for this test. :param always_dump: always dump clickhouse log after test, default: `False`
tests/testflows/helpers/common.py
instrument_clickhouse_server_log
psyoblade/ClickHouse
1
python
@TestStep(Given) def instrument_clickhouse_server_log(self, node=None, test=None, clickhouse_server_log='/var/log/clickhouse-server/clickhouse-server.log', always_dump=False): 'Instrument clickhouse-server.log for the current test (default)\n by adding start and end messages that include test name to log\n of the specified node. If we are in the debug mode and the test\n fails then dump the messages from the log for this test.\n\n :param always_dump: always dump clickhouse log after test, default: `False`\n ' if (test is None): test = current() if (node is None): node = self.context.node with By('getting current log size'): cmd = node.command(f'stat --format=%s {clickhouse_server_log}') if (cmd.output == f"stat: cannot stat '{clickhouse_server_log}': No such file or directory"): start_logsize = 0 else: start_logsize = cmd.output.split(' ')[0].strip() try: with And('adding test name start message to the clickhouse-server.log'): node.command(f'echo -e "\n-- start: {test.name} --\n" >> {clickhouse_server_log}') (yield) finally: if (test.terminating is True): return with Finally('adding test name end message to the clickhouse-server.log', flags=TE): node.command(f'echo -e "\n-- end: {test.name} --\n" >> {clickhouse_server_log}') with And('getting current log size at the end of the test'): cmd = node.command(f'stat --format=%s {clickhouse_server_log}') end_logsize = cmd.output.split(' ')[0].strip() dump_log = (always_dump or (settings.debug and (not self.parent.result))) if dump_log: with Then('dumping clickhouse-server.log for this test'): node.command(f'tail -c +{start_logsize} {clickhouse_server_log} | head -c {(int(end_logsize) - int(start_logsize))}')
@TestStep(Given) def instrument_clickhouse_server_log(self, node=None, test=None, clickhouse_server_log='/var/log/clickhouse-server/clickhouse-server.log', always_dump=False): 'Instrument clickhouse-server.log for the current test (default)\n by adding start and end messages that include test name to log\n of the specified node. If we are in the debug mode and the test\n fails then dump the messages from the log for this test.\n\n :param always_dump: always dump clickhouse log after test, default: `False`\n ' if (test is None): test = current() if (node is None): node = self.context.node with By('getting current log size'): cmd = node.command(f'stat --format=%s {clickhouse_server_log}') if (cmd.output == f"stat: cannot stat '{clickhouse_server_log}': No such file or directory"): start_logsize = 0 else: start_logsize = cmd.output.split(' ')[0].strip() try: with And('adding test name start message to the clickhouse-server.log'): node.command(f'echo -e "\n-- start: {test.name} --\n" >> {clickhouse_server_log}') (yield) finally: if (test.terminating is True): return with Finally('adding test name end message to the clickhouse-server.log', flags=TE): node.command(f'echo -e "\n-- end: {test.name} --\n" >> {clickhouse_server_log}') with And('getting current log size at the end of the test'): cmd = node.command(f'stat --format=%s {clickhouse_server_log}') end_logsize = cmd.output.split(' ')[0].strip() dump_log = (always_dump or (settings.debug and (not self.parent.result))) if dump_log: with Then('dumping clickhouse-server.log for this test'): node.command(f'tail -c +{start_logsize} {clickhouse_server_log} | head -c {(int(end_logsize) - int(start_logsize))}')<|docstring|>Instrument clickhouse-server.log for the current test (default) by adding start and end messages that include test name to log of the specified node. If we are in the debug mode and the test fails then dump the messages from the log for this test. 
:param always_dump: always dump clickhouse log after test, default: `False`<|endoftext|>
7113a4a75b5d4b7d87a4d3e202756edcb4f6ee59423401d451cc0160694accc7
def create_xml_config_content(entries, config_file, config_d_dir='/etc/clickhouse-server/config.d'): 'Create XML configuration file from a dictionary.\n\n :param entries: dictionary that defines xml\n :param config_file: name of the config file\n :param config_d_dir: config.d directory path, default: `/etc/clickhouse-server/config.d`\n ' uid = getuid() path = os.path.join(config_d_dir, config_file) name = config_file root = xmltree.Element('clickhouse') root.append(xmltree.Comment(text=f'config uid: {uid}')) def create_xml_tree(entries, root): for (k, v) in entries.items(): if isinstance(k, KeyWithAttributes): xml_element = xmltree.Element(k.name) for (attr_name, attr_value) in k.attributes.items(): xml_element.set(attr_name, attr_value) if (type(v) is dict): create_xml_tree(v, xml_element) elif (type(v) in (list, tuple)): for e in v: create_xml_tree(e, xml_element) else: xml_element.text = v root.append(xml_element) elif (type(v) is dict): xml_element = xmltree.Element(k) create_xml_tree(v, xml_element) root.append(xml_element) elif (type(v) in (list, tuple)): xml_element = xmltree.Element(k) for e in v: create_xml_tree(e, xml_element) root.append(xml_element) else: xml_append(root, k, v) create_xml_tree(entries, root) xml_indent(root) content = (xml_with_utf8 + str(xmltree.tostring(root, short_empty_elements=False, encoding='utf-8'), 'utf-8')) return Config(content, path, name, uid, 'config.xml')
Create XML configuration file from a dictionary. :param entries: dictionary that defines xml :param config_file: name of the config file :param config_d_dir: config.d directory path, default: `/etc/clickhouse-server/config.d`
tests/testflows/helpers/common.py
create_xml_config_content
psyoblade/ClickHouse
1
python
def create_xml_config_content(entries, config_file, config_d_dir='/etc/clickhouse-server/config.d'): 'Create XML configuration file from a dictionary.\n\n :param entries: dictionary that defines xml\n :param config_file: name of the config file\n :param config_d_dir: config.d directory path, default: `/etc/clickhouse-server/config.d`\n ' uid = getuid() path = os.path.join(config_d_dir, config_file) name = config_file root = xmltree.Element('clickhouse') root.append(xmltree.Comment(text=f'config uid: {uid}')) def create_xml_tree(entries, root): for (k, v) in entries.items(): if isinstance(k, KeyWithAttributes): xml_element = xmltree.Element(k.name) for (attr_name, attr_value) in k.attributes.items(): xml_element.set(attr_name, attr_value) if (type(v) is dict): create_xml_tree(v, xml_element) elif (type(v) in (list, tuple)): for e in v: create_xml_tree(e, xml_element) else: xml_element.text = v root.append(xml_element) elif (type(v) is dict): xml_element = xmltree.Element(k) create_xml_tree(v, xml_element) root.append(xml_element) elif (type(v) in (list, tuple)): xml_element = xmltree.Element(k) for e in v: create_xml_tree(e, xml_element) root.append(xml_element) else: xml_append(root, k, v) create_xml_tree(entries, root) xml_indent(root) content = (xml_with_utf8 + str(xmltree.tostring(root, short_empty_elements=False, encoding='utf-8'), 'utf-8')) return Config(content, path, name, uid, 'config.xml')
def create_xml_config_content(entries, config_file, config_d_dir='/etc/clickhouse-server/config.d'): 'Create XML configuration file from a dictionary.\n\n :param entries: dictionary that defines xml\n :param config_file: name of the config file\n :param config_d_dir: config.d directory path, default: `/etc/clickhouse-server/config.d`\n ' uid = getuid() path = os.path.join(config_d_dir, config_file) name = config_file root = xmltree.Element('clickhouse') root.append(xmltree.Comment(text=f'config uid: {uid}')) def create_xml_tree(entries, root): for (k, v) in entries.items(): if isinstance(k, KeyWithAttributes): xml_element = xmltree.Element(k.name) for (attr_name, attr_value) in k.attributes.items(): xml_element.set(attr_name, attr_value) if (type(v) is dict): create_xml_tree(v, xml_element) elif (type(v) in (list, tuple)): for e in v: create_xml_tree(e, xml_element) else: xml_element.text = v root.append(xml_element) elif (type(v) is dict): xml_element = xmltree.Element(k) create_xml_tree(v, xml_element) root.append(xml_element) elif (type(v) in (list, tuple)): xml_element = xmltree.Element(k) for e in v: create_xml_tree(e, xml_element) root.append(xml_element) else: xml_append(root, k, v) create_xml_tree(entries, root) xml_indent(root) content = (xml_with_utf8 + str(xmltree.tostring(root, short_empty_elements=False, encoding='utf-8'), 'utf-8')) return Config(content, path, name, uid, 'config.xml')<|docstring|>Create XML configuration file from a dictionary. :param entries: dictionary that defines xml :param config_file: name of the config file :param config_d_dir: config.d directory path, default: `/etc/clickhouse-server/config.d`<|endoftext|>
f2eec63f2915736cea7e97326ac23d26c0d4fa04c78a4e56b241463f8c6652e0
def add_invalid_config(config, message, recover_config=None, tail=30, timeout=300, restart=True, user=None):
    """Check that ClickHouse errors when trying to load invalid configuration file.

    :param config: invalid configuration file description
    :param message: error message expected in the server error log
    :param recover_config: config to write back instead of removing, default: None
    :param tail: number of error-log lines to inspect, default: 30
    :param timeout: polling timeout in seconds, default: 300
    :param restart: restart server after applying the config, default: True
    :param user: user to run the server as when restarting
    """
    cluster = current().context.cluster
    node = current().context.node

    def poll_until_ok(cmd):
        # Re-run `cmd` on the node (without logging steps) until it exits
        # with 0 or `timeout` expires; returns the last exit code seen.
        start = time.time()
        rc = None
        while time.time() - start < timeout:
            rc = node.command(cmd, steps=False).exitcode
            if rc == 0:
                break
            time.sleep(1)
        return rc

    try:
        with Given('I prepare the error log by writing empty lines into it'):
            node.command('echo -e "%s" > /var/log/clickhouse-server/clickhouse-server.err.log' % ('-\\n' * tail))

        with When('I add the config', description=config.path):
            node.command(f'cat <<HEREDOC > {config.path}\n{config.content}\nHEREDOC', steps=False, exitcode=0)

        with Then(f'{config.preprocessed_name} should be updated', description=f'timeout {timeout}'):
            # the uid comment embedded in the config proves the preprocessed
            # file was regenerated from our version
            check = (
                f"cat /var/lib/clickhouse/preprocessed_configs/{config.preprocessed_name}"
                f" | grep {config.uid}{' > /dev/null' if not settings.debug else ''}")
            assert poll_until_ok(check) == 0, error()

        if restart:
            with When('I restart ClickHouse to apply the config changes'):
                node.restart_clickhouse(safe=False, wait_healthy=False, user=user)
    finally:
        if recover_config is None:
            with Finally(f'I remove {config.name}'):
                with By('removing invalid configuration file'):
                    system_config_path = os.path.join(
                        cluster.environ['CLICKHOUSE_TESTS_DIR'], 'configs',
                        node.name, 'config.d', config.path.split('config.d/')[-1])
                    cluster.command(None, f'rm -rf {system_config_path}', timeout=timeout, exitcode=0)
                if restart:
                    with And('restarting ClickHouse'):
                        # NOTE(review): the server is restarted twice here in
                        # the original code; the second call looks redundant
                        # -- confirm intent before removing.
                        node.restart_clickhouse(safe=False, user=user)
                        node.restart_clickhouse(safe=False, user=user)
        else:
            with Finally(f'I change {config.name}'):
                with By('changing invalid configuration file'):
                    system_config_path = os.path.join(
                        cluster.environ['CLICKHOUSE_TESTS_DIR'], 'configs',
                        node.name, 'config.d', config.path.split('config.d/')[-1])
                    cluster.command(None, f'rm -rf {system_config_path}', timeout=timeout, exitcode=0)
                    cluster.command(
                        None,
                        f'cat <<HEREDOC > {system_config_path}\n{recover_config.content}\nHEREDOC',
                        timeout=timeout, exitcode=0)
                if restart:
                    with And('restarting ClickHouse'):
                        node.restart_clickhouse(safe=False, user=user)

        with Then('error log should contain the expected error message'):
            grep_log = f'tail -n {tail} /var/log/clickhouse-server/clickhouse-server.err.log | grep "{message}"'
            assert poll_until_ok(grep_log) == 0, error()
Check that ClickHouse errors when trying to load invalid configuration file.
tests/testflows/helpers/common.py
add_invalid_config
psyoblade/ClickHouse
1
python
def add_invalid_config(config, message, recover_config=None, tail=30, timeout=300, restart=True, user=None): cluster = current().context.cluster node = current().context.node try: with Given('I prepare the error log by writing empty lines into it'): node.command(('echo -e "%s" > /var/log/clickhouse-server/clickhouse-server.err.log' % ('-\\n' * tail))) with When('I add the config', description=config.path): command = f'cat <<HEREDOC > {config.path} {config.content} HEREDOC' node.command(command, steps=False, exitcode=0) with Then(f'{config.preprocessed_name} should be updated', description=f'timeout {timeout}'): started = time.time() command = f"cat /var/lib/clickhouse/preprocessed_configs/{config.preprocessed_name} | grep {config.uid}{(' > /dev/null' if (not settings.debug) else )}" while ((time.time() - started) < timeout): exitcode = node.command(command, steps=False).exitcode if (exitcode == 0): break time.sleep(1) assert (exitcode == 0), error() if restart: with When('I restart ClickHouse to apply the config changes'): node.restart_clickhouse(safe=False, wait_healthy=False, user=user) finally: if (recover_config is None): with Finally(f'I remove {config.name}'): with By('removing invalid configuration file'): system_config_path = os.path.join(cluster.environ['CLICKHOUSE_TESTS_DIR'], 'configs', node.name, 'config.d', config.path.split('config.d/')[(- 1)]) cluster.command(None, f'rm -rf {system_config_path}', timeout=timeout, exitcode=0) if restart: with And('restarting ClickHouse'): node.restart_clickhouse(safe=False, user=user) node.restart_clickhouse(safe=False, user=user) else: with Finally(f'I change {config.name}'): with By('changing invalid configuration file'): system_config_path = os.path.join(cluster.environ['CLICKHOUSE_TESTS_DIR'], 'configs', node.name, 'config.d', config.path.split('config.d/')[(- 1)]) cluster.command(None, f'rm -rf {system_config_path}', timeout=timeout, exitcode=0) command = f'cat <<HEREDOC > {system_config_path} 
{recover_config.content} HEREDOC' cluster.command(None, command, timeout=timeout, exitcode=0) if restart: with And('restarting ClickHouse'): node.restart_clickhouse(safe=False, user=user) with Then('error log should contain the expected error message'): started = time.time() command = f'tail -n {tail} /var/log/clickhouse-server/clickhouse-server.err.log | grep "{message}"' while ((time.time() - started) < timeout): exitcode = node.command(command, steps=False).exitcode if (exitcode == 0): break time.sleep(1) assert (exitcode == 0), error()
def add_invalid_config(config, message, recover_config=None, tail=30, timeout=300, restart=True, user=None): cluster = current().context.cluster node = current().context.node try: with Given('I prepare the error log by writing empty lines into it'): node.command(('echo -e "%s" > /var/log/clickhouse-server/clickhouse-server.err.log' % ('-\\n' * tail))) with When('I add the config', description=config.path): command = f'cat <<HEREDOC > {config.path} {config.content} HEREDOC' node.command(command, steps=False, exitcode=0) with Then(f'{config.preprocessed_name} should be updated', description=f'timeout {timeout}'): started = time.time() command = f"cat /var/lib/clickhouse/preprocessed_configs/{config.preprocessed_name} | grep {config.uid}{(' > /dev/null' if (not settings.debug) else )}" while ((time.time() - started) < timeout): exitcode = node.command(command, steps=False).exitcode if (exitcode == 0): break time.sleep(1) assert (exitcode == 0), error() if restart: with When('I restart ClickHouse to apply the config changes'): node.restart_clickhouse(safe=False, wait_healthy=False, user=user) finally: if (recover_config is None): with Finally(f'I remove {config.name}'): with By('removing invalid configuration file'): system_config_path = os.path.join(cluster.environ['CLICKHOUSE_TESTS_DIR'], 'configs', node.name, 'config.d', config.path.split('config.d/')[(- 1)]) cluster.command(None, f'rm -rf {system_config_path}', timeout=timeout, exitcode=0) if restart: with And('restarting ClickHouse'): node.restart_clickhouse(safe=False, user=user) node.restart_clickhouse(safe=False, user=user) else: with Finally(f'I change {config.name}'): with By('changing invalid configuration file'): system_config_path = os.path.join(cluster.environ['CLICKHOUSE_TESTS_DIR'], 'configs', node.name, 'config.d', config.path.split('config.d/')[(- 1)]) cluster.command(None, f'rm -rf {system_config_path}', timeout=timeout, exitcode=0) command = f'cat <<HEREDOC > {system_config_path} 
{recover_config.content} HEREDOC' cluster.command(None, command, timeout=timeout, exitcode=0) if restart: with And('restarting ClickHouse'): node.restart_clickhouse(safe=False, user=user) with Then('error log should contain the expected error message'): started = time.time() command = f'tail -n {tail} /var/log/clickhouse-server/clickhouse-server.err.log | grep "{message}"' while ((time.time() - started) < timeout): exitcode = node.command(command, steps=False).exitcode if (exitcode == 0): break time.sleep(1) assert (exitcode == 0), error()<|docstring|>Check that ClickHouse errors when trying to load invalid configuration file.<|endoftext|>
effb516ec1582e6654f6408fe95d7baf759534cee594c2dc518783c9f31aa4ae
def add_config(config, timeout=300, restart=False, modify=False, node=None, user=None, wait_healthy=True, check_preprocessed=True):
    """Add dynamic configuration file to ClickHouse.

    :param config: configuration file description
    :param timeout: timeout, default: 300 sec
    :param restart: restart server, default: False
    :param modify: only modify configuration file (skip cleanup), default: False
    :param node: node to apply the config on, default: current node
    :param user: user to start the server as when restarting
    :param wait_healthy: wait for the server to become healthy after restart
    :param check_preprocessed: verify that the preprocessed config was updated
    """
    if node is None:
        node = current().context.node
    cluster = current().context.cluster

    def check_preprocessed_config_is_updated(after_removal=False):
        """Check that preprocessed config is updated."""
        check_cmd = (
            f"cat /var/lib/clickhouse/preprocessed_configs/{config.preprocessed_name}"
            f" | grep {config.uid}{' > /dev/null' if not settings.debug else ''}")
        # the config uid comment disappears from the preprocessed file once
        # the config is removed, so grep's expected exit code flips
        expected = 1 if after_removal else 0
        start = time.time()
        while time.time() - start < timeout:
            rc = node.command(check_cmd, steps=False).exitcode
            if rc == expected:
                break
            time.sleep(1)
        if settings.debug:
            node.command(f'cat /var/lib/clickhouse/preprocessed_configs/{config.preprocessed_name}')
        assert rc == expected, error()

    def wait_for_config_to_be_loaded(user=None):
        """Wait for config to be loaded."""
        if restart:
            with When('I close terminal to the node to be restarted'):
                bash.close()
            with And('I stop ClickHouse to apply the config changes'):
                node.stop_clickhouse(safe=False)
            with And('I get the current log size'):
                result = node.cluster.command(
                    None,
                    f"stat --format=%s {cluster.environ['CLICKHOUSE_TESTS_DIR']}/_instances/{node.name}/logs/clickhouse-server.log")
                logsize = result.output.split(' ')[0].strip()
            with And('I start ClickHouse back up'):
                node.start_clickhouse(user=user, wait_healthy=wait_healthy)
            with Then('I tail the log file from using previous log size as the offset'):
                bash.prompt = bash.__class__.prompt
                bash.open()
                bash.send(f'tail -c +{logsize} -f /var/log/clickhouse-server/clickhouse-server.log')
        with Then('I wait for config reload message in the log file'):
            if restart:
                bash.expect(
                    f"ConfigReloader: Loaded config '/etc/clickhouse-server/config.xml', performed update on configuration",
                    timeout=timeout)
            else:
                bash.expect(
                    f"ConfigReloader: Loaded config '/etc/clickhouse-server/{config.preprocessed_name}', performed update on configuration",
                    timeout=timeout)

    try:
        with Given(f'{config.name}'):
            if settings.debug:
                with When('I output the content of the config'):
                    debug(config.content)
            with node.cluster.shell(node.name) as bash:
                bash.expect(bash.prompt)
                bash.send('tail -v -n 0 -f /var/log/clickhouse-server/clickhouse-server.log')
                # wait for tail to attach to the log before making changes
                bash.expect('<==')
                bash.expect('\n')
                with When('I add the config', description=config.path):
                    node.command(f'cat <<HEREDOC > {config.path}\n{config.content}\nHEREDOC', steps=False, exitcode=0)
                if check_preprocessed:
                    with Then(f'{config.preprocessed_name} should be updated', description=f'timeout {timeout}'):
                        check_preprocessed_config_is_updated()
                with And('I wait for config to be reloaded'):
                    wait_for_config_to_be_loaded(user=user)
        yield
    finally:
        if not modify:
            with Finally(f'I remove {config.name} on {node.name}'):
                with node.cluster.shell(node.name) as bash:
                    bash.expect(bash.prompt)
                    bash.send('tail -v -n 0 -f /var/log/clickhouse-server/clickhouse-server.log')
                    bash.expect('<==')
                    bash.expect('\n')
                    with By('removing the config file', description=config.path):
                        node.command(f'rm -rf {config.path}', exitcode=0)
                    with Then(f'{config.preprocessed_name} should be updated', description=f'timeout {timeout}'):
                        check_preprocessed_config_is_updated(after_removal=True)
                    with And('I wait for config to be reloaded'):
                        wait_for_config_to_be_loaded()
Add dynamic configuration file to ClickHouse. :param config: configuration file description :param timeout: timeout, default: 300 sec :param restart: restart server, default: False :param modify: only modify configuration file, default: False
tests/testflows/helpers/common.py
add_config
psyoblade/ClickHouse
1
python
def add_config(config, timeout=300, restart=False, modify=False, node=None, user=None, wait_healthy=True, check_preprocessed=True): 'Add dynamic configuration file to ClickHouse.\n\n :param config: configuration file description\n :param timeout: timeout, default: 300 sec\n :param restart: restart server, default: False\n :param modify: only modify configuration file, default: False\n ' if (node is None): node = current().context.node cluster = current().context.cluster def check_preprocessed_config_is_updated(after_removal=False): 'Check that preprocessed config is updated.' started = time.time() command = f"cat /var/lib/clickhouse/preprocessed_configs/{config.preprocessed_name} | grep {config.uid}{(' > /dev/null' if (not settings.debug) else )}" while ((time.time() - started) < timeout): exitcode = node.command(command, steps=False).exitcode if after_removal: if (exitcode == 1): break elif (exitcode == 0): break time.sleep(1) if settings.debug: node.command(f'cat /var/lib/clickhouse/preprocessed_configs/{config.preprocessed_name}') if after_removal: assert (exitcode == 1), error() else: assert (exitcode == 0), error() def wait_for_config_to_be_loaded(user=None): 'Wait for config to be loaded.' 
if restart: with When('I close terminal to the node to be restarted'): bash.close() with And('I stop ClickHouse to apply the config changes'): node.stop_clickhouse(safe=False) with And('I get the current log size'): cmd = node.cluster.command(None, f"stat --format=%s {cluster.environ['CLICKHOUSE_TESTS_DIR']}/_instances/{node.name}/logs/clickhouse-server.log") logsize = cmd.output.split(' ')[0].strip() with And('I start ClickHouse back up'): node.start_clickhouse(user=user, wait_healthy=wait_healthy) with Then('I tail the log file from using previous log size as the offset'): bash.prompt = bash.__class__.prompt bash.open() bash.send(f'tail -c +{logsize} -f /var/log/clickhouse-server/clickhouse-server.log') with Then('I wait for config reload message in the log file'): if restart: bash.expect(f"ConfigReloader: Loaded config '/etc/clickhouse-server/config.xml', performed update on configuration", timeout=timeout) else: bash.expect(f"ConfigReloader: Loaded config '/etc/clickhouse-server/{config.preprocessed_name}', performed update on configuration", timeout=timeout) try: with Given(f'{config.name}'): if settings.debug: with When('I output the content of the config'): debug(config.content) with node.cluster.shell(node.name) as bash: bash.expect(bash.prompt) bash.send('tail -v -n 0 -f /var/log/clickhouse-server/clickhouse-server.log') bash.expect('<==') bash.expect('\n') with When('I add the config', description=config.path): command = f'cat <<HEREDOC > {config.path} {config.content} HEREDOC' node.command(command, steps=False, exitcode=0) if check_preprocessed: with Then(f'{config.preprocessed_name} should be updated', description=f'timeout {timeout}'): check_preprocessed_config_is_updated() with And('I wait for config to be reloaded'): wait_for_config_to_be_loaded(user=user) (yield) finally: if (not modify): with Finally(f'I remove {config.name} on {node.name}'): with node.cluster.shell(node.name) as bash: bash.expect(bash.prompt) bash.send('tail -v -n 0 -f 
/var/log/clickhouse-server/clickhouse-server.log') bash.expect('<==') bash.expect('\n') with By('removing the config file', description=config.path): node.command(f'rm -rf {config.path}', exitcode=0) with Then(f'{config.preprocessed_name} should be updated', description=f'timeout {timeout}'): check_preprocessed_config_is_updated(after_removal=True) with And('I wait for config to be reloaded'): wait_for_config_to_be_loaded()
def add_config(config, timeout=300, restart=False, modify=False, node=None, user=None, wait_healthy=True, check_preprocessed=True): 'Add dynamic configuration file to ClickHouse.\n\n :param config: configuration file description\n :param timeout: timeout, default: 300 sec\n :param restart: restart server, default: False\n :param modify: only modify configuration file, default: False\n ' if (node is None): node = current().context.node cluster = current().context.cluster def check_preprocessed_config_is_updated(after_removal=False): 'Check that preprocessed config is updated.' started = time.time() command = f"cat /var/lib/clickhouse/preprocessed_configs/{config.preprocessed_name} | grep {config.uid}{(' > /dev/null' if (not settings.debug) else )}" while ((time.time() - started) < timeout): exitcode = node.command(command, steps=False).exitcode if after_removal: if (exitcode == 1): break elif (exitcode == 0): break time.sleep(1) if settings.debug: node.command(f'cat /var/lib/clickhouse/preprocessed_configs/{config.preprocessed_name}') if after_removal: assert (exitcode == 1), error() else: assert (exitcode == 0), error() def wait_for_config_to_be_loaded(user=None): 'Wait for config to be loaded.' 
if restart: with When('I close terminal to the node to be restarted'): bash.close() with And('I stop ClickHouse to apply the config changes'): node.stop_clickhouse(safe=False) with And('I get the current log size'): cmd = node.cluster.command(None, f"stat --format=%s {cluster.environ['CLICKHOUSE_TESTS_DIR']}/_instances/{node.name}/logs/clickhouse-server.log") logsize = cmd.output.split(' ')[0].strip() with And('I start ClickHouse back up'): node.start_clickhouse(user=user, wait_healthy=wait_healthy) with Then('I tail the log file from using previous log size as the offset'): bash.prompt = bash.__class__.prompt bash.open() bash.send(f'tail -c +{logsize} -f /var/log/clickhouse-server/clickhouse-server.log') with Then('I wait for config reload message in the log file'): if restart: bash.expect(f"ConfigReloader: Loaded config '/etc/clickhouse-server/config.xml', performed update on configuration", timeout=timeout) else: bash.expect(f"ConfigReloader: Loaded config '/etc/clickhouse-server/{config.preprocessed_name}', performed update on configuration", timeout=timeout) try: with Given(f'{config.name}'): if settings.debug: with When('I output the content of the config'): debug(config.content) with node.cluster.shell(node.name) as bash: bash.expect(bash.prompt) bash.send('tail -v -n 0 -f /var/log/clickhouse-server/clickhouse-server.log') bash.expect('<==') bash.expect('\n') with When('I add the config', description=config.path): command = f'cat <<HEREDOC > {config.path} {config.content} HEREDOC' node.command(command, steps=False, exitcode=0) if check_preprocessed: with Then(f'{config.preprocessed_name} should be updated', description=f'timeout {timeout}'): check_preprocessed_config_is_updated() with And('I wait for config to be reloaded'): wait_for_config_to_be_loaded(user=user) (yield) finally: if (not modify): with Finally(f'I remove {config.name} on {node.name}'): with node.cluster.shell(node.name) as bash: bash.expect(bash.prompt) bash.send('tail -v -n 0 -f 
/var/log/clickhouse-server/clickhouse-server.log') bash.expect('<==') bash.expect('\n') with By('removing the config file', description=config.path): node.command(f'rm -rf {config.path}', exitcode=0) with Then(f'{config.preprocessed_name} should be updated', description=f'timeout {timeout}'): check_preprocessed_config_is_updated(after_removal=True) with And('I wait for config to be reloaded'): wait_for_config_to_be_loaded()<|docstring|>Add dynamic configuration file to ClickHouse. :param config: configuration file description :param timeout: timeout, default: 300 sec :param restart: restart server, default: False :param modify: only modify configuration file, default: False<|endoftext|>
cd5273a5da494515e8a2cddcd2b4b56fafcd4330870b40106b0768024f69b100
@TestStep(When)
def copy(self, dest_node, src_path, dest_path, bash=None, binary=False, eof='EOF', src_node=None):
    """Copy file from source to destination node.

    :param dest_node: node to write the file on
    :param src_path: path of the file on the source node
    :param dest_path: path to write on the destination node
    :param bash: shell to read the source file with; created from
        `src_node` when not given
    :param binary: binary copy mode (not implemented)
    :param eof: heredoc terminator, default: `EOF`
    :param src_node: node to create the shell on when `bash` is None
    """
    if binary:
        raise NotImplementedError('not yet implemented; need to use base64 encoding')
    # fix: the `bash` argument was previously ignored and a new shell was
    # always created; honor a caller-supplied shell
    if bash is None:
        bash = self.context.cluster.bash(node=src_node)
    cmd = bash(f'cat {src_path}')
    assert cmd.exitcode == 0, error()
    contents = cmd.output
    # NOTE(review): contents containing the `eof` marker would terminate the
    # heredoc early -- assumes plain-text payloads; confirm.
    dest_node.command(f'cat << {eof} > {dest_path}\n{contents}\n{eof}')
Copy file from source to destination node.
tests/testflows/helpers/common.py
copy
psyoblade/ClickHouse
1
python
@TestStep(When) def copy(self, dest_node, src_path, dest_path, bash=None, binary=False, eof='EOF', src_node=None): if binary: raise NotImplementedError('not yet implemented; need to use base64 encoding') bash = self.context.cluster.bash(node=src_node) cmd = bash(f'cat {src_path}') assert (cmd.exitcode == 0), error() contents = cmd.output dest_node.command(f'cat << {eof} > {dest_path} {contents} {eof}')
@TestStep(When) def copy(self, dest_node, src_path, dest_path, bash=None, binary=False, eof='EOF', src_node=None): if binary: raise NotImplementedError('not yet implemented; need to use base64 encoding') bash = self.context.cluster.bash(node=src_node) cmd = bash(f'cat {src_path}') assert (cmd.exitcode == 0), error() contents = cmd.output dest_node.command(f'cat << {eof} > {dest_path} {contents} {eof}')<|docstring|>Copy file from source to destination node.<|endoftext|>
13155711d3371d3209f61ec33f8d112abb3a3beb41e87ec057d012684d8ace34
@TestStep(Given)
def add_user_to_group_on_node(self, node=None, group='clickhouse', username='clickhouse'):
    """Add user {username} into group {group}.

    :param node: node to run on, default: current node
    :param group: group name, default: `clickhouse`
    :param username: user name, default: `clickhouse`
    """
    if node is None:
        node = self.context.node
    # fix: the command was misspelled as `usermode`, which does not exist and
    # would always fail the exitcode=0 check.
    # NOTE(review): `usermod -g` sets the *primary* group; if the intent is to
    # append a supplementary group, `usermod -a -G` would be needed -- confirm.
    node.command(f'usermod -g {group} {username}', exitcode=0)
Add user {username} into group {group}.
tests/testflows/helpers/common.py
add_user_to_group_on_node
psyoblade/ClickHouse
1
python
@TestStep(Given) def add_user_to_group_on_node(self, node=None, group='clickhouse', username='clickhouse'): if (node is None): node = self.context.node node.command(f'usermode -g {group} {username}', exitcode=0)
@TestStep(Given) def add_user_to_group_on_node(self, node=None, group='clickhouse', username='clickhouse'): if (node is None): node = self.context.node node.command(f'usermode -g {group} {username}', exitcode=0)<|docstring|>Add user {username} into group {group}.<|endoftext|>
6c9035b4e724f4a3e93e3b587dc852303198ffae2ea897b1244fdf955a791ae4
@TestStep(Given)
def change_user_on_node(self, node=None, username='clickhouse'):
    """Change user on node.

    Switches the node's shell session to `username` and switches back
    (`exit`) on teardown.
    """
    node = node if node is not None else self.context.node
    try:
        node.command(f'su {username}', exitcode=0)
        yield
    finally:
        node.command('exit', exitcode=0)
Change user on node.
tests/testflows/helpers/common.py
change_user_on_node
psyoblade/ClickHouse
1
python
@TestStep(Given) def change_user_on_node(self, node=None, username='clickhouse'): if (node is None): node = self.context.node try: node.command(f'su {username}', exitcode=0) (yield) finally: node.command('exit', exitcode=0)
@TestStep(Given) def change_user_on_node(self, node=None, username='clickhouse'): if (node is None): node = self.context.node try: node.command(f'su {username}', exitcode=0) (yield) finally: node.command('exit', exitcode=0)<|docstring|>Change user on node.<|endoftext|>
6911ee5ff0b7704d2012ebc3db1c5313b08d499316fa9cc17d7851b0b7cce36c
@TestStep(Given)
def add_user_on_node(self, node=None, groupname=None, username='clickhouse'):
    """Create user on node with group specifying.

    The user is removed with `deluser` on teardown.
    """
    node = node if node is not None else self.context.node
    try:
        # include the primary group option only when a group was requested
        group_opt = '' if groupname is None else f'-g {groupname} '
        node.command(f'useradd {group_opt}-s /bin/bash {username}', exitcode=0)
        yield
    finally:
        node.command(f'deluser {username}', exitcode=0)
Create user on node with group specifying.
tests/testflows/helpers/common.py
add_user_on_node
psyoblade/ClickHouse
1
python
@TestStep(Given) def add_user_on_node(self, node=None, groupname=None, username='clickhouse'): if (node is None): node = self.context.node try: if (groupname is None): node.command(f'useradd -s /bin/bash {username}', exitcode=0) else: node.command(f'useradd -g {groupname} -s /bin/bash {username}', exitcode=0) (yield) finally: node.command(f'deluser {username}', exitcode=0)
@TestStep(Given) def add_user_on_node(self, node=None, groupname=None, username='clickhouse'): if (node is None): node = self.context.node try: if (groupname is None): node.command(f'useradd -s /bin/bash {username}', exitcode=0) else: node.command(f'useradd -g {groupname} -s /bin/bash {username}', exitcode=0) (yield) finally: node.command(f'deluser {username}', exitcode=0)<|docstring|>Create user on node with group specifying.<|endoftext|>
409ca3e9e588b502b4e7c17f36f11024ec579d100b97daa927a0022e38473586
@TestStep(Given)
def add_group_on_node(self, node=None, groupname='clickhouse'):
    """Create group on node.

    :param node: node to run on, default: current node
    :param groupname: group name, default: `clickhouse`
    """
    if node is None:
        node = self.context.node
    try:
        node.command(f'groupadd {groupname}', exitcode=0)
        yield
    finally:
        # fix: cleanup previously ran hardcoded `delgroup clickhouse`, leaving
        # any non-default group behind; delete the group that was created
        node.command(f'delgroup {groupname}')
Create group on node
tests/testflows/helpers/common.py
add_group_on_node
psyoblade/ClickHouse
1
python
@TestStep(Given) def add_group_on_node(self, node=None, groupname='clickhouse'): if (node is None): node = self.context.node try: node.command(f'groupadd {groupname}', exitcode=0) (yield) finally: node.command(f'delgroup clickhouse')
@TestStep(Given) def add_group_on_node(self, node=None, groupname='clickhouse'): if (node is None): node = self.context.node try: node.command(f'groupadd {groupname}', exitcode=0) (yield) finally: node.command(f'delgroup clickhouse')<|docstring|>Create group on node<|endoftext|>
32115174fdcf28e180a963243ea8778631f6e19429cd31a80ef52adf82b3501b
@TestStep(Given)
def create_file_on_node(self, path, content, node=None):
    """Create file on node.

    :param path: file path
    :param content: file content
    :return: yields the file path; the file is removed on teardown
    """
    node = node if node is not None else self.context.node
    try:
        with By(f'creating file {path}'):
            node.command(f'cat <<HEREDOC > {path}\n{content}\nHEREDOC', exitcode=0)
        yield path
    finally:
        with Finally(f'I remove {path}'):
            node.command(f'rm -rf {path}', exitcode=0)
Create file on node. :param path: file path :param content: file content
tests/testflows/helpers/common.py
create_file_on_node
psyoblade/ClickHouse
1
python
@TestStep(Given) def create_file_on_node(self, path, content, node=None): 'Create file on node.\n\n :param path: file path\n :param content: file content\n ' if (node is None): node = self.context.node try: with By(f'creating file {path}'): node.command(f'cat <<HEREDOC > {path} {content} HEREDOC', exitcode=0) (yield path) finally: with Finally(f'I remove {path}'): node.command(f'rm -rf {path}', exitcode=0)
@TestStep(Given) def create_file_on_node(self, path, content, node=None): 'Create file on node.\n\n :param path: file path\n :param content: file content\n ' if (node is None): node = self.context.node try: with By(f'creating file {path}'): node.command(f'cat <<HEREDOC > {path} {content} HEREDOC', exitcode=0) (yield path) finally: with Finally(f'I remove {path}'): node.command(f'rm -rf {path}', exitcode=0)<|docstring|>Create file on node. :param path: file path :param content: file content<|endoftext|>
321923cf579ca44a8018ba55296a47e963cd5afc5860bf3b0cfe62dfdbfdba05
@TestStep(Given)
def set_envs_on_node(self, envs, node=None):
    """Set environment variables on node.

    :param envs: dictionary of env variables key=value; each variable is
        exported on entry and unset on teardown
    """
    node = node if node is not None else self.context.node
    try:
        with By('setting envs'):
            for name, value in envs.items():
                node.command(f'export {name}={value}', exitcode=0)
        yield
    finally:
        with Finally(f'I unset envs'):
            for name in envs:
                node.command(f'unset {name}', exitcode=0)
Set environment variables on node. :param envs: dictionary of env variables key=value
tests/testflows/helpers/common.py
set_envs_on_node
psyoblade/ClickHouse
1
python
@TestStep(Given) def set_envs_on_node(self, envs, node=None): 'Set environment variables on node.\n\n :param envs: dictionary of env variables key=value\n ' if (node is None): node = self.context.node try: with By('setting envs'): for (key, value) in envs.items(): node.command(f'export {key}={value}', exitcode=0) (yield) finally: with Finally(f'I unset envs'): for key in envs: node.command(f'unset {key}', exitcode=0)
@TestStep(Given) def set_envs_on_node(self, envs, node=None): 'Set environment variables on node.\n\n :param envs: dictionary of env variables key=value\n ' if (node is None): node = self.context.node try: with By('setting envs'): for (key, value) in envs.items(): node.command(f'export {key}={value}', exitcode=0) (yield) finally: with Finally(f'I unset envs'): for key in envs: node.command(f'unset {key}', exitcode=0)<|docstring|>Set environment variables on node. :param envs: dictionary of env variables key=value<|endoftext|>