hexsha
stringlengths
40
40
size
int64
1
1.03M
ext
stringclasses
10 values
lang
stringclasses
1 value
max_stars_repo_path
stringlengths
3
239
max_stars_repo_name
stringlengths
5
130
max_stars_repo_head_hexsha
stringlengths
40
78
max_stars_repo_licenses
listlengths
1
10
max_stars_count
int64
1
191k
max_stars_repo_stars_event_min_datetime
stringlengths
24
24
max_stars_repo_stars_event_max_datetime
stringlengths
24
24
max_issues_repo_path
stringlengths
3
239
max_issues_repo_name
stringlengths
5
130
max_issues_repo_head_hexsha
stringlengths
40
78
max_issues_repo_licenses
listlengths
1
10
max_issues_count
int64
1
67k
max_issues_repo_issues_event_min_datetime
stringlengths
24
24
max_issues_repo_issues_event_max_datetime
stringlengths
24
24
max_forks_repo_path
stringlengths
3
239
max_forks_repo_name
stringlengths
5
130
max_forks_repo_head_hexsha
stringlengths
40
78
max_forks_repo_licenses
listlengths
1
10
max_forks_count
int64
1
105k
max_forks_repo_forks_event_min_datetime
stringlengths
24
24
max_forks_repo_forks_event_max_datetime
stringlengths
24
24
content
stringlengths
1
1.03M
avg_line_length
float64
1
958k
max_line_length
int64
1
1.03M
alphanum_fraction
float64
0
1
4a14bf0024d7bde780374873784e2a841db0a185
6,215
py
Python
vehicle_routing/vehicle_routing.py
institute-for-advanced-physical-studies/QOSF-cohort3
2b4b7d43fe6a3d0b7abf1f7918a565ce5b80fcf2
[ "MIT" ]
8
2021-05-02T15:42:37.000Z
2022-03-18T15:44:16.000Z
vehicle_routing/vehicle_routing.py
institute-for-advanced-physical-studies/QOSF-cohort3
2b4b7d43fe6a3d0b7abf1f7918a565ce5b80fcf2
[ "MIT" ]
null
null
null
vehicle_routing/vehicle_routing.py
institute-for-advanced-physical-studies/QOSF-cohort3
2b4b7d43fe6a3d0b7abf1f7918a565ce5b80fcf2
[ "MIT" ]
8
2021-08-03T06:43:13.000Z
2022-01-29T06:22:30.000Z
import numpy as np
import dimod
import time
from functools import partial
from solver_backend import SolverBackend
from dwave.embedding.chain_strength import uniform_torque_compensation
from qiskit_optimization.converters import QuadraticProgramToQubo
from qiskit_optimization.algorithms import OptimizationResult


class VehicleRouter:

    """Abstract class for solving the Vehicle Routing Problem. To build a VRP
    solver, simply inherit from this class and override the
    build_quadratic_program function in this class."""

    def __init__(self, n_clients, n_vehicles, cost_matrix, **params):
        """Initializes the VRP by storing all inputs, initializing variables for
        storing the quadratic structures and results and calls the rebuild
        function to build all quadratic structures.

        Args:
            n_clients: No. of nodes in the problem (excluding the depot).
            n_vehicles: No. of vehicles available for delivery.
            cost_matrix: (n_clients + 1) x (n_clients + 1) matrix describing the
                cost of moving from node i to node j.
            constraint_penalty: Penalty value to use for constraints in the
                QUBO. Defaults to automatic calculation by qiskit converters.
            chain_strength: Chain strength to be used for D-Wave sampler.
                Defaults to automatic chain strength calculation via uniform
                torque compensation.
            num_reads: Number of samples to read. Defaults to 1000.
            solver: Select a backend solver. Defaults to 'dwave'.
        """

        # Store critical inputs
        self.n = n_clients
        self.m = n_vehicles
        self.cost = np.array(cost_matrix)

        # Extract parameters; setdefault also writes the effective value back
        # into the caller-supplied params dict
        self.penalty = params.setdefault('constraint_penalty', None)
        self.chain_strength = params.setdefault('chain_strength', partial(uniform_torque_compensation, prefactor=2))
        self.num_reads = params.setdefault('num_reads', 1000)
        self.solver = params.setdefault('solver', 'dwave')

        # Initialize quadratic structures (filled in by rebuild)
        self.qp = None
        self.qubo = None
        self.bqm = None
        self.variables = None

        # Initialize result containers
        self.result = None
        self.solution = None

        # Initialize timer
        self.clock = None
        self.timing = {}

        # Initialize backend
        self.backend = SolverBackend(self)

        # Build quadratic models
        self.rebuild()

    def build_quadratic_program(self):
        """Dummy function to be overriden in child class. Required to set
        self.variables to contain the names of all variables in the form of a
        numpy array and self.qp to contain the quadratic program to be
        solved."""

        # Dummy. Override in child class.
        pass

    def build_bqm(self):
        """Converts the quadratic program in self.qp to a QUBO by appending all
        constraints to the objective function in the form of penalties and then
        builds a BQM from the QUBO for solving by D-Wave."""

        # Convert to QUBO
        converter = QuadraticProgramToQubo(penalty=self.penalty)
        self.qubo = converter.convert(self.qp)

        # Extract qubo data (linear terms g, quadratic terms Q, constant c)
        Q = self.qubo.objective.quadratic.to_dict(use_name=True)
        g = self.qubo.objective.linear.to_dict(use_name=True)
        c = self.qubo.objective.constant

        # Build BQM
        self.bqm = dimod.BQM(g, Q, c, dimod.BINARY)

    def rebuild(self):
        """Builds the quadratic program by calling build_quadratic_program and
        then the QUBO and BQM by calling build_bqm."""

        # Begin stopwatch
        self.clock = time.time()

        # Rebuild quadratic models
        self.build_quadratic_program()
        self.build_bqm()

        # Record build time (stored in microseconds)
        self.timing['qubo_build_time'] = (time.time() - self.clock) * 1e6

    def extract_solution(self, result_dict):
        """Uses a result dictionary mapping variable names to the solved
        solution to build the self.solution variable in the same shape as
        self.variables and containing the corresponding solutions.

        Args:
            result_dict: Dictionary mapping variable names to solved values
                for these variables.
        """

        # Extract solution from result dictionary
        var_list = self.variables.reshape(-1)
        self.solution = np.zeros(var_list.shape)
        for i in range(len(var_list)):
            self.solution[i] = result_dict[var_list[i]]

        # Reshape result back to the layout of self.variables
        self.solution = self.solution.reshape(self.variables.shape)

    def evaluate_vrp_cost(self):
        """Evaluate the optimized VRP cost under the optimized solution stored
        in self.solution.

        Returns:
            Optimized VRP cost as a float value.
        """

        # Return optimized energy. Qiskit returns an OptimizationResult with
        # .fval; D-Wave sample sets expose the lowest energy via .first.energy.
        # NOTE(review): exact type check kept; isinstance would also accept
        # subclasses.
        if type(self.result) == OptimizationResult:
            return self.result.fval
        else:
            return self.result.first.energy

    def evaluate_qubo_feasibility(self, data=None):
        """Evaluates whether the QUBO is feasible under the supplied data as
        inputs. If this data is not supplied, the self.solution variable is
        used instead.

        Args:
            data: Values of the variables in the solution to be tested.
                Defaults to self.solution.

        Returns:
            A 3-tuple containing a boolean value indicating whether the QUBO is
            feasible or not, a list of variables that violate constraints, and
            the list of violated constraints. If feasible, (True, [], []) is
            returned.
        """

        # Resolve data: flatten whichever source is used
        if data is None:
            data = self.solution.reshape(-1)
        else:
            data = np.array(data).reshape(-1)

        # Get constraint violation data
        return self.qp.get_feasibility_info(data)

    def solve(self, **params):
        """Solve the QUBO using the selected solver.

        Args:
            params: Parameters to send to the selected backend solver. You may
                also specify the solver to select a different solver and
                override the specified self.solver.
        """

        # Resolve solver (caller-supplied 'solver' wins over self.solver)
        params.setdefault('solver', self.solver)

        # Solve
        self.backend.solve(**params)
37.215569
118
0.661464
4a14c0599fdb2b16069ecefd827125c159faafc3
1,253
py
Python
src/qs/workflow.py
ryuichi1208/air-pipeline
eac5cad9f089e41ed5aace2fdaf0aff3696efb09
[ "Apache-2.0" ]
5
2019-12-01T07:50:04.000Z
2021-06-01T02:04:22.000Z
airflow_ml/quick_start/workflow.py
icoxfog417/airflow-ml-exercises
9fc1072a38be7a014ba2ec1a955d96b87c03e104
[ "MIT" ]
13
2019-12-04T23:09:46.000Z
2022-03-01T23:10:31.000Z
airflow_ml/quick_start/workflow.py
icoxfog417/airflow-ml-exercises
9fc1072a38be7a014ba2ec1a955d96b87c03e104
[ "MIT" ]
2
2020-05-22T14:27:49.000Z
2020-10-09T03:20:50.000Z
from datetime import datetime, timedelta
import time
import random

from airflow import DAG
from airflow.operators.python_operator import PythonOperator
from pprint import pprint

# Shared operator defaults applied to every task in this DAG.
default_args = {
    "owner": "airflow",
    "depends_on_past": False,
    "start_date": datetime(2019, 4, 1),
    "email": ["airflow@example.com"],
    "email_on_failure": False,
    "email_on_retry": False,
    "retries": 1,
    "retry_delay": timedelta(minutes=5),
}

dag = DAG("airflow-ml-quickstart", default_args=default_args)


def print_context(ds, **kwargs):
    """Dump the task context and the execution date to the task log."""
    pprint(kwargs)
    print(ds)
    return "Print to Log"


print_task = PythonOperator(
    task_id="print_task",
    provide_context=True,
    python_callable=print_context,
    dag=dag,
)


def sleep(seconds):
    """Block for *seconds* seconds."""
    time.sleep(seconds)


def make_sleep_task(task_name, dag):
    """Build a PythonOperator that sleeps 0.1-0.3 s (duration randomised once,
    at DAG-parse time)."""
    duration = float(random.randint(1, 3)) / 10
    return PythonOperator(
        task_id=task_name,
        python_callable=sleep,
        op_kwargs={"seconds": duration},
        dag=dag,
    )


# print_task > sleep_task
first_sleep = make_sleep_task("first_sleep", dag)
last_sleep = make_sleep_task("last_sleep", dag)
print_task >> last_sleep

# sleep_task > print_task
first_sleep.set_downstream(print_task)
20.883333
61
0.700718
4a14c0f3c3840a99b4f45bc09e61e584b2e42369
13,445
py
Python
tests/test_service.py
nyu-devops-2021-summer-promotions/promotions
892a54dc137c9f77eb1aab8aee65fdbc2756b100
[ "Apache-2.0" ]
null
null
null
tests/test_service.py
nyu-devops-2021-summer-promotions/promotions
892a54dc137c9f77eb1aab8aee65fdbc2756b100
[ "Apache-2.0" ]
119
2021-06-15T18:36:19.000Z
2021-08-11T17:30:26.000Z
tests/test_service.py
nyu-devops-2021-summer-promotions/promotions
892a54dc137c9f77eb1aab8aee65fdbc2756b100
[ "Apache-2.0" ]
5
2021-06-15T19:18:20.000Z
2021-08-08T15:14:49.000Z
""" Promotion API Service Test Suite Test cases can be run with the following: nosetests -v --with-spec --spec-color coverage report -m codecov --token=$CODECOV_TOKEN While debugging just these tests it's convinient to use this: nosetests --stop tests/test_service.py:TestPromotionServer """ import os import json import logging import unittest from urllib.parse import quote_plus from service import status # HTTP Status Codes from service.models import db from service.routes import app, init_db from .factories import PromotionFactory from dateutil import parser # # DATABASE_URI = os.getenv('DATABASE_URI', 'sqlite:///../db/test.db') # DATABASE_URI = os.getenv( # "DATABASE_URI", "postgres://postgres:postgres@localhost:5432/testdb" # ) DATABASE_URI = os.getenv( "DATABASE_URI", "postgres://postgres:postgres@localhost:5432/testdb" ) # override if we are running in Cloud Foundry if 'VCAP_SERVICES' in os.environ: vcap = json.loads(os.environ['VCAP_SERVICES']) DATABASE_URI = vcap['user-provided'][0]['credentials']['url'] BASE_URL = "/promotions" CONTENT_TYPE_JSON = "application/json" ###################################################################### # T E S T C A S E S ###################################################################### class TestPromotionServer(unittest.TestCase): """ Promotion Server Tests """ @classmethod def setUpClass(cls): """ Run once before all tests """ app.config["TESTING"] = True app.config["DEBUG"] = False app.config["SQLALCHEMY_DATABASE_URI"] = DATABASE_URI app.logger.setLevel(logging.CRITICAL) init_db() @classmethod def tearDownClass(cls): pass def setUp(self): """ Runs before each test """ db.drop_all() # clean up the last tests db.create_all() # create new tables self.app = app.test_client() def tearDown(self): db.session.remove() db.drop_all() def _create_promotions(self, count): """ Factory method to create promotions in bulk """ promotions = [] for _ in range(count): test_promotion = PromotionFactory() resp = self.app.post( 
"/promotions", json=test_promotion.serialize(), content_type="application/json" ) self.assertEqual( resp.status_code, status.HTTP_201_CREATED, "Could not create test promotion" ) new_promotion = resp.get_json() test_promotion.id = new_promotion["id"] promotions.append(test_promotion) return promotions def test_index(self): """ Test the Home Page """ resp = self.app.get("/") self.assertEqual(resp.status_code, status.HTTP_200_OK) self.assertIn(b"Promotion REST API Service", resp.data) def test_get_promotion_list(self): """ Get a list of Promotions """ self._create_promotions(5) resp = self.app.get("/promotions") self.assertEqual(resp.status_code, status.HTTP_200_OK) data = resp.get_json() self.assertEqual(len(data), 5) def test_get_promotion(self): """ Get a single Promotion """ # get the id of a promotion test_promotion = self._create_promotions(1)[0] resp = self.app.get( "/promotions/{}".format(test_promotion.id), content_type="application/json" ) self.assertEqual(resp.status_code, status.HTTP_200_OK) data = resp.get_json() self.assertEqual(data["title"], test_promotion.title) def test_get_promotion_not_found(self): """ Get a Promotion thats not found """ resp = self.app.get("/promotions/0") self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND) def test_create_promotion(self): """ Create a new Promotion """ test_promotion = PromotionFactory() logging.debug(test_promotion) resp = self.app.post( "/promotions", json=test_promotion.serialize(), content_type="application/json" ) self.assertEqual(resp.status_code, status.HTTP_201_CREATED) # Make sure location header is set location = resp.headers.get("Location", None) self.assertIsNotNone(location) # Check the data is correct new_promotion = resp.get_json() self.assertEqual(new_promotion["title"], test_promotion.title, "Title do not match") self.assertEqual( new_promotion["promotion_type"], test_promotion.promotion_type, "Promotion Type do not match" ) self.assertEqual( 
parser.parse(new_promotion["start_date"]).strftime( '%Y-%m-%d'), test_promotion.start_date, "Start date does not match" ) self.assertEqual( parser.parse(new_promotion["end_date"]).strftime( '%Y-%m-%d'), test_promotion.end_date, "End date does not match" ) # Check that the location header was correct resp = self.app.get(location, content_type="application/json") self.assertEqual(resp.status_code, status.HTTP_200_OK) new_promotion = resp.get_json() self.assertEqual(new_promotion["title"], test_promotion.title, "Title do not match") self.assertEqual( new_promotion["promotion_type"], test_promotion.promotion_type, "Promotion Type do not match" ) self.assertEqual( parser.parse(new_promotion["start_date"]).strftime( '%Y-%m-%d'), test_promotion.start_date, "Start date does not match" ) self.assertEqual( parser.parse(new_promotion["end_date"]).strftime( '%Y-%m-%d'), test_promotion.end_date, "End date does not match" ) def test_create_promotion_no_data(self): """ Create a Promotion with missing data """ resp = self.app.post( "/promotions", json={}, content_type="application/json" ) self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST) def test_create_promotion_no_content_type(self): """ Create a Promotion with no content type """ resp = self.app.post("/promotions") self.assertEqual(resp.status_code, status.HTTP_415_UNSUPPORTED_MEDIA_TYPE) def test_update_promotion(self): """Update an existing Promotion""" # create a promotion to update test_promotion = PromotionFactory() resp = self.app.post( BASE_URL, json=test_promotion.serialize(), content_type=CONTENT_TYPE_JSON ) self.assertEqual(resp.status_code, status.HTTP_201_CREATED) # update the promotion new_promotion = resp.get_json() logging.debug(new_promotion) new_promotion["title"] = "unknown" resp = self.app.put( "/promotions/{}".format(new_promotion["id"]), json=new_promotion, content_type=CONTENT_TYPE_JSON, ) self.assertEqual(resp.status_code, status.HTTP_200_OK) updated_promotion = resp.get_json() 
self.assertEqual(updated_promotion["title"], "unknown") resp = self.app.put( "/promotions/{}".format(int(new_promotion["id"])+1), json=new_promotion, content_type=CONTENT_TYPE_JSON, ) self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND) def test_activate_promotion(self): """Activate an existing Promotion""" # create a promotion to update test_promotion = PromotionFactory() resp = self.app.post( BASE_URL, json=test_promotion.serialize(), content_type=CONTENT_TYPE_JSON ) self.assertEqual(resp.status_code, status.HTTP_201_CREATED) # update the promotion new_promotion = resp.get_json() logging.debug(new_promotion) new_promotion["active"] = False self.assertEqual(new_promotion["active"], False) resp = self.app.put( "/promotions/{}/activate".format(new_promotion["id"]), json=new_promotion, content_type=CONTENT_TYPE_JSON, ) self.assertEqual(resp.status_code, status.HTTP_200_OK) updated_promotion = resp.get_json() self.assertEqual(updated_promotion["active"], True) resp = self.app.put( "/promotions/{}/activate".format(int(new_promotion["id"])+1), json=new_promotion, content_type=CONTENT_TYPE_JSON, ) self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND) def test_deactivate_promotion(self): """Deactivate an existing Promotion""" # create a promotion to update test_promotion = PromotionFactory() resp = self.app.post( BASE_URL, json=test_promotion.serialize(), content_type=CONTENT_TYPE_JSON ) self.assertEqual(resp.status_code, status.HTTP_201_CREATED) # update the promotion new_promotion = resp.get_json() logging.debug(new_promotion) new_promotion["active"] = True self.assertEqual(new_promotion["active"], True) resp = self.app.put( "/promotions/{}/deactivate".format(new_promotion["id"]), json=new_promotion, content_type=CONTENT_TYPE_JSON, ) self.assertEqual(resp.status_code, status.HTTP_200_OK) updated_promotion = resp.get_json() self.assertEqual(updated_promotion["active"], False) resp = self.app.put( 
"/promotions/{}/deactivate".format(int(new_promotion["id"])+1), json=new_promotion, content_type=CONTENT_TYPE_JSON, ) self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND) def test_delete_promotion(self): """Delete a Promotion""" test_promotion = self._create_promotions(1)[0] resp = self.app.delete( "{0}/{1}".format(BASE_URL, test_promotion.id), content_type=CONTENT_TYPE_JSON ) self.assertEqual(resp.status_code, status.HTTP_204_NO_CONTENT) self.assertEqual(len(resp.data), 0) # make sure they are deleted resp = self.app.get( "{0}/{1}".format(BASE_URL, test_promotion.id), content_type=CONTENT_TYPE_JSON ) self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND) def test_query_promotion_list_by_promotion_type(self): """Query Promotions by promotion_type""" promotions = self._create_promotions(10) test_promotion_type = promotions[0].promotion_type promotion_type_promotions = [promotion for promotion in promotions if promotion.promotion_type == test_promotion_type] resp = self.app.get( BASE_URL, query_string="promotion_type={}".format(quote_plus(test_promotion_type)) ) self.assertEqual(resp.status_code, status.HTTP_200_OK) data = resp.get_json() self.assertEqual(len(data), len(promotion_type_promotions)) # check the data just to be sure for promotion in data: self.assertEqual(promotion["promotion_type"], test_promotion_type) def test_query_promotion_list_by_active(self): """Query Promotions by Active""" promotions = self._create_promotions(10) test_active = promotions[0].active active_promotions = [promotion for promotion in promotions if promotion.active == test_active] resp = self.app.get( BASE_URL, query_string="active={}".format(test_active) ) self.assertEqual(resp.status_code, status.HTTP_200_OK) data = resp.get_json() self.assertEqual(len(data), len(active_promotions)) # check the data just to be sure for promotion in data: self.assertEqual(promotion["active"], test_active) def test_query_promotion_list_by_title(self): """Query Promotions by Title""" 
promotions = self._create_promotions(10) test_title = promotions[0].title title_promotions = [promotion for promotion in promotions if promotion.title == test_title] resp = self.app.get( BASE_URL, query_string="title={}".format(quote_plus(test_title)) ) self.assertEqual(resp.status_code, status.HTTP_200_OK) data = resp.get_json() self.assertEqual(len(data), len(title_promotions)) # check the data just to be sure for promotion in data: self.assertEqual(promotion["title"], test_title) def test_query_promotion_list_by_end_date(self): """Query Promotions by End Date""" promotions = self._create_promotions(10) test_end_date = promotions[0].end_date end_date_promotions = [promotion for promotion in promotions if promotion.end_date == test_end_date] resp = self.app.get( BASE_URL, query_string="end_date={}".format(test_end_date) ) self.assertEqual(resp.status_code, status.HTTP_200_OK) data = resp.get_json() self.assertEqual(len(data), len(end_date_promotions)) # check the data just to be sure for promotion in data: self.assertEqual(parser.parse(promotion["end_date"]). strftime('%Y-%m-%d'), test_end_date)
39.896142
126
0.634065
4a14c11c95d612ec6fd34eea62623d28a2460220
4,090
py
Python
robosuite/models/arenas/table_arena.py
s-tian/robosuite
56c20db6231e03ec5910869f7227819943fc24b2
[ "MIT" ]
4
2022-01-24T07:18:21.000Z
2022-01-25T02:29:17.000Z
robosuite/models/arenas/table_arena.py
s-tian/robosuite
56c20db6231e03ec5910869f7227819943fc24b2
[ "MIT" ]
1
2022-01-12T14:52:28.000Z
2022-01-12T14:52:28.000Z
robosuite/models/arenas/table_arena.py
s-tian/robosuite
56c20db6231e03ec5910869f7227819943fc24b2
[ "MIT" ]
null
null
null
import numpy as np

from robosuite.models.arenas import Arena
from robosuite.utils.mjcf_utils import array_to_string, string_to_array, xml_path_completion


class TableArena(Arena):
    """
    Workspace that contains an empty table.

    Args:
        table_full_size (3-tuple): (L,W,H) full dimensions of the table
        table_friction (3-tuple): (sliding, torsional, rolling) friction parameters of the table
        table_offset (3-tuple): (x,y,z) offset from center of arena when placing table.
            Note that the z value sets the upper limit of the table
        has_legs (bool): whether the table has legs or not
        xml (str): xml file to load arena
    """

    def __init__(
        self,
        table_full_size=(0.8, 0.8, 0.05),
        table_friction=(1, 0.005, 0.0001),
        table_offset=(0, 0, 0.8),
        has_legs=True,
        xml="arenas/table_arena.xml",
    ):
        super().__init__(xml_path_completion(xml))

        self.table_full_size = np.array(table_full_size)
        self.table_half_size = self.table_full_size / 2
        self.table_friction = table_friction
        self.table_offset = table_offset
        # Table body origin: arena bottom, shifted down by half the table
        # height, then moved by the requested offset (so offset z ends up at
        # the table top).
        self.center_pos = self.bottom_pos + np.array([0, 0, -self.table_half_size[2]]) + self.table_offset

        # Cache the XML elements whose attributes configure_location rewrites.
        self.table_body = self.worldbody.find("./body[@name='table']")
        self.table_collision = self.table_body.find("./geom[@name='table_collision']")
        self.table_visual = self.table_body.find("./geom[@name='table_visual']")
        self.table_top = self.table_body.find("./site[@name='table_top']")

        self.has_legs = has_legs
        self.table_legs_visual = [
            self.table_body.find("./geom[@name='table_leg1_visual']"),
            self.table_body.find("./geom[@name='table_leg2_visual']"),
            self.table_body.find("./geom[@name='table_leg3_visual']"),
            self.table_body.find("./geom[@name='table_leg4_visual']"),
        ]

        self.configure_location()

    def configure_location(self):
        """Configures correct locations for this arena"""
        self.floor.set("pos", array_to_string(self.bottom_pos))
        self.table_body.set("pos", array_to_string(self.center_pos))
        self.table_collision.set("size", array_to_string(self.table_half_size))
        self.table_collision.set("friction", array_to_string(self.table_friction))
        self.table_visual.set("size", array_to_string(self.table_half_size))
        # table_top site sits on the upper surface, in the table body frame
        self.table_top.set("pos", array_to_string(np.array([0, 0, self.table_half_size[2]])))

        # If we're not using legs, set their size to 0
        if not self.has_legs:
            for leg in self.table_legs_visual:
                # also make them fully transparent
                leg.set("rgba", array_to_string([1, 0, 0, 0]))
                leg.set("size", array_to_string([0.0001, 0.0001]))
        else:
            # Otherwise, set leg locations appropriately
            delta_x = [0.1, -0.1, -0.1, 0.1]
            delta_y = [0.1, 0.1, -0.1, -0.1]
            for leg, dx, dy in zip(self.table_legs_visual, delta_x, delta_y):
                # If x-length of table is less than a certain length, place leg in the middle between ends
                # Otherwise we place it near the edge (0.1 m inset from the edge)
                x = 0
                if self.table_half_size[0] > abs(dx * 2.0):
                    x += np.sign(dx) * self.table_half_size[0] - dx
                # Repeat the same process for y
                y = 0
                if self.table_half_size[1] > abs(dy * 2.0):
                    y += np.sign(dy) * self.table_half_size[1] - dy
                # Get z value: half the distance from the floor to the table
                # underside. NOTE(review): size appears to be the MJCF cylinder
                # (radius, half-length) convention — confirm against the arena
                # XML.
                z = (self.table_offset[2] - self.table_half_size[2]) / 2.0
                # Set leg position (z is negative: below the table body origin)
                leg.set("pos", array_to_string([x, y, -z]))
                # Set leg size
                leg.set("size", array_to_string([0.025, z]))

    @property
    def table_top_abs(self):
        """
        Grabs the absolute position of table top

        Returns:
            np.array: (x,y,z) table position
        """
        return string_to_array(self.floor.get("pos")) + self.table_offset
41.734694
106
0.601956
4a14c1e35919d4960dfc3dd6069b2798f88c2cb0
797
py
Python
strings_question/compare_two_version.py
rjsnh1522/geeks-4-geeks-python
9bea0ce4f3fae9b5f9e5952fb5b4b3a8c6186cf4
[ "MIT" ]
null
null
null
strings_question/compare_two_version.py
rjsnh1522/geeks-4-geeks-python
9bea0ce4f3fae9b5f9e5952fb5b4b3a8c6186cf4
[ "MIT" ]
5
2021-03-10T11:49:39.000Z
2022-02-27T01:35:59.000Z
strings_question/compare_two_version.py
rjsnh1522/geeks-4-geeks-python
9bea0ce4f3fae9b5f9e5952fb5b4b3a8c6186cf4
[ "MIT" ]
null
null
null
from itertools import zip_longest


class Solution:
    """Compare two dotted version strings (e.g. "1.2.10" vs "1.3")."""

    def compareVersion(self, A, B):
        """Compare version strings A and B component-by-component.

        Args:
            A: First version string, components separated by '.'.
            B: Second version string, components separated by '.'.

        Returns:
            1 if A > B, -1 if A < B, 0 if they are equal. Missing
            components are treated as 0, so "1.0" equals "1".

        Raises:
            ValueError: if any component is not an integer literal.
        """
        # zip_longest pads the shorter version with "0" components,
        # replacing the original manual while-loop padding.
        for left, right in zip_longest(A.split('.'), B.split('.'), fillvalue="0"):
            left, right = int(left), int(right)
            if left > right:
                return 1
            if left < right:
                return -1
        # All components equal. (The original's trailing `return -1` was
        # unreachable dead code and would have been wrong if reached.)
        return 0


v1 = "2.9.8"
v2 = "2.9.8"

sol = Solution()
print(sol.compareVersion(v1, v2))
23.441176
55
0.400251
4a14c2ca3958346ef494325c848b6df80527ffa7
3,754
py
Python
h1/models/iam_project_ownership_create.py
hyperonecom/h1-client-python
4ce355852ba3120ec1b8f509ab5894a5c08da730
[ "MIT" ]
null
null
null
h1/models/iam_project_ownership_create.py
hyperonecom/h1-client-python
4ce355852ba3120ec1b8f509ab5894a5c08da730
[ "MIT" ]
null
null
null
h1/models/iam_project_ownership_create.py
hyperonecom/h1-client-python
4ce355852ba3120ec1b8f509ab5894a5c08da730
[ "MIT" ]
null
null
null
# coding: utf-8 """ HyperOne HyperOne API # noqa: E501 The version of the OpenAPI document: 0.1.0 Generated by: https://openapi-generator.tech """ import pprint import re # noqa: F401 import six from h1.configuration import Configuration class IamProjectOwnershipCreate(object): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually. """ """ Attributes: openapi_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ openapi_types = { 'email': 'str' } attribute_map = { 'email': 'email' } def __init__(self, email=None, local_vars_configuration=None): # noqa: E501 """IamProjectOwnershipCreate - a model defined in OpenAPI""" # noqa: E501 if local_vars_configuration is None: local_vars_configuration = Configuration() self.local_vars_configuration = local_vars_configuration self._email = None self.discriminator = None self.email = email @property def email(self): """Gets the email of this IamProjectOwnershipCreate. # noqa: E501 :return: The email of this IamProjectOwnershipCreate. # noqa: E501 :rtype: str """ return self._email @email.setter def email(self, email): """Sets the email of this IamProjectOwnershipCreate. :param email: The email of this IamProjectOwnershipCreate. 
# noqa: E501 :type: str """ if self.local_vars_configuration.client_side_validation and email is None: # noqa: E501 raise ValueError("Invalid value for `email`, must not be `None`") # noqa: E501 if (self.local_vars_configuration.client_side_validation and email is not None and not re.search(r'^[\w\+\.-]+@([\w-]+\.)+[\w-]+$', email)): # noqa: E501 raise ValueError(r"Invalid value for `email`, must be a follow pattern or equal to `/^[\w\+\.-]+@([\w-]+\.)+[\w-]+$/`") # noqa: E501 self._email = email def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.openapi_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result def to_str(self): """Returns the string representation of the model""" return pprint.pformat(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, IamProjectOwnershipCreate): return False return self.to_dict() == other.to_dict() def __ne__(self, other): """Returns true if both objects are not equal""" if not isinstance(other, IamProjectOwnershipCreate): return True return self.to_dict() != other.to_dict()
30.032
145
0.577784
4a14c32f8774b50d65b071544b1bea3d0f2db20e
1,451
py
Python
src/NumericalAnalysis/elliptic.py
sidnb13/ut-aero-research
4c0b3fbbabf9faed1414d28ad4307545378795b8
[ "MIT" ]
null
null
null
src/NumericalAnalysis/elliptic.py
sidnb13/ut-aero-research
4c0b3fbbabf9faed1414d28ad4307545378795b8
[ "MIT" ]
null
null
null
src/NumericalAnalysis/elliptic.py
sidnb13/ut-aero-research
4c0b3fbbabf9faed1414d28ad4307545378795b8
[ "MIT" ]
null
null
null
from requirements.imports import *
from requirements.numericalMethods import *
from requirements.variables import *

# ELLIPTIC APPROXIMATION
# NOTE(review): N, delta, phi_0, phi_min, limits, integrate, np, CurveFit and
# Plotter are all provided by the star imports above — confirm in the
# requirements.* modules.


# first kind
def elliptic1(k, n):
    """Evaluate, for each modulus in *k* (length-N array-like), the integral
    of -pi/(2n) + 1/(n*sqrt((1-x^2)(1-k^2 x^2))) over [0, 1] — a shifted and
    1/n-scaled complete elliptic integral of the first kind."""
    # unfortunately cannot be vectorized, do not want to use a power series
    intvals = np.zeros(N)
    for i in range(0,N):
        # quad returns (value, abserr); the error estimate is discarded
        intvals[i], temp = integrate.quad(lambda x: -np.pi/(2*n) + 1/(n*np.sqrt((1-np.power(x,2))*(1-k[i]**2*x**2))),0,1)
    return intvals


# third kind
def elliptic3(k,a,b,c,d):
    """Evaluate, for each modulus in *k*, the integral of
    a + c/(b*(1-d*sin^2 x)*sqrt(1-k^2 sin^2 x)) over [0, 1] — an affine-scaled
    elliptic-integral-of-the-third-kind form; a, b, c, d are fit parameters."""
    # unfortunately cannot be vectorized, do not want to use a power series
    intvals = np.zeros(N)
    for i in range(0,N):
        intvals[i], temp = integrate.quad(lambda x: a + c/(b*(1-d*np.sin(x)**2)*np.sqrt(1-k[i]**2*np.sin(x)**2)),0,1)
    return intvals


# Fit elliptic3's free parameters against the two reference curves.
# NOTE(review): ell0 is fitted against phi_min but labelled "$\phi_0$" in the
# plot below (and ellmin, fitted on phi_0, is labelled phi_min) — verify this
# pairing is intentional.
ell0 = CurveFit(delta,phi_min,delta,f=elliptic3)
ell0Coeffs = ell0.getCoeffs()
ellmin = CurveFit(delta,phi_0,delta,f=elliptic3)
ellminCoeffs = ellmin.getCoeffs()

ellPlots = [
    {'xvar': delta, 'yvar': elliptic3(delta,*ell0Coeffs), 'label': f'Approx for $\\phi_0$ with $n={ell0Coeffs[0]}$'},
    {'xvar': delta, 'yvar': elliptic3(delta,*ellminCoeffs), 'label': f'Approx for $\\phi_\\mathrm{{min}}$ with $n={ellminCoeffs[0]}$'},
    {'xvar': delta, 'yvar': phi_0, 'label': '$\\phi_0$'},
    {'xvar': delta, 'yvar': phi_min, 'label': '$\\phi_\\mathrm{min}$'}]

# Render the approximations against the reference curves and save to PDF.
ellFile = 'plots/elliptical-approx.pdf'
ellPlotObj = Plotter(ellPlots,limits,ellFile,{'x': '$\\delta$', 'y': '$\\phi$'})
ellPlotObj.plot(show=True)
41.457143
135
0.656099
4a14c6b5c3a4a429062fb862e9bfc3840259e2d1
57
py
Python
main.py
XPH0904/Library-management-system
9990654070caa9f757af9a6f4771ce4b1b484083
[ "Apache-2.0" ]
null
null
null
main.py
XPH0904/Library-management-system
9990654070caa9f757af9a6f4771ce4b1b484083
[ "Apache-2.0" ]
null
null
null
main.py
XPH0904/Library-management-system
9990654070caa9f757af9a6f4771ce4b1b484083
[ "Apache-2.0" ]
null
null
null
from src.frame.MainFrame import *

# Application entry point: instantiating MainFrame builds and shows the main
# window. NOTE(review): relies on MainFrame being exported by the star import
# above — consider an explicit `from src.frame.MainFrame import MainFrame`.
main = MainFrame()
19
33
0.701754
4a14c7d92de75029b3d3e1442dabb172b08825f3
1,304
py
Python
Webserver-MVT/jcar/pop_model/get_set.py
augustoliks/IntelliJCar-JavaBotMVC
6037a70982e5af9bbdc9dbf1161c58272c511527
[ "BSD-2-Clause" ]
4
2018-03-23T14:44:05.000Z
2018-08-28T10:43:49.000Z
Webserver-MVT/jcar/pop_model/get_set.py
augustoliks/IntelliJCar-JavaBotMVC
6037a70982e5af9bbdc9dbf1161c58272c511527
[ "BSD-2-Clause" ]
null
null
null
Webserver-MVT/jcar/pop_model/get_set.py
augustoliks/IntelliJCar-JavaBotMVC
6037a70982e5af9bbdc9dbf1161c58272c511527
[ "BSD-2-Clause" ]
4
2018-04-16T04:57:36.000Z
2021-05-18T00:12:48.000Z
from jcar.pop_model.database import DataBase class GetSet(): def __init__(self): self.database = DataBase() self.today = self.database.last_date()[:8] self.data = {'gas': None, 'bat': None, 'lat': None, 'lon': None, 'gsm': None, 'tsp': None } def set_data(self, gas, bat, lat, lon, tsp, gsm): self.data['gas'] = gas self.data['bat'] = bat self.data['lat'] = lat self.data['lon'] = lon self.data['gsm'] = gsm self.data['tsp'] = tsp date = tsp[:8] hour = tsp[8:-2] json = { hour : self.data } self.today = date self.database.update_post(date, json) self.database.update_post('now', self.data) return self.data def get_history(self, date): return self.database.get_doc(date) def get_data(self, date, index): json = self.database.get_doc(date) return json[index] def get_history_today(self): return self.database.get_doc(self.today) def get_data_now(self): return self.database.get_doc('now') def get_last_date(self): return self.database.last_date()
26.08
53
0.519172
4a14c896c594a5b4e87780cdd12cfc354cd3ee1a
178
py
Python
src/objects/__init__.py
deepak2431/pygame-Coin-Fall-
2238fb8badc7db138e1bfa5e1d4e2672d652e935
[ "MIT" ]
null
null
null
src/objects/__init__.py
deepak2431/pygame-Coin-Fall-
2238fb8badc7db138e1bfa5e1d4e2672d652e935
[ "MIT" ]
null
null
null
src/objects/__init__.py
deepak2431/pygame-Coin-Fall-
2238fb8badc7db138e1bfa5e1d4e2672d652e935
[ "MIT" ]
null
null
null
from .cart import Cart from .coin import Coin from .bluecoin import BlueCoin from .bomb import Bomb from .death_zone import Death_Zone from .cart_one_v_one import Cart_One_V_One
25.428571
42
0.831461
4a14c8d9a925cce066bd7380c3704bbe7bd42a9c
4,517
py
Python
mmedit/models/backbones/encoder_decoders/decoders/indexnet_decoder.py
Jian137/mmediting-1
e1ac6c93441ec96696d0b530f040b91b809015b6
[ "Apache-2.0" ]
1,884
2020-07-09T18:53:43.000Z
2022-03-31T12:06:18.000Z
mmedit/models/backbones/encoder_decoders/decoders/indexnet_decoder.py
Jian137/mmediting-1
e1ac6c93441ec96696d0b530f040b91b809015b6
[ "Apache-2.0" ]
622
2020-07-09T18:52:27.000Z
2022-03-31T14:41:09.000Z
mmedit/models/backbones/encoder_decoders/decoders/indexnet_decoder.py
Jian137/mmediting-1
e1ac6c93441ec96696d0b530f040b91b809015b6
[ "Apache-2.0" ]
361
2020-07-09T19:21:47.000Z
2022-03-31T09:58:27.000Z
# Copyright (c) OpenMMLab. All rights reserved. import math import torch import torch.nn as nn import torch.nn.functional as F from mmcv.cnn import ConvModule, kaiming_init, normal_init from mmedit.models.common import DepthwiseSeparableConvModule from mmedit.models.registry import COMPONENTS class IndexedUpsample(nn.Module): """Indexed upsample module. Args: in_channels (int): Input channels. out_channels (int): Output channels. kernel_size (int, optional): Kernel size of the convolution layer. Defaults to 5. norm_cfg (dict, optional): Config dict for normalization layer. Defaults to dict(type='BN'). conv_module (ConvModule | DepthwiseSeparableConvModule, optional): Conv module. Defaults to ConvModule. """ def __init__(self, in_channels, out_channels, kernel_size=5, norm_cfg=dict(type='BN'), conv_module=ConvModule): super().__init__() self.conv = conv_module( in_channels, out_channels, kernel_size, padding=(kernel_size - 1) // 2, norm_cfg=norm_cfg, act_cfg=dict(type='ReLU6')) self.init_weights() def init_weights(self): """Init weights for the module. """ for m in self.modules(): if isinstance(m, nn.Conv2d): kaiming_init(m, mode='fan_in', nonlinearity='leaky_relu') def forward(self, x, shortcut, dec_idx_feat=None): """Forward function. Args: x (Tensor): Input feature map with shape (N, C, H, W). shortcut (Tensor): The shortcut connection with shape (N, C, H', W'). dec_idx_feat (Tensor, optional): The decode index feature map with shape (N, C, H', W'). Defaults to None. Returns: Tensor: Output tensor with shape (N, C, H', W'). 
""" if dec_idx_feat is not None: assert shortcut.dim() == 4, ( 'shortcut must be tensor with 4 dimensions') x = dec_idx_feat * F.interpolate(x, size=shortcut.shape[2:]) out = torch.cat((x, shortcut), dim=1) return self.conv(out) @COMPONENTS.register_module() class IndexNetDecoder(nn.Module): def __init__(self, in_channels, kernel_size=5, norm_cfg=dict(type='BN'), separable_conv=False): # TODO: remove in_channels argument super().__init__() if separable_conv: conv_module = DepthwiseSeparableConvModule else: conv_module = ConvModule blocks_in_channels = [ in_channels * 2, 96 * 2, 64 * 2, 32 * 2, 24 * 2, 16 * 2, 32 * 2 ] blocks_out_channels = [96, 64, 32, 24, 16, 32, 32] self.decoder_layers = nn.ModuleList() for in_channel, out_channel in zip(blocks_in_channels, blocks_out_channels): self.decoder_layers.append( IndexedUpsample(in_channel, out_channel, kernel_size, norm_cfg, conv_module)) self.pred = nn.Sequential( conv_module( 32, 1, kernel_size, padding=(kernel_size - 1) // 2, norm_cfg=norm_cfg, act_cfg=dict(type='ReLU6')), nn.Conv2d( 1, 1, kernel_size, padding=(kernel_size - 1) // 2, bias=False)) def init_weights(self): """Init weights for the module. """ for m in self.modules(): if isinstance(m, nn.Conv2d): std = math.sqrt(2. / (m.out_channels * m.kernel_size[0]**2)) normal_init(m, mean=0, std=std) def forward(self, inputs): """Forward function. Args: inputs (dict): Output dict of IndexNetEncoder. Returns: Tensor: Predicted alpha matte of the current batch. """ shortcuts = reversed(inputs['shortcuts']) dec_idx_feat_list = reversed(inputs['dec_idx_feat_list']) out = inputs['out'] group = (self.decoder_layers, shortcuts, dec_idx_feat_list) for decode_layer, shortcut, dec_idx_feat in zip(*group): out = decode_layer(out, shortcut, dec_idx_feat) out = self.pred(out) return out
32.264286
79
0.559663
4a14ca34fa4de43a1898dd95b41f84f9584bcb27
78,391
py
Python
packages/python/plotly/plotly/graph_objs/isosurface/_colorbar.py
dvd7587/plotly.py
1af3ca99884981c38fb49f4be40230f5f15c5fe6
[ "MIT" ]
1
2022-02-19T09:27:42.000Z
2022-02-19T09:27:42.000Z
packages/python/plotly/plotly/graph_objs/isosurface/_colorbar.py
jkkummerfeld/plotly.py
73ecb961cb51419e1c1b8fc0278a12d6f11d6881
[ "MIT" ]
null
null
null
packages/python/plotly/plotly/graph_objs/isosurface/_colorbar.py
jkkummerfeld/plotly.py
73ecb961cb51419e1c1b8fc0278a12d6f11d6881
[ "MIT" ]
null
null
null
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType import copy as _copy class ColorBar(_BaseTraceHierarchyType): # class properties # -------------------- _parent_path_str = "isosurface" _path_str = "isosurface.colorbar" _valid_props = { "bgcolor", "bordercolor", "borderwidth", "dtick", "exponentformat", "len", "lenmode", "minexponent", "nticks", "orientation", "outlinecolor", "outlinewidth", "separatethousands", "showexponent", "showticklabels", "showtickprefix", "showticksuffix", "thickness", "thicknessmode", "tick0", "tickangle", "tickcolor", "tickfont", "tickformat", "tickformatstopdefaults", "tickformatstops", "ticklabeloverflow", "ticklabelposition", "ticklabelstep", "ticklen", "tickmode", "tickprefix", "ticks", "ticksuffix", "ticktext", "ticktextsrc", "tickvals", "tickvalssrc", "tickwidth", "title", "titlefont", "titleside", "x", "xanchor", "xpad", "y", "yanchor", "ypad", } # bgcolor # ------- @property def bgcolor(self): """ Sets the color of padded area. The 'bgcolor' property is a color and may be specified as: - A hex string (e.g. '#ff0000') - An rgb/rgba string (e.g. 'rgb(255,0,0)') - An hsl/hsla string (e.g. 'hsl(0,100%,50%)') - An hsv/hsva string (e.g. 
'hsv(0,100%,100%)') - A named CSS color: aliceblue, antiquewhite, aqua, aquamarine, azure, beige, bisque, black, blanchedalmond, blue, blueviolet, brown, burlywood, cadetblue, chartreuse, chocolate, coral, cornflowerblue, cornsilk, crimson, cyan, darkblue, darkcyan, darkgoldenrod, darkgray, darkgrey, darkgreen, darkkhaki, darkmagenta, darkolivegreen, darkorange, darkorchid, darkred, darksalmon, darkseagreen, darkslateblue, darkslategray, darkslategrey, darkturquoise, darkviolet, deeppink, deepskyblue, dimgray, dimgrey, dodgerblue, firebrick, floralwhite, forestgreen, fuchsia, gainsboro, ghostwhite, gold, goldenrod, gray, grey, green, greenyellow, honeydew, hotpink, indianred, indigo, ivory, khaki, lavender, lavenderblush, lawngreen, lemonchiffon, lightblue, lightcoral, lightcyan, lightgoldenrodyellow, lightgray, lightgrey, lightgreen, lightpink, lightsalmon, lightseagreen, lightskyblue, lightslategray, lightslategrey, lightsteelblue, lightyellow, lime, limegreen, linen, magenta, maroon, mediumaquamarine, mediumblue, mediumorchid, mediumpurple, mediumseagreen, mediumslateblue, mediumspringgreen, mediumturquoise, mediumvioletred, midnightblue, mintcream, mistyrose, moccasin, navajowhite, navy, oldlace, olive, olivedrab, orange, orangered, orchid, palegoldenrod, palegreen, paleturquoise, palevioletred, papayawhip, peachpuff, peru, pink, plum, powderblue, purple, red, rosybrown, royalblue, rebeccapurple, saddlebrown, salmon, sandybrown, seagreen, seashell, sienna, silver, skyblue, slateblue, slategray, slategrey, snow, springgreen, steelblue, tan, teal, thistle, tomato, turquoise, violet, wheat, white, whitesmoke, yellow, yellowgreen Returns ------- str """ return self["bgcolor"] @bgcolor.setter def bgcolor(self, val): self["bgcolor"] = val # bordercolor # ----------- @property def bordercolor(self): """ Sets the axis line color. The 'bordercolor' property is a color and may be specified as: - A hex string (e.g. '#ff0000') - An rgb/rgba string (e.g. 
'rgb(255,0,0)') - An hsl/hsla string (e.g. 'hsl(0,100%,50%)') - An hsv/hsva string (e.g. 'hsv(0,100%,100%)') - A named CSS color: aliceblue, antiquewhite, aqua, aquamarine, azure, beige, bisque, black, blanchedalmond, blue, blueviolet, brown, burlywood, cadetblue, chartreuse, chocolate, coral, cornflowerblue, cornsilk, crimson, cyan, darkblue, darkcyan, darkgoldenrod, darkgray, darkgrey, darkgreen, darkkhaki, darkmagenta, darkolivegreen, darkorange, darkorchid, darkred, darksalmon, darkseagreen, darkslateblue, darkslategray, darkslategrey, darkturquoise, darkviolet, deeppink, deepskyblue, dimgray, dimgrey, dodgerblue, firebrick, floralwhite, forestgreen, fuchsia, gainsboro, ghostwhite, gold, goldenrod, gray, grey, green, greenyellow, honeydew, hotpink, indianred, indigo, ivory, khaki, lavender, lavenderblush, lawngreen, lemonchiffon, lightblue, lightcoral, lightcyan, lightgoldenrodyellow, lightgray, lightgrey, lightgreen, lightpink, lightsalmon, lightseagreen, lightskyblue, lightslategray, lightslategrey, lightsteelblue, lightyellow, lime, limegreen, linen, magenta, maroon, mediumaquamarine, mediumblue, mediumorchid, mediumpurple, mediumseagreen, mediumslateblue, mediumspringgreen, mediumturquoise, mediumvioletred, midnightblue, mintcream, mistyrose, moccasin, navajowhite, navy, oldlace, olive, olivedrab, orange, orangered, orchid, palegoldenrod, palegreen, paleturquoise, palevioletred, papayawhip, peachpuff, peru, pink, plum, powderblue, purple, red, rosybrown, royalblue, rebeccapurple, saddlebrown, salmon, sandybrown, seagreen, seashell, sienna, silver, skyblue, slateblue, slategray, slategrey, snow, springgreen, steelblue, tan, teal, thistle, tomato, turquoise, violet, wheat, white, whitesmoke, yellow, yellowgreen Returns ------- str """ return self["bordercolor"] @bordercolor.setter def bordercolor(self, val): self["bordercolor"] = val # borderwidth # ----------- @property def borderwidth(self): """ Sets the width (in px) or the border enclosing this color bar. 
The 'borderwidth' property is a number and may be specified as: - An int or float in the interval [0, inf] Returns ------- int|float """ return self["borderwidth"] @borderwidth.setter def borderwidth(self, val): self["borderwidth"] = val # dtick # ----- @property def dtick(self): """ Sets the step in-between ticks on this axis. Use with `tick0`. Must be a positive number, or special strings available to "log" and "date" axes. If the axis `type` is "log", then ticks are set every 10^(n*dtick) where n is the tick number. For example, to set a tick mark at 1, 10, 100, 1000, ... set dtick to 1. To set tick marks at 1, 100, 10000, ... set dtick to 2. To set tick marks at 1, 5, 25, 125, 625, 3125, ... set dtick to log_10(5), or 0.69897000433. "log" has several special values; "L<f>", where `f` is a positive number, gives ticks linearly spaced in value (but not position). For example `tick0` = 0.1, `dtick` = "L0.5" will put ticks at 0.1, 0.6, 1.1, 1.6 etc. To show powers of 10 plus small digits between, use "D1" (all digits) or "D2" (only 2 and 5). `tick0` is ignored for "D1" and "D2". If the axis `type` is "date", then you must convert the time to milliseconds. For example, to set the interval between ticks to one day, set `dtick` to 86400000.0. "date" also has special values "M<n>" gives ticks spaced by a number of months. `n` must be a positive integer. To set ticks on the 15th of every third month, set `tick0` to "2000-01-15" and `dtick` to "M3". To set ticks every 4 years, set `dtick` to "M48" The 'dtick' property accepts values of any type Returns ------- Any """ return self["dtick"] @dtick.setter def dtick(self, val): self["dtick"] = val # exponentformat # -------------- @property def exponentformat(self): """ Determines a formatting rule for the tick exponents. For example, consider the number 1,000,000,000. If "none", it appears as 1,000,000,000. If "e", 1e+9. If "E", 1E+9. If "power", 1x10^9 (with 9 in a super script). If "SI", 1G. If "B", 1B. 
The 'exponentformat' property is an enumeration that may be specified as: - One of the following enumeration values: ['none', 'e', 'E', 'power', 'SI', 'B'] Returns ------- Any """ return self["exponentformat"] @exponentformat.setter def exponentformat(self, val): self["exponentformat"] = val # len # --- @property def len(self): """ Sets the length of the color bar This measure excludes the padding of both ends. That is, the color bar length is this length minus the padding on both ends. The 'len' property is a number and may be specified as: - An int or float in the interval [0, inf] Returns ------- int|float """ return self["len"] @len.setter def len(self, val): self["len"] = val # lenmode # ------- @property def lenmode(self): """ Determines whether this color bar's length (i.e. the measure in the color variation direction) is set in units of plot "fraction" or in *pixels. Use `len` to set the value. The 'lenmode' property is an enumeration that may be specified as: - One of the following enumeration values: ['fraction', 'pixels'] Returns ------- Any """ return self["lenmode"] @lenmode.setter def lenmode(self, val): self["lenmode"] = val # minexponent # ----------- @property def minexponent(self): """ Hide SI prefix for 10^n if |n| is below this number. This only has an effect when `tickformat` is "SI" or "B". The 'minexponent' property is a number and may be specified as: - An int or float in the interval [0, inf] Returns ------- int|float """ return self["minexponent"] @minexponent.setter def minexponent(self, val): self["minexponent"] = val # nticks # ------ @property def nticks(self): """ Specifies the maximum number of ticks for the particular axis. The actual number of ticks will be chosen automatically to be less than or equal to `nticks`. Has an effect only if `tickmode` is set to "auto". 
The 'nticks' property is a integer and may be specified as: - An int (or float that will be cast to an int) in the interval [0, 9223372036854775807] Returns ------- int """ return self["nticks"] @nticks.setter def nticks(self, val): self["nticks"] = val # orientation # ----------- @property def orientation(self): """ Sets the orientation of the colorbar. The 'orientation' property is an enumeration that may be specified as: - One of the following enumeration values: ['h', 'v'] Returns ------- Any """ return self["orientation"] @orientation.setter def orientation(self, val): self["orientation"] = val # outlinecolor # ------------ @property def outlinecolor(self): """ Sets the axis line color. The 'outlinecolor' property is a color and may be specified as: - A hex string (e.g. '#ff0000') - An rgb/rgba string (e.g. 'rgb(255,0,0)') - An hsl/hsla string (e.g. 'hsl(0,100%,50%)') - An hsv/hsva string (e.g. 'hsv(0,100%,100%)') - A named CSS color: aliceblue, antiquewhite, aqua, aquamarine, azure, beige, bisque, black, blanchedalmond, blue, blueviolet, brown, burlywood, cadetblue, chartreuse, chocolate, coral, cornflowerblue, cornsilk, crimson, cyan, darkblue, darkcyan, darkgoldenrod, darkgray, darkgrey, darkgreen, darkkhaki, darkmagenta, darkolivegreen, darkorange, darkorchid, darkred, darksalmon, darkseagreen, darkslateblue, darkslategray, darkslategrey, darkturquoise, darkviolet, deeppink, deepskyblue, dimgray, dimgrey, dodgerblue, firebrick, floralwhite, forestgreen, fuchsia, gainsboro, ghostwhite, gold, goldenrod, gray, grey, green, greenyellow, honeydew, hotpink, indianred, indigo, ivory, khaki, lavender, lavenderblush, lawngreen, lemonchiffon, lightblue, lightcoral, lightcyan, lightgoldenrodyellow, lightgray, lightgrey, lightgreen, lightpink, lightsalmon, lightseagreen, lightskyblue, lightslategray, lightslategrey, lightsteelblue, lightyellow, lime, limegreen, linen, magenta, maroon, mediumaquamarine, mediumblue, mediumorchid, mediumpurple, mediumseagreen, 
mediumslateblue, mediumspringgreen, mediumturquoise, mediumvioletred, midnightblue, mintcream, mistyrose, moccasin, navajowhite, navy, oldlace, olive, olivedrab, orange, orangered, orchid, palegoldenrod, palegreen, paleturquoise, palevioletred, papayawhip, peachpuff, peru, pink, plum, powderblue, purple, red, rosybrown, royalblue, rebeccapurple, saddlebrown, salmon, sandybrown, seagreen, seashell, sienna, silver, skyblue, slateblue, slategray, slategrey, snow, springgreen, steelblue, tan, teal, thistle, tomato, turquoise, violet, wheat, white, whitesmoke, yellow, yellowgreen Returns ------- str """ return self["outlinecolor"] @outlinecolor.setter def outlinecolor(self, val): self["outlinecolor"] = val # outlinewidth # ------------ @property def outlinewidth(self): """ Sets the width (in px) of the axis line. The 'outlinewidth' property is a number and may be specified as: - An int or float in the interval [0, inf] Returns ------- int|float """ return self["outlinewidth"] @outlinewidth.setter def outlinewidth(self, val): self["outlinewidth"] = val # separatethousands # ----------------- @property def separatethousands(self): """ If "true", even 4-digit integers are separated The 'separatethousands' property must be specified as a bool (either True, or False) Returns ------- bool """ return self["separatethousands"] @separatethousands.setter def separatethousands(self, val): self["separatethousands"] = val # showexponent # ------------ @property def showexponent(self): """ If "all", all exponents are shown besides their significands. If "first", only the exponent of the first tick is shown. If "last", only the exponent of the last tick is shown. If "none", no exponents appear. 
The 'showexponent' property is an enumeration that may be specified as: - One of the following enumeration values: ['all', 'first', 'last', 'none'] Returns ------- Any """ return self["showexponent"] @showexponent.setter def showexponent(self, val): self["showexponent"] = val # showticklabels # -------------- @property def showticklabels(self): """ Determines whether or not the tick labels are drawn. The 'showticklabels' property must be specified as a bool (either True, or False) Returns ------- bool """ return self["showticklabels"] @showticklabels.setter def showticklabels(self, val): self["showticklabels"] = val # showtickprefix # -------------- @property def showtickprefix(self): """ If "all", all tick labels are displayed with a prefix. If "first", only the first tick is displayed with a prefix. If "last", only the last tick is displayed with a suffix. If "none", tick prefixes are hidden. The 'showtickprefix' property is an enumeration that may be specified as: - One of the following enumeration values: ['all', 'first', 'last', 'none'] Returns ------- Any """ return self["showtickprefix"] @showtickprefix.setter def showtickprefix(self, val): self["showtickprefix"] = val # showticksuffix # -------------- @property def showticksuffix(self): """ Same as `showtickprefix` but for tick suffixes. The 'showticksuffix' property is an enumeration that may be specified as: - One of the following enumeration values: ['all', 'first', 'last', 'none'] Returns ------- Any """ return self["showticksuffix"] @showticksuffix.setter def showticksuffix(self, val): self["showticksuffix"] = val # thickness # --------- @property def thickness(self): """ Sets the thickness of the color bar This measure excludes the size of the padding, ticks and labels. 
The 'thickness' property is a number and may be specified as: - An int or float in the interval [0, inf] Returns ------- int|float """ return self["thickness"] @thickness.setter def thickness(self, val): self["thickness"] = val # thicknessmode # ------------- @property def thicknessmode(self): """ Determines whether this color bar's thickness (i.e. the measure in the constant color direction) is set in units of plot "fraction" or in "pixels". Use `thickness` to set the value. The 'thicknessmode' property is an enumeration that may be specified as: - One of the following enumeration values: ['fraction', 'pixels'] Returns ------- Any """ return self["thicknessmode"] @thicknessmode.setter def thicknessmode(self, val): self["thicknessmode"] = val # tick0 # ----- @property def tick0(self): """ Sets the placement of the first tick on this axis. Use with `dtick`. If the axis `type` is "log", then you must take the log of your starting tick (e.g. to set the starting tick to 100, set the `tick0` to 2) except when `dtick`=*L<f>* (see `dtick` for more info). If the axis `type` is "date", it should be a date string, like date data. If the axis `type` is "category", it should be a number, using the scale where each category is assigned a serial number from zero in the order it appears. The 'tick0' property accepts values of any type Returns ------- Any """ return self["tick0"] @tick0.setter def tick0(self, val): self["tick0"] = val # tickangle # --------- @property def tickangle(self): """ Sets the angle of the tick labels with respect to the horizontal. For example, a `tickangle` of -90 draws the tick labels vertically. The 'tickangle' property is a angle (in degrees) that may be specified as a number between -180 and 180. Numeric values outside this range are converted to the equivalent value (e.g. 270 is converted to -90). 
Returns ------- int|float """ return self["tickangle"] @tickangle.setter def tickangle(self, val): self["tickangle"] = val # tickcolor # --------- @property def tickcolor(self): """ Sets the tick color. The 'tickcolor' property is a color and may be specified as: - A hex string (e.g. '#ff0000') - An rgb/rgba string (e.g. 'rgb(255,0,0)') - An hsl/hsla string (e.g. 'hsl(0,100%,50%)') - An hsv/hsva string (e.g. 'hsv(0,100%,100%)') - A named CSS color: aliceblue, antiquewhite, aqua, aquamarine, azure, beige, bisque, black, blanchedalmond, blue, blueviolet, brown, burlywood, cadetblue, chartreuse, chocolate, coral, cornflowerblue, cornsilk, crimson, cyan, darkblue, darkcyan, darkgoldenrod, darkgray, darkgrey, darkgreen, darkkhaki, darkmagenta, darkolivegreen, darkorange, darkorchid, darkred, darksalmon, darkseagreen, darkslateblue, darkslategray, darkslategrey, darkturquoise, darkviolet, deeppink, deepskyblue, dimgray, dimgrey, dodgerblue, firebrick, floralwhite, forestgreen, fuchsia, gainsboro, ghostwhite, gold, goldenrod, gray, grey, green, greenyellow, honeydew, hotpink, indianred, indigo, ivory, khaki, lavender, lavenderblush, lawngreen, lemonchiffon, lightblue, lightcoral, lightcyan, lightgoldenrodyellow, lightgray, lightgrey, lightgreen, lightpink, lightsalmon, lightseagreen, lightskyblue, lightslategray, lightslategrey, lightsteelblue, lightyellow, lime, limegreen, linen, magenta, maroon, mediumaquamarine, mediumblue, mediumorchid, mediumpurple, mediumseagreen, mediumslateblue, mediumspringgreen, mediumturquoise, mediumvioletred, midnightblue, mintcream, mistyrose, moccasin, navajowhite, navy, oldlace, olive, olivedrab, orange, orangered, orchid, palegoldenrod, palegreen, paleturquoise, palevioletred, papayawhip, peachpuff, peru, pink, plum, powderblue, purple, red, rosybrown, royalblue, rebeccapurple, saddlebrown, salmon, sandybrown, seagreen, seashell, sienna, silver, skyblue, slateblue, slategray, slategrey, snow, springgreen, steelblue, tan, teal, thistle, 
tomato, turquoise, violet, wheat, white, whitesmoke, yellow, yellowgreen Returns ------- str """ return self["tickcolor"] @tickcolor.setter def tickcolor(self, val): self["tickcolor"] = val # tickfont # -------- @property def tickfont(self): """ Sets the color bar's tick label font The 'tickfont' property is an instance of Tickfont that may be specified as: - An instance of :class:`plotly.graph_objs.isosurface.colorbar.Tickfont` - A dict of string/value properties that will be passed to the Tickfont constructor Supported dict properties: color family HTML font family - the typeface that will be applied by the web browser. The web browser will only be able to apply a font if it is available on the system which it operates. Provide multiple font families, separated by commas, to indicate the preference in which to apply fonts if they aren't available on the system. The Chart Studio Cloud (at https://chart-studio.plotly.com or on-premise) generates images on a server, where only a select number of fonts are installed and supported. These include "Arial", "Balto", "Courier New", "Droid Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas One", "Old Standard TT", "Open Sans", "Overpass", "PT Sans Narrow", "Raleway", "Times New Roman". size Returns ------- plotly.graph_objs.isosurface.colorbar.Tickfont """ return self["tickfont"] @tickfont.setter def tickfont(self, val): self["tickfont"] = val # tickformat # ---------- @property def tickformat(self): """ Sets the tick label formatting rule using d3 formatting mini- languages which are very similar to those in Python. For numbers, see: https://github.com/d3/d3-format/tree/v1.4.5#d3-format. And for dates see: https://github.com/d3/d3-time- format/tree/v2.2.3#locale_format. We add two items to d3's date formatter: "%h" for half of the year as a decimal number as well as "%{n}f" for fractional seconds with n digits. 
For example, *2016-10-13 09:15:23.456* with tickformat "%H~%M~%S.%2f" would display "09~15~23.46" The 'tickformat' property is a string and must be specified as: - A string - A number that will be converted to a string Returns ------- str """ return self["tickformat"] @tickformat.setter def tickformat(self, val): self["tickformat"] = val # tickformatstops # --------------- @property def tickformatstops(self): """ The 'tickformatstops' property is a tuple of instances of Tickformatstop that may be specified as: - A list or tuple of instances of plotly.graph_objs.isosurface.colorbar.Tickformatstop - A list or tuple of dicts of string/value properties that will be passed to the Tickformatstop constructor Supported dict properties: dtickrange range [*min*, *max*], where "min", "max" - dtick values which describe some zoom level, it is possible to omit "min" or "max" value by passing "null" enabled Determines whether or not this stop is used. If `false`, this stop is ignored even within its `dtickrange`. name When used in a template, named items are created in the output figure in addition to any items the figure already has in this array. You can modify these items in the output figure by making your own item with `templateitemname` matching this `name` alongside your modifications (including `visible: false` or `enabled: false` to hide it). Has no effect outside of a template. templateitemname Used to refer to a named item in this array in the template. Named items from the template will be created even without a matching item in the input figure, but you can modify one by making an item with `templateitemname` matching its `name`, alongside your modifications (including `visible: false` or `enabled: false` to hide it). If there is no template or no matching item, this item will be hidden unless you explicitly show it with `visible: true`. 
value string - dtickformat for described zoom level, the same as "tickformat" Returns ------- tuple[plotly.graph_objs.isosurface.colorbar.Tickformatstop] """ return self["tickformatstops"] @tickformatstops.setter def tickformatstops(self, val): self["tickformatstops"] = val # tickformatstopdefaults # ---------------------- @property def tickformatstopdefaults(self): """ When used in a template (as layout.template.data.isosurface.col orbar.tickformatstopdefaults), sets the default property values to use for elements of isosurface.colorbar.tickformatstops The 'tickformatstopdefaults' property is an instance of Tickformatstop that may be specified as: - An instance of :class:`plotly.graph_objs.isosurface.colorbar.Tickformatstop` - A dict of string/value properties that will be passed to the Tickformatstop constructor Supported dict properties: Returns ------- plotly.graph_objs.isosurface.colorbar.Tickformatstop """ return self["tickformatstopdefaults"] @tickformatstopdefaults.setter def tickformatstopdefaults(self, val): self["tickformatstopdefaults"] = val # ticklabeloverflow # ----------------- @property def ticklabeloverflow(self): """ Determines how we handle tick labels that would overflow either the graph div or the domain of the axis. The default value for inside tick labels is *hide past domain*. In other cases the default is *hide past div*. The 'ticklabeloverflow' property is an enumeration that may be specified as: - One of the following enumeration values: ['allow', 'hide past div', 'hide past domain'] Returns ------- Any """ return self["ticklabeloverflow"] @ticklabeloverflow.setter def ticklabeloverflow(self, val): self["ticklabeloverflow"] = val # ticklabelposition # ----------------- @property def ticklabelposition(self): """ Determines where tick labels are drawn relative to the ticks. Left and right options are used when `orientation` is "h", top and bottom when `orientation` is "v". 
The 'ticklabelposition' property is an enumeration that may be specified as: - One of the following enumeration values: ['outside', 'inside', 'outside top', 'inside top', 'outside left', 'inside left', 'outside right', 'inside right', 'outside bottom', 'inside bottom'] Returns ------- Any """ return self["ticklabelposition"] @ticklabelposition.setter def ticklabelposition(self, val): self["ticklabelposition"] = val # ticklabelstep # ------------- @property def ticklabelstep(self): """ Sets the spacing between tick labels as compared to the spacing between ticks. A value of 1 (default) means each tick gets a label. A value of 2 means shows every 2nd label. A larger value n means only every nth tick is labeled. `tick0` determines which labels are shown. Not implemented for axes with `type` "log" or "multicategory", or when `tickmode` is "array". The 'ticklabelstep' property is a integer and may be specified as: - An int (or float that will be cast to an int) in the interval [1, 9223372036854775807] Returns ------- int """ return self["ticklabelstep"] @ticklabelstep.setter def ticklabelstep(self, val): self["ticklabelstep"] = val # ticklen # ------- @property def ticklen(self): """ Sets the tick length (in px). The 'ticklen' property is a number and may be specified as: - An int or float in the interval [0, inf] Returns ------- int|float """ return self["ticklen"] @ticklen.setter def ticklen(self, val): self["ticklen"] = val # tickmode # -------- @property def tickmode(self): """ Sets the tick mode for this axis. If "auto", the number of ticks is set via `nticks`. If "linear", the placement of the ticks is determined by a starting position `tick0` and a tick step `dtick` ("linear" is the default value if `tick0` and `dtick` are provided). If "array", the placement of the ticks is set via `tickvals` and the tick text is `ticktext`. ("array" is the default value if `tickvals` is provided). 
The 'tickmode' property is an enumeration that may be specified as: - One of the following enumeration values: ['auto', 'linear', 'array'] Returns ------- Any """ return self["tickmode"] @tickmode.setter def tickmode(self, val): self["tickmode"] = val # tickprefix # ---------- @property def tickprefix(self): """ Sets a tick label prefix. The 'tickprefix' property is a string and must be specified as: - A string - A number that will be converted to a string Returns ------- str """ return self["tickprefix"] @tickprefix.setter def tickprefix(self, val): self["tickprefix"] = val # ticks # ----- @property def ticks(self): """ Determines whether ticks are drawn or not. If "", this axis' ticks are not drawn. If "outside" ("inside"), this axis' are drawn outside (inside) the axis lines. The 'ticks' property is an enumeration that may be specified as: - One of the following enumeration values: ['outside', 'inside', ''] Returns ------- Any """ return self["ticks"] @ticks.setter def ticks(self, val): self["ticks"] = val # ticksuffix # ---------- @property def ticksuffix(self): """ Sets a tick label suffix. The 'ticksuffix' property is a string and must be specified as: - A string - A number that will be converted to a string Returns ------- str """ return self["ticksuffix"] @ticksuffix.setter def ticksuffix(self, val): self["ticksuffix"] = val # ticktext # -------- @property def ticktext(self): """ Sets the text displayed at the ticks position via `tickvals`. Only has an effect if `tickmode` is set to "array". Used with `tickvals`. The 'ticktext' property is an array that may be specified as a tuple, list, numpy array, or pandas Series Returns ------- numpy.ndarray """ return self["ticktext"] @ticktext.setter def ticktext(self, val): self["ticktext"] = val # ticktextsrc # ----------- @property def ticktextsrc(self): """ Sets the source reference on Chart Studio Cloud for `ticktext`. 
The 'ticktextsrc' property must be specified as a string or as a plotly.grid_objs.Column object Returns ------- str """ return self["ticktextsrc"] @ticktextsrc.setter def ticktextsrc(self, val): self["ticktextsrc"] = val # tickvals # -------- @property def tickvals(self): """ Sets the values at which ticks on this axis appear. Only has an effect if `tickmode` is set to "array". Used with `ticktext`. The 'tickvals' property is an array that may be specified as a tuple, list, numpy array, or pandas Series Returns ------- numpy.ndarray """ return self["tickvals"] @tickvals.setter def tickvals(self, val): self["tickvals"] = val # tickvalssrc # ----------- @property def tickvalssrc(self): """ Sets the source reference on Chart Studio Cloud for `tickvals`. The 'tickvalssrc' property must be specified as a string or as a plotly.grid_objs.Column object Returns ------- str """ return self["tickvalssrc"] @tickvalssrc.setter def tickvalssrc(self, val): self["tickvalssrc"] = val # tickwidth # --------- @property def tickwidth(self): """ Sets the tick width (in px). The 'tickwidth' property is a number and may be specified as: - An int or float in the interval [0, inf] Returns ------- int|float """ return self["tickwidth"] @tickwidth.setter def tickwidth(self, val): self["tickwidth"] = val # title # ----- @property def title(self): """ The 'title' property is an instance of Title that may be specified as: - An instance of :class:`plotly.graph_objs.isosurface.colorbar.Title` - A dict of string/value properties that will be passed to the Title constructor Supported dict properties: font Sets this color bar's title font. Note that the title's font used to be set by the now deprecated `titlefont` attribute. side Determines the location of color bar's title with respect to the color bar. Defaults to "top" when `orientation` if "v" and defaults to "right" when `orientation` if "h". Note that the title's location used to be set by the now deprecated `titleside` attribute. 
text Sets the title of the color bar. Note that before the existence of `title.text`, the title's contents used to be defined as the `title` attribute itself. This behavior has been deprecated. Returns ------- plotly.graph_objs.isosurface.colorbar.Title """ return self["title"] @title.setter def title(self, val): self["title"] = val # titlefont # --------- @property def titlefont(self): """ Deprecated: Please use isosurface.colorbar.title.font instead. Sets this color bar's title font. Note that the title's font used to be set by the now deprecated `titlefont` attribute. The 'font' property is an instance of Font that may be specified as: - An instance of :class:`plotly.graph_objs.isosurface.colorbar.title.Font` - A dict of string/value properties that will be passed to the Font constructor Supported dict properties: color family HTML font family - the typeface that will be applied by the web browser. The web browser will only be able to apply a font if it is available on the system which it operates. Provide multiple font families, separated by commas, to indicate the preference in which to apply fonts if they aren't available on the system. The Chart Studio Cloud (at https://chart-studio.plotly.com or on-premise) generates images on a server, where only a select number of fonts are installed and supported. These include "Arial", "Balto", "Courier New", "Droid Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas One", "Old Standard TT", "Open Sans", "Overpass", "PT Sans Narrow", "Raleway", "Times New Roman". size Returns ------- """ return self["titlefont"] @titlefont.setter def titlefont(self, val): self["titlefont"] = val # titleside # --------- @property def titleside(self): """ Deprecated: Please use isosurface.colorbar.title.side instead. Determines the location of color bar's title with respect to the color bar. Defaults to "top" when `orientation` if "v" and defaults to "right" when `orientation` if "h". 
Note that the title's location used to be set by the now deprecated `titleside` attribute. The 'side' property is an enumeration that may be specified as: - One of the following enumeration values: ['right', 'top', 'bottom'] Returns ------- """ return self["titleside"] @titleside.setter def titleside(self, val): self["titleside"] = val # x # - @property def x(self): """ Sets the x position of the color bar (in plot fraction). Defaults to 1.02 when `orientation` is "v" and 0.5 when `orientation` is "h". The 'x' property is a number and may be specified as: - An int or float in the interval [-2, 3] Returns ------- int|float """ return self["x"] @x.setter def x(self, val): self["x"] = val # xanchor # ------- @property def xanchor(self): """ Sets this color bar's horizontal position anchor. This anchor binds the `x` position to the "left", "center" or "right" of the color bar. Defaults to "left" when `orientation` is "v" and "center" when `orientation` is "h". The 'xanchor' property is an enumeration that may be specified as: - One of the following enumeration values: ['left', 'center', 'right'] Returns ------- Any """ return self["xanchor"] @xanchor.setter def xanchor(self, val): self["xanchor"] = val # xpad # ---- @property def xpad(self): """ Sets the amount of padding (in px) along the x direction. The 'xpad' property is a number and may be specified as: - An int or float in the interval [0, inf] Returns ------- int|float """ return self["xpad"] @xpad.setter def xpad(self, val): self["xpad"] = val # y # - @property def y(self): """ Sets the y position of the color bar (in plot fraction). Defaults to 0.5 when `orientation` is "v" and 1.02 when `orientation` is "h". 
The 'y' property is a number and may be specified as: - An int or float in the interval [-2, 3] Returns ------- int|float """ return self["y"] @y.setter def y(self, val): self["y"] = val # yanchor # ------- @property def yanchor(self): """ Sets this color bar's vertical position anchor This anchor binds the `y` position to the "top", "middle" or "bottom" of the color bar. Defaults to "middle" when `orientation` is "v" and "bottom" when `orientation` is "h". The 'yanchor' property is an enumeration that may be specified as: - One of the following enumeration values: ['top', 'middle', 'bottom'] Returns ------- Any """ return self["yanchor"] @yanchor.setter def yanchor(self, val): self["yanchor"] = val # ypad # ---- @property def ypad(self): """ Sets the amount of padding (in px) along the y direction. The 'ypad' property is a number and may be specified as: - An int or float in the interval [0, inf] Returns ------- int|float """ return self["ypad"] @ypad.setter def ypad(self, val): self["ypad"] = val # Self properties description # --------------------------- @property def _prop_descriptions(self): return """\ bgcolor Sets the color of padded area. bordercolor Sets the axis line color. borderwidth Sets the width (in px) or the border enclosing this color bar. dtick Sets the step in-between ticks on this axis. Use with `tick0`. Must be a positive number, or special strings available to "log" and "date" axes. If the axis `type` is "log", then ticks are set every 10^(n*dtick) where n is the tick number. For example, to set a tick mark at 1, 10, 100, 1000, ... set dtick to 1. To set tick marks at 1, 100, 10000, ... set dtick to 2. To set tick marks at 1, 5, 25, 125, 625, 3125, ... set dtick to log_10(5), or 0.69897000433. "log" has several special values; "L<f>", where `f` is a positive number, gives ticks linearly spaced in value (but not position). For example `tick0` = 0.1, `dtick` = "L0.5" will put ticks at 0.1, 0.6, 1.1, 1.6 etc. 
To show powers of 10 plus small digits between, use "D1" (all digits) or "D2" (only 2 and 5). `tick0` is ignored for "D1" and "D2". If the axis `type` is "date", then you must convert the time to milliseconds. For example, to set the interval between ticks to one day, set `dtick` to 86400000.0. "date" also has special values "M<n>" gives ticks spaced by a number of months. `n` must be a positive integer. To set ticks on the 15th of every third month, set `tick0` to "2000-01-15" and `dtick` to "M3". To set ticks every 4 years, set `dtick` to "M48" exponentformat Determines a formatting rule for the tick exponents. For example, consider the number 1,000,000,000. If "none", it appears as 1,000,000,000. If "e", 1e+9. If "E", 1E+9. If "power", 1x10^9 (with 9 in a super script). If "SI", 1G. If "B", 1B. len Sets the length of the color bar This measure excludes the padding of both ends. That is, the color bar length is this length minus the padding on both ends. lenmode Determines whether this color bar's length (i.e. the measure in the color variation direction) is set in units of plot "fraction" or in *pixels. Use `len` to set the value. minexponent Hide SI prefix for 10^n if |n| is below this number. This only has an effect when `tickformat` is "SI" or "B". nticks Specifies the maximum number of ticks for the particular axis. The actual number of ticks will be chosen automatically to be less than or equal to `nticks`. Has an effect only if `tickmode` is set to "auto". orientation Sets the orientation of the colorbar. outlinecolor Sets the axis line color. outlinewidth Sets the width (in px) of the axis line. separatethousands If "true", even 4-digit integers are separated showexponent If "all", all exponents are shown besides their significands. If "first", only the exponent of the first tick is shown. If "last", only the exponent of the last tick is shown. If "none", no exponents appear. showticklabels Determines whether or not the tick labels are drawn. 
showtickprefix If "all", all tick labels are displayed with a prefix. If "first", only the first tick is displayed with a prefix. If "last", only the last tick is displayed with a suffix. If "none", tick prefixes are hidden. showticksuffix Same as `showtickprefix` but for tick suffixes. thickness Sets the thickness of the color bar This measure excludes the size of the padding, ticks and labels. thicknessmode Determines whether this color bar's thickness (i.e. the measure in the constant color direction) is set in units of plot "fraction" or in "pixels". Use `thickness` to set the value. tick0 Sets the placement of the first tick on this axis. Use with `dtick`. If the axis `type` is "log", then you must take the log of your starting tick (e.g. to set the starting tick to 100, set the `tick0` to 2) except when `dtick`=*L<f>* (see `dtick` for more info). If the axis `type` is "date", it should be a date string, like date data. If the axis `type` is "category", it should be a number, using the scale where each category is assigned a serial number from zero in the order it appears. tickangle Sets the angle of the tick labels with respect to the horizontal. For example, a `tickangle` of -90 draws the tick labels vertically. tickcolor Sets the tick color. tickfont Sets the color bar's tick label font tickformat Sets the tick label formatting rule using d3 formatting mini-languages which are very similar to those in Python. For numbers, see: https://github.com/d3/d3-format/tree/v1.4.5#d3-format. And for dates see: https://github.com/d3/d3-time- format/tree/v2.2.3#locale_format. We add two items to d3's date formatter: "%h" for half of the year as a decimal number as well as "%{n}f" for fractional seconds with n digits. 
For example, *2016-10-13 09:15:23.456* with tickformat "%H~%M~%S.%2f" would display "09~15~23.46" tickformatstops A tuple of :class:`plotly.graph_objects.isosurface.colo rbar.Tickformatstop` instances or dicts with compatible properties tickformatstopdefaults When used in a template (as layout.template.data.isosur face.colorbar.tickformatstopdefaults), sets the default property values to use for elements of isosurface.colorbar.tickformatstops ticklabeloverflow Determines how we handle tick labels that would overflow either the graph div or the domain of the axis. The default value for inside tick labels is *hide past domain*. In other cases the default is *hide past div*. ticklabelposition Determines where tick labels are drawn relative to the ticks. Left and right options are used when `orientation` is "h", top and bottom when `orientation` is "v". ticklabelstep Sets the spacing between tick labels as compared to the spacing between ticks. A value of 1 (default) means each tick gets a label. A value of 2 means shows every 2nd label. A larger value n means only every nth tick is labeled. `tick0` determines which labels are shown. Not implemented for axes with `type` "log" or "multicategory", or when `tickmode` is "array". ticklen Sets the tick length (in px). tickmode Sets the tick mode for this axis. If "auto", the number of ticks is set via `nticks`. If "linear", the placement of the ticks is determined by a starting position `tick0` and a tick step `dtick` ("linear" is the default value if `tick0` and `dtick` are provided). If "array", the placement of the ticks is set via `tickvals` and the tick text is `ticktext`. ("array" is the default value if `tickvals` is provided). tickprefix Sets a tick label prefix. ticks Determines whether ticks are drawn or not. If "", this axis' ticks are not drawn. If "outside" ("inside"), this axis' are drawn outside (inside) the axis lines. ticksuffix Sets a tick label suffix. 
ticktext Sets the text displayed at the ticks position via `tickvals`. Only has an effect if `tickmode` is set to "array". Used with `tickvals`. ticktextsrc Sets the source reference on Chart Studio Cloud for `ticktext`. tickvals Sets the values at which ticks on this axis appear. Only has an effect if `tickmode` is set to "array". Used with `ticktext`. tickvalssrc Sets the source reference on Chart Studio Cloud for `tickvals`. tickwidth Sets the tick width (in px). title :class:`plotly.graph_objects.isosurface.colorbar.Title` instance or dict with compatible properties titlefont Deprecated: Please use isosurface.colorbar.title.font instead. Sets this color bar's title font. Note that the title's font used to be set by the now deprecated `titlefont` attribute. titleside Deprecated: Please use isosurface.colorbar.title.side instead. Determines the location of color bar's title with respect to the color bar. Defaults to "top" when `orientation` if "v" and defaults to "right" when `orientation` if "h". Note that the title's location used to be set by the now deprecated `titleside` attribute. x Sets the x position of the color bar (in plot fraction). Defaults to 1.02 when `orientation` is "v" and 0.5 when `orientation` is "h". xanchor Sets this color bar's horizontal position anchor. This anchor binds the `x` position to the "left", "center" or "right" of the color bar. Defaults to "left" when `orientation` is "v" and "center" when `orientation` is "h". xpad Sets the amount of padding (in px) along the x direction. y Sets the y position of the color bar (in plot fraction). Defaults to 0.5 when `orientation` is "v" and 1.02 when `orientation` is "h". yanchor Sets this color bar's vertical position anchor This anchor binds the `y` position to the "top", "middle" or "bottom" of the color bar. Defaults to "middle" when `orientation` is "v" and "bottom" when `orientation` is "h". ypad Sets the amount of padding (in px) along the y direction. 
""" _mapped_properties = { "titlefont": ("title", "font"), "titleside": ("title", "side"), } def __init__( self, arg=None, bgcolor=None, bordercolor=None, borderwidth=None, dtick=None, exponentformat=None, len=None, lenmode=None, minexponent=None, nticks=None, orientation=None, outlinecolor=None, outlinewidth=None, separatethousands=None, showexponent=None, showticklabels=None, showtickprefix=None, showticksuffix=None, thickness=None, thicknessmode=None, tick0=None, tickangle=None, tickcolor=None, tickfont=None, tickformat=None, tickformatstops=None, tickformatstopdefaults=None, ticklabeloverflow=None, ticklabelposition=None, ticklabelstep=None, ticklen=None, tickmode=None, tickprefix=None, ticks=None, ticksuffix=None, ticktext=None, ticktextsrc=None, tickvals=None, tickvalssrc=None, tickwidth=None, title=None, titlefont=None, titleside=None, x=None, xanchor=None, xpad=None, y=None, yanchor=None, ypad=None, **kwargs ): """ Construct a new ColorBar object Parameters ---------- arg dict of properties compatible with this constructor or an instance of :class:`plotly.graph_objs.isosurface.ColorBar` bgcolor Sets the color of padded area. bordercolor Sets the axis line color. borderwidth Sets the width (in px) or the border enclosing this color bar. dtick Sets the step in-between ticks on this axis. Use with `tick0`. Must be a positive number, or special strings available to "log" and "date" axes. If the axis `type` is "log", then ticks are set every 10^(n*dtick) where n is the tick number. For example, to set a tick mark at 1, 10, 100, 1000, ... set dtick to 1. To set tick marks at 1, 100, 10000, ... set dtick to 2. To set tick marks at 1, 5, 25, 125, 625, 3125, ... set dtick to log_10(5), or 0.69897000433. "log" has several special values; "L<f>", where `f` is a positive number, gives ticks linearly spaced in value (but not position). For example `tick0` = 0.1, `dtick` = "L0.5" will put ticks at 0.1, 0.6, 1.1, 1.6 etc. 
To show powers of 10 plus small digits between, use "D1" (all digits) or "D2" (only 2 and 5). `tick0` is ignored for "D1" and "D2". If the axis `type` is "date", then you must convert the time to milliseconds. For example, to set the interval between ticks to one day, set `dtick` to 86400000.0. "date" also has special values "M<n>" gives ticks spaced by a number of months. `n` must be a positive integer. To set ticks on the 15th of every third month, set `tick0` to "2000-01-15" and `dtick` to "M3". To set ticks every 4 years, set `dtick` to "M48" exponentformat Determines a formatting rule for the tick exponents. For example, consider the number 1,000,000,000. If "none", it appears as 1,000,000,000. If "e", 1e+9. If "E", 1E+9. If "power", 1x10^9 (with 9 in a super script). If "SI", 1G. If "B", 1B. len Sets the length of the color bar This measure excludes the padding of both ends. That is, the color bar length is this length minus the padding on both ends. lenmode Determines whether this color bar's length (i.e. the measure in the color variation direction) is set in units of plot "fraction" or in *pixels. Use `len` to set the value. minexponent Hide SI prefix for 10^n if |n| is below this number. This only has an effect when `tickformat` is "SI" or "B". nticks Specifies the maximum number of ticks for the particular axis. The actual number of ticks will be chosen automatically to be less than or equal to `nticks`. Has an effect only if `tickmode` is set to "auto". orientation Sets the orientation of the colorbar. outlinecolor Sets the axis line color. outlinewidth Sets the width (in px) of the axis line. separatethousands If "true", even 4-digit integers are separated showexponent If "all", all exponents are shown besides their significands. If "first", only the exponent of the first tick is shown. If "last", only the exponent of the last tick is shown. If "none", no exponents appear. showticklabels Determines whether or not the tick labels are drawn. 
showtickprefix If "all", all tick labels are displayed with a prefix. If "first", only the first tick is displayed with a prefix. If "last", only the last tick is displayed with a suffix. If "none", tick prefixes are hidden. showticksuffix Same as `showtickprefix` but for tick suffixes. thickness Sets the thickness of the color bar This measure excludes the size of the padding, ticks and labels. thicknessmode Determines whether this color bar's thickness (i.e. the measure in the constant color direction) is set in units of plot "fraction" or in "pixels". Use `thickness` to set the value. tick0 Sets the placement of the first tick on this axis. Use with `dtick`. If the axis `type` is "log", then you must take the log of your starting tick (e.g. to set the starting tick to 100, set the `tick0` to 2) except when `dtick`=*L<f>* (see `dtick` for more info). If the axis `type` is "date", it should be a date string, like date data. If the axis `type` is "category", it should be a number, using the scale where each category is assigned a serial number from zero in the order it appears. tickangle Sets the angle of the tick labels with respect to the horizontal. For example, a `tickangle` of -90 draws the tick labels vertically. tickcolor Sets the tick color. tickfont Sets the color bar's tick label font tickformat Sets the tick label formatting rule using d3 formatting mini-languages which are very similar to those in Python. For numbers, see: https://github.com/d3/d3-format/tree/v1.4.5#d3-format. And for dates see: https://github.com/d3/d3-time- format/tree/v2.2.3#locale_format. We add two items to d3's date formatter: "%h" for half of the year as a decimal number as well as "%{n}f" for fractional seconds with n digits. 
For example, *2016-10-13 09:15:23.456* with tickformat "%H~%M~%S.%2f" would display "09~15~23.46" tickformatstops A tuple of :class:`plotly.graph_objects.isosurface.colo rbar.Tickformatstop` instances or dicts with compatible properties tickformatstopdefaults When used in a template (as layout.template.data.isosur face.colorbar.tickformatstopdefaults), sets the default property values to use for elements of isosurface.colorbar.tickformatstops ticklabeloverflow Determines how we handle tick labels that would overflow either the graph div or the domain of the axis. The default value for inside tick labels is *hide past domain*. In other cases the default is *hide past div*. ticklabelposition Determines where tick labels are drawn relative to the ticks. Left and right options are used when `orientation` is "h", top and bottom when `orientation` is "v". ticklabelstep Sets the spacing between tick labels as compared to the spacing between ticks. A value of 1 (default) means each tick gets a label. A value of 2 means shows every 2nd label. A larger value n means only every nth tick is labeled. `tick0` determines which labels are shown. Not implemented for axes with `type` "log" or "multicategory", or when `tickmode` is "array". ticklen Sets the tick length (in px). tickmode Sets the tick mode for this axis. If "auto", the number of ticks is set via `nticks`. If "linear", the placement of the ticks is determined by a starting position `tick0` and a tick step `dtick` ("linear" is the default value if `tick0` and `dtick` are provided). If "array", the placement of the ticks is set via `tickvals` and the tick text is `ticktext`. ("array" is the default value if `tickvals` is provided). tickprefix Sets a tick label prefix. ticks Determines whether ticks are drawn or not. If "", this axis' ticks are not drawn. If "outside" ("inside"), this axis' are drawn outside (inside) the axis lines. ticksuffix Sets a tick label suffix. 
ticktext Sets the text displayed at the ticks position via `tickvals`. Only has an effect if `tickmode` is set to "array". Used with `tickvals`. ticktextsrc Sets the source reference on Chart Studio Cloud for `ticktext`. tickvals Sets the values at which ticks on this axis appear. Only has an effect if `tickmode` is set to "array". Used with `ticktext`. tickvalssrc Sets the source reference on Chart Studio Cloud for `tickvals`. tickwidth Sets the tick width (in px). title :class:`plotly.graph_objects.isosurface.colorbar.Title` instance or dict with compatible properties titlefont Deprecated: Please use isosurface.colorbar.title.font instead. Sets this color bar's title font. Note that the title's font used to be set by the now deprecated `titlefont` attribute. titleside Deprecated: Please use isosurface.colorbar.title.side instead. Determines the location of color bar's title with respect to the color bar. Defaults to "top" when `orientation` if "v" and defaults to "right" when `orientation` if "h". Note that the title's location used to be set by the now deprecated `titleside` attribute. x Sets the x position of the color bar (in plot fraction). Defaults to 1.02 when `orientation` is "v" and 0.5 when `orientation` is "h". xanchor Sets this color bar's horizontal position anchor. This anchor binds the `x` position to the "left", "center" or "right" of the color bar. Defaults to "left" when `orientation` is "v" and "center" when `orientation` is "h". xpad Sets the amount of padding (in px) along the x direction. y Sets the y position of the color bar (in plot fraction). Defaults to 0.5 when `orientation` is "v" and 1.02 when `orientation` is "h". yanchor Sets this color bar's vertical position anchor This anchor binds the `y` position to the "top", "middle" or "bottom" of the color bar. Defaults to "middle" when `orientation` is "v" and "bottom" when `orientation` is "h". ypad Sets the amount of padding (in px) along the y direction. 
Returns ------- ColorBar """ super(ColorBar, self).__init__("colorbar") if "_parent" in kwargs: self._parent = kwargs["_parent"] return # Validate arg # ------------ if arg is None: arg = {} elif isinstance(arg, self.__class__): arg = arg.to_plotly_json() elif isinstance(arg, dict): arg = _copy.copy(arg) else: raise ValueError( """\ The first argument to the plotly.graph_objs.isosurface.ColorBar constructor must be a dict or an instance of :class:`plotly.graph_objs.isosurface.ColorBar`""" ) # Handle skip_invalid # ------------------- self._skip_invalid = kwargs.pop("skip_invalid", False) self._validate = kwargs.pop("_validate", True) # Populate data dict with properties # ---------------------------------- _v = arg.pop("bgcolor", None) _v = bgcolor if bgcolor is not None else _v if _v is not None: self["bgcolor"] = _v _v = arg.pop("bordercolor", None) _v = bordercolor if bordercolor is not None else _v if _v is not None: self["bordercolor"] = _v _v = arg.pop("borderwidth", None) _v = borderwidth if borderwidth is not None else _v if _v is not None: self["borderwidth"] = _v _v = arg.pop("dtick", None) _v = dtick if dtick is not None else _v if _v is not None: self["dtick"] = _v _v = arg.pop("exponentformat", None) _v = exponentformat if exponentformat is not None else _v if _v is not None: self["exponentformat"] = _v _v = arg.pop("len", None) _v = len if len is not None else _v if _v is not None: self["len"] = _v _v = arg.pop("lenmode", None) _v = lenmode if lenmode is not None else _v if _v is not None: self["lenmode"] = _v _v = arg.pop("minexponent", None) _v = minexponent if minexponent is not None else _v if _v is not None: self["minexponent"] = _v _v = arg.pop("nticks", None) _v = nticks if nticks is not None else _v if _v is not None: self["nticks"] = _v _v = arg.pop("orientation", None) _v = orientation if orientation is not None else _v if _v is not None: self["orientation"] = _v _v = arg.pop("outlinecolor", None) _v = outlinecolor if outlinecolor is not 
None else _v if _v is not None: self["outlinecolor"] = _v _v = arg.pop("outlinewidth", None) _v = outlinewidth if outlinewidth is not None else _v if _v is not None: self["outlinewidth"] = _v _v = arg.pop("separatethousands", None) _v = separatethousands if separatethousands is not None else _v if _v is not None: self["separatethousands"] = _v _v = arg.pop("showexponent", None) _v = showexponent if showexponent is not None else _v if _v is not None: self["showexponent"] = _v _v = arg.pop("showticklabels", None) _v = showticklabels if showticklabels is not None else _v if _v is not None: self["showticklabels"] = _v _v = arg.pop("showtickprefix", None) _v = showtickprefix if showtickprefix is not None else _v if _v is not None: self["showtickprefix"] = _v _v = arg.pop("showticksuffix", None) _v = showticksuffix if showticksuffix is not None else _v if _v is not None: self["showticksuffix"] = _v _v = arg.pop("thickness", None) _v = thickness if thickness is not None else _v if _v is not None: self["thickness"] = _v _v = arg.pop("thicknessmode", None) _v = thicknessmode if thicknessmode is not None else _v if _v is not None: self["thicknessmode"] = _v _v = arg.pop("tick0", None) _v = tick0 if tick0 is not None else _v if _v is not None: self["tick0"] = _v _v = arg.pop("tickangle", None) _v = tickangle if tickangle is not None else _v if _v is not None: self["tickangle"] = _v _v = arg.pop("tickcolor", None) _v = tickcolor if tickcolor is not None else _v if _v is not None: self["tickcolor"] = _v _v = arg.pop("tickfont", None) _v = tickfont if tickfont is not None else _v if _v is not None: self["tickfont"] = _v _v = arg.pop("tickformat", None) _v = tickformat if tickformat is not None else _v if _v is not None: self["tickformat"] = _v _v = arg.pop("tickformatstops", None) _v = tickformatstops if tickformatstops is not None else _v if _v is not None: self["tickformatstops"] = _v _v = arg.pop("tickformatstopdefaults", None) _v = tickformatstopdefaults if 
tickformatstopdefaults is not None else _v if _v is not None: self["tickformatstopdefaults"] = _v _v = arg.pop("ticklabeloverflow", None) _v = ticklabeloverflow if ticklabeloverflow is not None else _v if _v is not None: self["ticklabeloverflow"] = _v _v = arg.pop("ticklabelposition", None) _v = ticklabelposition if ticklabelposition is not None else _v if _v is not None: self["ticklabelposition"] = _v _v = arg.pop("ticklabelstep", None) _v = ticklabelstep if ticklabelstep is not None else _v if _v is not None: self["ticklabelstep"] = _v _v = arg.pop("ticklen", None) _v = ticklen if ticklen is not None else _v if _v is not None: self["ticklen"] = _v _v = arg.pop("tickmode", None) _v = tickmode if tickmode is not None else _v if _v is not None: self["tickmode"] = _v _v = arg.pop("tickprefix", None) _v = tickprefix if tickprefix is not None else _v if _v is not None: self["tickprefix"] = _v _v = arg.pop("ticks", None) _v = ticks if ticks is not None else _v if _v is not None: self["ticks"] = _v _v = arg.pop("ticksuffix", None) _v = ticksuffix if ticksuffix is not None else _v if _v is not None: self["ticksuffix"] = _v _v = arg.pop("ticktext", None) _v = ticktext if ticktext is not None else _v if _v is not None: self["ticktext"] = _v _v = arg.pop("ticktextsrc", None) _v = ticktextsrc if ticktextsrc is not None else _v if _v is not None: self["ticktextsrc"] = _v _v = arg.pop("tickvals", None) _v = tickvals if tickvals is not None else _v if _v is not None: self["tickvals"] = _v _v = arg.pop("tickvalssrc", None) _v = tickvalssrc if tickvalssrc is not None else _v if _v is not None: self["tickvalssrc"] = _v _v = arg.pop("tickwidth", None) _v = tickwidth if tickwidth is not None else _v if _v is not None: self["tickwidth"] = _v _v = arg.pop("title", None) _v = title if title is not None else _v if _v is not None: self["title"] = _v _v = arg.pop("titlefont", None) _v = titlefont if titlefont is not None else _v if _v is not None: self["titlefont"] = _v _v = 
arg.pop("titleside", None) _v = titleside if titleside is not None else _v if _v is not None: self["titleside"] = _v _v = arg.pop("x", None) _v = x if x is not None else _v if _v is not None: self["x"] = _v _v = arg.pop("xanchor", None) _v = xanchor if xanchor is not None else _v if _v is not None: self["xanchor"] = _v _v = arg.pop("xpad", None) _v = xpad if xpad is not None else _v if _v is not None: self["xpad"] = _v _v = arg.pop("y", None) _v = y if y is not None else _v if _v is not None: self["y"] = _v _v = arg.pop("yanchor", None) _v = yanchor if yanchor is not None else _v if _v is not None: self["yanchor"] = _v _v = arg.pop("ypad", None) _v = ypad if ypad is not None else _v if _v is not None: self["ypad"] = _v # Process unknown kwargs # ---------------------- self._process_kwargs(**dict(arg, **kwargs)) # Reset skip_invalid # ------------------ self._skip_invalid = False
36.208314
96
0.560294
4a14ca55c83bdf20bf41aea390762ad13ed3d9e7
4,579
py
Python
src/xml_processing/process_many_pmids_to_json.py
alliance-genome/agr_literature_service
2278316422d5c3ab65e21bb97d91e861e48853c5
[ "MIT" ]
null
null
null
src/xml_processing/process_many_pmids_to_json.py
alliance-genome/agr_literature_service
2278316422d5c3ab65e21bb97d91e861e48853c5
[ "MIT" ]
39
2021-10-18T17:02:49.000Z
2022-03-28T20:56:24.000Z
src/xml_processing/process_many_pmids_to_json.py
alliance-genome/agr_literature_service
2278316422d5c3ab65e21bb97d91e861e48853c5
[ "MIT" ]
1
2021-10-21T00:11:18.000Z
2021-10-21T00:11:18.000Z
import time from os import environ, path, makedirs import argparse import sys import logging # import logging.config from get_pubmed_xml import download_pubmed_xml from xml_to_json import generate_json # pipenv run python process_many_pmids_to_json.py -f inputs/alliance_pmids # # to force skip of downloading xml # pipenv run python process_many_pmids_to_json.py -s -f inputs/alliance_pmids # # enter a file with a list of pmids as an argument, download xml, convert to json, find new pmids in commentsCorrections, recurse, output list of pubmed-based (as opposed to MOD-DQM-based) pmids to inputs/pubmed_only_pmids # log_file_path = path.join(path.dirname(path.abspath(__file__)), '../logging.conf') # logging.config.fileConfig(log_file_path) # logger = logging.getLogger('literature logger') logging.basicConfig(level=logging.INFO, stream=sys.stdout, format= '%(asctime)s - %(levelname)s - {%(module)s %(funcName)s:%(lineno)d} - %(message)s', # noqa E251 datefmt='%Y-%m-%d %H:%M:%S') logger = logging.getLogger(__name__) parser = argparse.ArgumentParser() parser.add_argument('-c', '--commandline', nargs='*', action='store', help='take input from command line flag') parser.add_argument('-f', '--file', action='store', help='take input from entries in file with full path') parser.add_argument('-s', '--skip-download', action='store_true', help='do not download PubMed XML in testing mode') args = vars(parser.parse_args()) def download_and_convert_pmids(pmids_wanted, skip_download_flag): """ :param pmids_wanted: :return: """ pmids_original = pmids_wanted pmids_additional = [] pmids_new_list = pmids_wanted pmids_additional = recursively_process_pmids(pmids_original, pmids_additional, pmids_new_list, skip_download_flag) base_path = environ.get('XML_PATH') inputs_path = base_path + 'inputs/' if not path.exists(inputs_path): makedirs(inputs_path) pubmed_only_filepath = base_path + 'inputs/pubmed_only_pmids' pmids_additional.sort(key=int) # for pmid in pmids_additional: # 
logger.info("new_pmid %s", pmid) # print("pubmed additional %s" % (pmid)) pmids_additional_string = ("\n".join(pmids_additional)) with open(pubmed_only_filepath, "w") as pubmed_only_fh: pubmed_only_fh.write(pmids_additional_string) pubmed_all_filepath = base_path + 'inputs/all_pmids' pmids_all_list = pmids_wanted + pmids_additional pmids_all_list.sort(key=int) pmids_all_string = ("\n".join(pmids_all_list)) with open(pubmed_all_filepath, "w") as pubmed_all_fh: pubmed_all_fh.write(pmids_all_string) def recursively_process_pmids(pmids_original, pmids_additional, pmids_new_list, skip_download_flag): """ :param pmids_original: :param pmids_additional: :param pmids_new_list: :return: """ if not skip_download_flag: download_pubmed_xml(pmids_new_list) pmids_already_processed = pmids_original + pmids_additional pmids_new_list = generate_json(pmids_new_list, pmids_already_processed) # for pmid in pmids_new_list: # logger.info("new_pmid %s", pmid) # print("newly found %s" % (pmid)) # print(pmids_new_list) # print(pmids_additional) if pmids_new_list: time.sleep(1) pmids_additional.extend(pmids_new_list) recursively_process_pmids(pmids_original, pmids_additional, pmids_new_list, skip_download_flag) return pmids_additional if __name__ == "__main__": """ call main start function skip download flag is to avoid downloading new pubmed_xml/ when running tests, although if the files already exist there from the repo, they won't get downloaded anyway. 
""" pmids_wanted = [] skip_download_flag = False if args['skip_download']: skip_download_flag = args['skip_download'] # python process_single_pmid.py -c 1234 4576 1828 if args['commandline']: logger.info("Processing commandline input") for pmid in args['commandline']: pmids_wanted.append(pmid) elif args['file']: logger.info("Processing file input from %s", args['file']) with open(args['file'], 'r') as fp: pmid = fp.readline() while pmid: pmids_wanted.append(pmid.rstrip()) pmid = fp.readline() else: logger.info("Must enter a PMID through command line") download_and_convert_pmids(pmids_wanted, skip_download_flag) logger.info("Done Processing")
35.496124
223
0.70321
4a14cb352138efe10244ca68e2a994bb848e3d6b
13,947
py
Python
Lib/test/test__osx_support.py
finefoot/cpython
ffcc7cd57f6a52c6074ecc9f0a9f0177fb1dbfee
[ "0BSD" ]
1
2021-11-05T12:29:12.000Z
2021-11-05T12:29:12.000Z
Lib/test/test__osx_support.py
finefoot/cpython
ffcc7cd57f6a52c6074ecc9f0a9f0177fb1dbfee
[ "0BSD" ]
3
2021-12-01T00:06:10.000Z
2022-03-01T00:03:04.000Z
Lib/test/test__osx_support.py
finefoot/cpython
ffcc7cd57f6a52c6074ecc9f0a9f0177fb1dbfee
[ "0BSD" ]
1
2019-04-06T18:29:45.000Z
2019-04-06T18:29:45.000Z
""" Test suite for _osx_support: shared OS X support functions. """ import os import platform import stat import sys import unittest from test.support import os_helper import _osx_support @unittest.skipUnless(sys.platform.startswith("darwin"), "requires OS X") class Test_OSXSupport(unittest.TestCase): def setUp(self): self.maxDiff = None self.prog_name = 'bogus_program_xxxx' self.temp_path_dir = os.path.abspath(os.getcwd()) self.env = self.enterContext(os_helper.EnvironmentVarGuard()) for cv in ('CFLAGS', 'LDFLAGS', 'CPPFLAGS', 'BASECFLAGS', 'BLDSHARED', 'LDSHARED', 'CC', 'CXX', 'PY_CFLAGS', 'PY_LDFLAGS', 'PY_CPPFLAGS', 'PY_CORE_CFLAGS', 'PY_CORE_LDFLAGS'): if cv in self.env: self.env.unset(cv) def add_expected_saved_initial_values(self, config_vars, expected_vars): # Ensure that the initial values for all modified config vars # are also saved with modified keys. expected_vars.update(('_OSX_SUPPORT_INITIAL_'+ k, config_vars[k]) for k in config_vars if config_vars[k] != expected_vars[k]) def test__find_executable(self): if self.env['PATH']: self.env['PATH'] = self.env['PATH'] + ':' self.env['PATH'] = self.env['PATH'] + os.path.abspath(self.temp_path_dir) os_helper.unlink(self.prog_name) self.assertIsNone(_osx_support._find_executable(self.prog_name)) self.addCleanup(os_helper.unlink, self.prog_name) with open(self.prog_name, 'w') as f: f.write("#!/bin/sh\n/bin/echo OK\n") os.chmod(self.prog_name, stat.S_IRWXU) self.assertEqual(self.prog_name, _osx_support._find_executable(self.prog_name)) def test__read_output(self): if self.env['PATH']: self.env['PATH'] = self.env['PATH'] + ':' self.env['PATH'] = self.env['PATH'] + os.path.abspath(self.temp_path_dir) os_helper.unlink(self.prog_name) self.addCleanup(os_helper.unlink, self.prog_name) with open(self.prog_name, 'w') as f: f.write("#!/bin/sh\n/bin/echo ExpectedOutput\n") os.chmod(self.prog_name, stat.S_IRWXU) self.assertEqual('ExpectedOutput', _osx_support._read_output(self.prog_name)) def test__find_build_tool(self): 
out = _osx_support._find_build_tool('cc') self.assertTrue(os.path.isfile(out), 'cc not found - check xcode-select') def test__get_system_version(self): self.assertTrue(platform.mac_ver()[0].startswith( _osx_support._get_system_version())) def test__remove_original_values(self): config_vars = { 'CC': 'gcc-test -pthreads', } expected_vars = { 'CC': 'clang -pthreads', } cv = 'CC' newvalue = 'clang -pthreads' _osx_support._save_modified_value(config_vars, cv, newvalue) self.assertNotEqual(expected_vars, config_vars) _osx_support._remove_original_values(config_vars) self.assertEqual(expected_vars, config_vars) def test__save_modified_value(self): config_vars = { 'CC': 'gcc-test -pthreads', } expected_vars = { 'CC': 'clang -pthreads', } self.add_expected_saved_initial_values(config_vars, expected_vars) cv = 'CC' newvalue = 'clang -pthreads' _osx_support._save_modified_value(config_vars, cv, newvalue) self.assertEqual(expected_vars, config_vars) def test__save_modified_value_unchanged(self): config_vars = { 'CC': 'gcc-test -pthreads', } expected_vars = config_vars.copy() cv = 'CC' newvalue = 'gcc-test -pthreads' _osx_support._save_modified_value(config_vars, cv, newvalue) self.assertEqual(expected_vars, config_vars) def test__supports_universal_builds(self): import platform mac_ver_tuple = tuple(int(i) for i in platform.mac_ver()[0].split('.')[0:2]) self.assertEqual(mac_ver_tuple >= (10, 4), _osx_support._supports_universal_builds()) def test__find_appropriate_compiler(self): compilers = ( ('gcc-test', 'i686-apple-darwin11-llvm-gcc-4.2'), ('clang', 'clang version 3.1'), ) config_vars = { 'CC': 'gcc-test -pthreads', 'CXX': 'cc++-test', 'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 ', 'LDFLAGS': '-arch ppc -arch i386 -g', 'CPPFLAGS': '-I. 
-isysroot /Developer/SDKs/MacOSX10.4u.sdk', 'BLDSHARED': 'gcc-test -bundle -arch ppc -arch i386 -g', 'LDSHARED': 'gcc-test -bundle -arch ppc -arch i386 ' '-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g', } expected_vars = { 'CC': 'clang -pthreads', 'CXX': 'clang++', 'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 ', 'LDFLAGS': '-arch ppc -arch i386 -g', 'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk', 'BLDSHARED': 'clang -bundle -arch ppc -arch i386 -g', 'LDSHARED': 'clang -bundle -arch ppc -arch i386 ' '-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g', } self.add_expected_saved_initial_values(config_vars, expected_vars) suffix = (':' + self.env['PATH']) if self.env['PATH'] else '' self.env['PATH'] = os.path.abspath(self.temp_path_dir) + suffix for c_name, c_output in compilers: os_helper.unlink(c_name) self.addCleanup(os_helper.unlink, c_name) with open(c_name, 'w') as f: f.write("#!/bin/sh\n/bin/echo " + c_output) os.chmod(c_name, stat.S_IRWXU) self.assertEqual(expected_vars, _osx_support._find_appropriate_compiler( config_vars)) def test__remove_universal_flags(self): config_vars = { 'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 ', 'LDFLAGS': '-arch ppc -arch i386 -g', 'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk', 'BLDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 -g', 'LDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 ' '-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g', } expected_vars = { 'CFLAGS': '-fno-strict-aliasing -g -O3 ', 'LDFLAGS': ' -g', 'CPPFLAGS': '-I. 
', 'BLDSHARED': 'gcc-4.0 -bundle -g', 'LDSHARED': 'gcc-4.0 -bundle -g', } self.add_expected_saved_initial_values(config_vars, expected_vars) self.assertEqual(expected_vars, _osx_support._remove_universal_flags( config_vars)) def test__remove_universal_flags_alternate(self): # bpo-38360: also test the alternate single-argument form of -isysroot config_vars = { 'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 ', 'LDFLAGS': '-arch ppc -arch i386 -g', 'CPPFLAGS': '-I. -isysroot/Developer/SDKs/MacOSX10.4u.sdk', 'BLDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 -g', 'LDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 ' '-isysroot/Developer/SDKs/MacOSX10.4u.sdk -g', } expected_vars = { 'CFLAGS': '-fno-strict-aliasing -g -O3 ', 'LDFLAGS': ' -g', 'CPPFLAGS': '-I. ', 'BLDSHARED': 'gcc-4.0 -bundle -g', 'LDSHARED': 'gcc-4.0 -bundle -g', } self.add_expected_saved_initial_values(config_vars, expected_vars) self.assertEqual(expected_vars, _osx_support._remove_universal_flags( config_vars)) def test__remove_unsupported_archs(self): config_vars = { 'CC': 'clang', 'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 ', 'LDFLAGS': '-arch ppc -arch i386 -g', 'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk', 'BLDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 -g', 'LDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 ' '-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g', } expected_vars = { 'CC': 'clang', 'CFLAGS': '-fno-strict-aliasing -g -O3 -arch i386 ', 'LDFLAGS': ' -arch i386 -g', 'CPPFLAGS': '-I. 
-isysroot /Developer/SDKs/MacOSX10.4u.sdk', 'BLDSHARED': 'gcc-4.0 -bundle -arch i386 -g', 'LDSHARED': 'gcc-4.0 -bundle -arch i386 ' '-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g', } self.add_expected_saved_initial_values(config_vars, expected_vars) suffix = (':' + self.env['PATH']) if self.env['PATH'] else '' self.env['PATH'] = os.path.abspath(self.temp_path_dir) + suffix c_name = 'clang' os_helper.unlink(c_name) self.addCleanup(os_helper.unlink, c_name) # exit status 255 means no PPC support in this compiler chain with open(c_name, 'w') as f: f.write("#!/bin/sh\nexit 255") os.chmod(c_name, stat.S_IRWXU) self.assertEqual(expected_vars, _osx_support._remove_unsupported_archs( config_vars)) def test__override_all_archs(self): self.env['ARCHFLAGS'] = '-arch x86_64' config_vars = { 'CC': 'clang', 'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 ', 'LDFLAGS': '-arch ppc -arch i386 -g', 'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk', 'BLDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 -g', 'LDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 ' '-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g', } expected_vars = { 'CC': 'clang', 'CFLAGS': '-fno-strict-aliasing -g -O3 -arch x86_64', 'LDFLAGS': ' -g -arch x86_64', 'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk', 'BLDSHARED': 'gcc-4.0 -bundle -g -arch x86_64', 'LDSHARED': 'gcc-4.0 -bundle -isysroot ' '/Developer/SDKs/MacOSX10.4u.sdk -g -arch x86_64', } self.add_expected_saved_initial_values(config_vars, expected_vars) self.assertEqual(expected_vars, _osx_support._override_all_archs( config_vars)) def test__check_for_unavailable_sdk(self): config_vars = { 'CC': 'clang', 'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 ' '-isysroot /Developer/SDKs/MacOSX10.1.sdk', 'LDFLAGS': '-arch ppc -arch i386 -g', 'CPPFLAGS': '-I. 
-isysroot /Developer/SDKs/MacOSX10.1.sdk', 'BLDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 -g', 'LDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 ' '-isysroot /Developer/SDKs/MacOSX10.1.sdk -g', } expected_vars = { 'CC': 'clang', 'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 ' ' ', 'LDFLAGS': '-arch ppc -arch i386 -g', 'CPPFLAGS': '-I. ', 'BLDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 -g', 'LDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 ' ' -g', } self.add_expected_saved_initial_values(config_vars, expected_vars) self.assertEqual(expected_vars, _osx_support._check_for_unavailable_sdk( config_vars)) def test__check_for_unavailable_sdk_alternate(self): # bpo-38360: also test the alternate single-argument form of -isysroot config_vars = { 'CC': 'clang', 'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 ' '-isysroot/Developer/SDKs/MacOSX10.1.sdk', 'LDFLAGS': '-arch ppc -arch i386 -g', 'CPPFLAGS': '-I. -isysroot/Developer/SDKs/MacOSX10.1.sdk', 'BLDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 -g', 'LDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 ' '-isysroot/Developer/SDKs/MacOSX10.1.sdk -g', } expected_vars = { 'CC': 'clang', 'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 ' ' ', 'LDFLAGS': '-arch ppc -arch i386 -g', 'CPPFLAGS': '-I. 
', 'BLDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 -g', 'LDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 ' ' -g', } self.add_expected_saved_initial_values(config_vars, expected_vars) self.assertEqual(expected_vars, _osx_support._check_for_unavailable_sdk( config_vars)) def test_get_platform_osx(self): # Note, get_platform_osx is currently tested more extensively # indirectly by test_sysconfig and test_distutils config_vars = { 'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 ' '-isysroot /Developer/SDKs/MacOSX10.1.sdk', 'MACOSX_DEPLOYMENT_TARGET': '10.6', } result = _osx_support.get_platform_osx(config_vars, ' ', ' ', ' ') self.assertEqual(('macosx', '10.6', 'fat'), result) if __name__ == "__main__": unittest.main()
42.651376
81
0.559045
4a14cb8c541bbd286ce41c7efe74bd3814e45761
1,236
py
Python
readthedocs/search/faceted_search.py
houllette/readthedocs.org
e11a3e45e3f0a8299c7e9258e0dfd64c5c5acebe
[ "MIT" ]
null
null
null
readthedocs/search/faceted_search.py
houllette/readthedocs.org
e11a3e45e3f0a8299c7e9258e0dfd64c5c5acebe
[ "MIT" ]
null
null
null
readthedocs/search/faceted_search.py
houllette/readthedocs.org
e11a3e45e3f0a8299c7e9258e0dfd64c5c5acebe
[ "MIT" ]
null
null
null
from elasticsearch_dsl import FacetedSearch, TermsFacet from elasticsearch_dsl.query import SimpleQueryString, Bool class RTDFacetedSearch(FacetedSearch): """Overwrite the initialization in order too meet our needs""" # TODO: Remove the overwrite when the elastic/elasticsearch-dsl-py#916 # See more: https://github.com/elastic/elasticsearch-dsl-py/issues/916 def __init__(self, using, index, doc_types, model, fields=None, **kwargs): self.using = using self.index = index self.doc_types = doc_types self._model = model if fields: self.fields = fields super(RTDFacetedSearch, self).__init__(**kwargs) class ProjectSearch(RTDFacetedSearch): fields = ['name^5', 'description'] facets = { 'language': TermsFacet(field='language') } class FileSearch(RTDFacetedSearch): facets = { 'project': TermsFacet(field='project'), 'version': TermsFacet(field='version') } def query(self, search, query): """ Add query part to ``search`` Overriding because we pass ES Query object instead of string """ if query: search = search.query(query) return search
27.466667
78
0.649676
4a14cbcca0c9f2e2e1038f9628ad1cbf06fd160a
125,124
py
Python
test/engine/test_execute.py
ricardogferreira/sqlalchemy
fec2b6560c14bb28ee7fc9d21028844acf700b04
[ "MIT" ]
5,383
2018-11-27T07:34:03.000Z
2022-03-31T19:40:59.000Z
test/engine/test_execute.py
ricardogferreira/sqlalchemy
fec2b6560c14bb28ee7fc9d21028844acf700b04
[ "MIT" ]
2,719
2018-11-27T07:55:01.000Z
2022-03-31T22:09:44.000Z
test/engine/test_execute.py
ricardogferreira/sqlalchemy
fec2b6560c14bb28ee7fc9d21028844acf700b04
[ "MIT" ]
998
2018-11-28T09:34:38.000Z
2022-03-30T19:04:11.000Z
# coding: utf-8 from contextlib import contextmanager import re import threading import weakref import sqlalchemy as tsa from sqlalchemy import bindparam from sqlalchemy import create_engine from sqlalchemy import create_mock_engine from sqlalchemy import event from sqlalchemy import func from sqlalchemy import inspect from sqlalchemy import INT from sqlalchemy import Integer from sqlalchemy import LargeBinary from sqlalchemy import MetaData from sqlalchemy import select from sqlalchemy import Sequence from sqlalchemy import String from sqlalchemy import testing from sqlalchemy import text from sqlalchemy import TypeDecorator from sqlalchemy import util from sqlalchemy import VARCHAR from sqlalchemy.engine import default from sqlalchemy.engine.base import Connection from sqlalchemy.engine.base import Engine from sqlalchemy.pool import NullPool from sqlalchemy.pool import QueuePool from sqlalchemy.sql import column from sqlalchemy.sql import literal from sqlalchemy.sql.elements import literal_column from sqlalchemy.testing import assert_raises from sqlalchemy.testing import assert_raises_message from sqlalchemy.testing import config from sqlalchemy.testing import engines from sqlalchemy.testing import eq_ from sqlalchemy.testing import expect_raises_message from sqlalchemy.testing import expect_warnings from sqlalchemy.testing import fixtures from sqlalchemy.testing import is_ from sqlalchemy.testing import is_false from sqlalchemy.testing import is_not from sqlalchemy.testing import is_true from sqlalchemy.testing import mock from sqlalchemy.testing.assertions import expect_deprecated from sqlalchemy.testing.assertsql import CompiledSQL from sqlalchemy.testing.mock import call from sqlalchemy.testing.mock import Mock from sqlalchemy.testing.mock import patch from sqlalchemy.testing.schema import Column from sqlalchemy.testing.schema import Table from sqlalchemy.testing.util import gc_collect from sqlalchemy.testing.util import picklers from sqlalchemy.util import 
collections_abc class SomeException(Exception): pass class Foo(object): def __str__(self): return "foo" def __unicode__(self): return util.u("fóó") class ExecuteTest(fixtures.TablesTest): __backend__ = True @classmethod def define_tables(cls, metadata): Table( "users", metadata, Column("user_id", INT, primary_key=True, autoincrement=False), Column("user_name", VARCHAR(20)), ) Table( "users_autoinc", metadata, Column( "user_id", INT, primary_key=True, test_needs_autoincrement=True ), Column("user_name", VARCHAR(20)), ) def test_no_params_option(self): stmt = ( "SELECT '%'" + testing.db.dialect.statement_compiler( testing.db.dialect, None ).default_from() ) with testing.db.connect() as conn: result = ( conn.execution_options(no_parameters=True) .exec_driver_sql(stmt) .scalar() ) eq_(result, "%") def test_raw_positional_invalid(self, connection): assert_raises_message( tsa.exc.ArgumentError, "List argument must consist only of tuples or dictionaries", connection.exec_driver_sql, "insert into users (user_id, user_name) " "values (?, ?)", [2, "fred"], ) assert_raises_message( tsa.exc.ArgumentError, "List argument must consist only of tuples or dictionaries", connection.exec_driver_sql, "insert into users (user_id, user_name) " "values (?, ?)", [[3, "ed"], [4, "horse"]], ) def test_raw_named_invalid(self, connection): # this is awkward b.c. this is just testing if regular Python # is raising TypeError if they happened to send arguments that # look like the legacy ones which also happen to conflict with # the positional signature for the method. 
some combinations # can get through and fail differently assert_raises( TypeError, connection.exec_driver_sql, "insert into users (user_id, user_name) " "values (%(id)s, %(name)s)", {"id": 2, "name": "ed"}, {"id": 3, "name": "horse"}, {"id": 4, "name": "horse"}, ) assert_raises( TypeError, connection.exec_driver_sql, "insert into users (user_id, user_name) " "values (%(id)s, %(name)s)", id=4, name="sally", ) @testing.requires.qmark_paramstyle def test_raw_qmark(self, connection): conn = connection conn.exec_driver_sql( "insert into users (user_id, user_name) " "values (?, ?)", (1, "jack"), ) conn.exec_driver_sql( "insert into users (user_id, user_name) " "values (?, ?)", (2, "fred"), ) conn.exec_driver_sql( "insert into users (user_id, user_name) " "values (?, ?)", [(3, "ed"), (4, "horse")], ) conn.exec_driver_sql( "insert into users (user_id, user_name) " "values (?, ?)", [(5, "barney"), (6, "donkey")], ) conn.exec_driver_sql( "insert into users (user_id, user_name) " "values (?, ?)", (7, "sally"), ) res = conn.exec_driver_sql("select * from users order by user_id") assert res.fetchall() == [ (1, "jack"), (2, "fred"), (3, "ed"), (4, "horse"), (5, "barney"), (6, "donkey"), (7, "sally"), ] res = conn.exec_driver_sql( "select * from users where user_name=?", ("jack",) ) assert res.fetchall() == [(1, "jack")] @testing.requires.format_paramstyle def test_raw_sprintf(self, connection): conn = connection conn.exec_driver_sql( "insert into users (user_id, user_name) " "values (%s, %s)", (1, "jack"), ) conn.exec_driver_sql( "insert into users (user_id, user_name) " "values (%s, %s)", [(2, "ed"), (3, "horse")], ) conn.exec_driver_sql( "insert into users (user_id, user_name) " "values (%s, %s)", (4, "sally"), ) conn.exec_driver_sql("insert into users (user_id) values (%s)", (5,)) res = conn.exec_driver_sql("select * from users order by user_id") assert res.fetchall() == [ (1, "jack"), (2, "ed"), (3, "horse"), (4, "sally"), (5, None), ] res = conn.exec_driver_sql( "select * 
from users where user_name=%s", ("jack",) ) assert res.fetchall() == [(1, "jack")] @testing.requires.pyformat_paramstyle def test_raw_python(self, connection): conn = connection conn.exec_driver_sql( "insert into users (user_id, user_name) " "values (%(id)s, %(name)s)", {"id": 1, "name": "jack"}, ) conn.exec_driver_sql( "insert into users (user_id, user_name) " "values (%(id)s, %(name)s)", [{"id": 2, "name": "ed"}, {"id": 3, "name": "horse"}], ) conn.exec_driver_sql( "insert into users (user_id, user_name) " "values (%(id)s, %(name)s)", dict(id=4, name="sally"), ) res = conn.exec_driver_sql("select * from users order by user_id") assert res.fetchall() == [ (1, "jack"), (2, "ed"), (3, "horse"), (4, "sally"), ] @testing.requires.named_paramstyle def test_raw_named(self, connection): conn = connection conn.exec_driver_sql( "insert into users (user_id, user_name) " "values (:id, :name)", {"id": 1, "name": "jack"}, ) conn.exec_driver_sql( "insert into users (user_id, user_name) " "values (:id, :name)", [{"id": 2, "name": "ed"}, {"id": 3, "name": "horse"}], ) conn.exec_driver_sql( "insert into users (user_id, user_name) " "values (:id, :name)", {"id": 4, "name": "sally"}, ) res = conn.exec_driver_sql("select * from users order by user_id") assert res.fetchall() == [ (1, "jack"), (2, "ed"), (3, "horse"), (4, "sally"), ] def test_dialect_has_table_assertion(self): with expect_raises_message( tsa.exc.ArgumentError, r"The argument passed to Dialect.has_table\(\) should be a", ): testing.db.dialect.has_table(testing.db, "some_table") def test_exception_wrapping_dbapi(self): with testing.db.connect() as conn: # engine does not have exec_driver_sql assert_raises_message( tsa.exc.DBAPIError, r"not_a_valid_statement", conn.exec_driver_sql, "not_a_valid_statement", ) @testing.requires.sqlite def test_exception_wrapping_non_dbapi_error(self): e = create_engine("sqlite://") e.dialect.is_disconnect = is_disconnect = Mock() with e.connect() as c: c.connection.cursor = Mock( 
return_value=Mock( execute=Mock( side_effect=TypeError("I'm not a DBAPI error") ) ) ) assert_raises_message( TypeError, "I'm not a DBAPI error", c.exec_driver_sql, "select ", ) eq_(is_disconnect.call_count, 0) def test_exception_wrapping_non_standard_dbapi_error(self): class DBAPIError(Exception): pass class OperationalError(DBAPIError): pass class NonStandardException(OperationalError): pass # TODO: this test is assuming too much of arbitrary dialects and would # be better suited tested against a single mock dialect that does not # have any special behaviors with patch.object( testing.db.dialect, "dbapi", Mock(Error=DBAPIError) ), patch.object( testing.db.dialect, "is_disconnect", lambda *arg: False ), patch.object( testing.db.dialect, "do_execute", Mock(side_effect=NonStandardException), ), patch.object( testing.db.dialect.execution_ctx_cls, "handle_dbapi_exception", Mock(), ): with testing.db.connect() as conn: assert_raises( tsa.exc.OperationalError, conn.exec_driver_sql, "select 1" ) def test_exception_wrapping_non_dbapi_statement(self): class MyType(TypeDecorator): impl = Integer cache_ok = True def process_bind_param(self, value, dialect): raise SomeException("nope") def _go(conn): assert_raises_message( tsa.exc.StatementError, r"\(.*.SomeException\) " r"nope\n\[SQL\: u?SELECT 1 ", conn.execute, select(1).where(column("foo") == literal("bar", MyType())), ) with testing.db.connect() as conn: _go(conn) def test_not_an_executable(self): for obj in ( Table("foo", MetaData(), Column("x", Integer)), Column("x", Integer), tsa.and_(True), tsa.and_(True).compile(), column("foo"), column("foo").compile(), select(1).cte(), # select(1).subquery(), MetaData(), Integer(), tsa.Index(name="foo"), tsa.UniqueConstraint("x"), ): with testing.db.connect() as conn: assert_raises_message( tsa.exc.ObjectNotExecutableError, "Not an executable object", conn.execute, obj, ) def test_subquery_exec_warning(self): for obj in (select(1).alias(), select(1).subquery()): with 
testing.db.connect() as conn: with expect_deprecated( "Executing a subquery object is deprecated and will " "raise ObjectNotExecutableError" ): eq_(conn.execute(obj).scalar(), 1) def test_stmt_exception_bytestring_raised(self): name = util.u("méil") users = self.tables.users with testing.db.connect() as conn: assert_raises_message( tsa.exc.StatementError, util.u( "A value is required for bind parameter 'uname'\n" r".*SELECT users.user_name AS .m\xe9il." ) if util.py2k else util.u( "A value is required for bind parameter 'uname'\n" ".*SELECT users.user_name AS .méil." ), conn.execute, select(users.c.user_name.label(name)).where( users.c.user_name == bindparam("uname") ), {"uname_incorrect": "foo"}, ) def test_stmt_exception_bytestring_utf8(self): # uncommon case for Py3K, bytestring object passed # as the error message message = util.u("some message méil").encode("utf-8") err = tsa.exc.SQLAlchemyError(message) if util.py2k: # string passes it through eq_(str(err), message) # unicode accessor decodes to utf-8 eq_(unicode(err), util.u("some message méil")) # noqa F821 else: eq_(str(err), util.u("some message méil")) def test_stmt_exception_bytestring_latin1(self): # uncommon case for Py3K, bytestring object passed # as the error message message = util.u("some message méil").encode("latin-1") err = tsa.exc.SQLAlchemyError(message) if util.py2k: # string passes it through eq_(str(err), message) # unicode accessor decodes to utf-8 eq_(unicode(err), util.u("some message m\\xe9il")) # noqa F821 else: eq_(str(err), util.u("some message m\\xe9il")) def test_stmt_exception_unicode_hook_unicode(self): # uncommon case for Py2K, Unicode object passed # as the error message message = util.u("some message méil") err = tsa.exc.SQLAlchemyError(message) if util.py2k: eq_(unicode(err), util.u("some message méil")) # noqa F821 else: eq_(str(err), util.u("some message méil")) def test_stmt_exception_object_arg(self): err = tsa.exc.SQLAlchemyError(Foo()) eq_(str(err), "foo") if 
util.py2k: eq_(unicode(err), util.u("fóó")) # noqa F821 def test_stmt_exception_str_multi_args(self): err = tsa.exc.SQLAlchemyError("some message", 206) eq_(str(err), "('some message', 206)") def test_stmt_exception_str_multi_args_bytestring(self): message = util.u("some message méil").encode("utf-8") err = tsa.exc.SQLAlchemyError(message, 206) eq_(str(err), str((message, 206))) def test_stmt_exception_str_multi_args_unicode(self): message = util.u("some message méil") err = tsa.exc.SQLAlchemyError(message, 206) eq_(str(err), str((message, 206))) def test_stmt_exception_pickleable_no_dbapi(self): self._test_stmt_exception_pickleable(Exception("hello world")) @testing.crashes( "postgresql+psycopg2", "Older versions don't support cursor pickling, newer ones do", ) @testing.fails_on( "mysql+oursql", "Exception doesn't come back exactly the same from pickle", ) @testing.fails_on( "mysql+mysqlconnector", "Exception doesn't come back exactly the same from pickle", ) @testing.fails_on( "oracle+cx_oracle", "cx_oracle exception seems to be having " "some issue with pickling", ) def test_stmt_exception_pickleable_plus_dbapi(self): raw = testing.db.raw_connection() the_orig = None try: try: cursor = raw.cursor() cursor.execute("SELECTINCORRECT") except testing.db.dialect.dbapi.Error as orig: # py3k has "orig" in local scope... 
the_orig = orig finally: raw.close() self._test_stmt_exception_pickleable(the_orig) def _test_stmt_exception_pickleable(self, orig): for sa_exc in ( tsa.exc.StatementError( "some error", "select * from table", {"foo": "bar"}, orig, False, ), tsa.exc.InterfaceError( "select * from table", {"foo": "bar"}, orig, True ), tsa.exc.NoReferencedTableError("message", "tname"), tsa.exc.NoReferencedColumnError("message", "tname", "cname"), tsa.exc.CircularDependencyError( "some message", [1, 2, 3], [(1, 2), (3, 4)] ), ): for loads, dumps in picklers(): repickled = loads(dumps(sa_exc)) eq_(repickled.args[0], sa_exc.args[0]) if isinstance(sa_exc, tsa.exc.StatementError): eq_(repickled.params, {"foo": "bar"}) eq_(repickled.statement, sa_exc.statement) if hasattr(sa_exc, "connection_invalidated"): eq_( repickled.connection_invalidated, sa_exc.connection_invalidated, ) eq_(repickled.orig.args[0], orig.args[0]) def test_dont_wrap_mixin(self): class MyException(Exception, tsa.exc.DontWrapMixin): pass class MyType(TypeDecorator): impl = Integer cache_ok = True def process_bind_param(self, value, dialect): raise MyException("nope") def _go(conn): assert_raises_message( MyException, "nope", conn.execute, select(1).where(column("foo") == literal("bar", MyType())), ) conn = testing.db.connect() try: _go(conn) finally: conn.close() def test_empty_insert(self, connection): """test that execute() interprets [] as a list with no params""" users_autoinc = self.tables.users_autoinc connection.execute( users_autoinc.insert().values(user_name=bindparam("name", None)), [], ) eq_(connection.execute(users_autoinc.select()).fetchall(), [(1, None)]) @testing.only_on("sqlite") def test_execute_compiled_favors_compiled_paramstyle(self): users = self.tables.users with patch.object(testing.db.dialect, "do_execute") as do_exec: stmt = users.update().values(user_id=1, user_name="foo") d1 = default.DefaultDialect(paramstyle="format") d2 = default.DefaultDialect(paramstyle="pyformat") with testing.db.begin() 
as conn: conn.execute(stmt.compile(dialect=d1)) conn.execute(stmt.compile(dialect=d2)) eq_( do_exec.mock_calls, [ call( mock.ANY, "UPDATE users SET user_id=%s, user_name=%s", (1, "foo"), mock.ANY, ), call( mock.ANY, "UPDATE users SET user_id=%(user_id)s, " "user_name=%(user_name)s", {"user_name": "foo", "user_id": 1}, mock.ANY, ), ], ) @testing.requires.ad_hoc_engines def test_engine_level_options(self): eng = engines.testing_engine( options={"execution_options": {"foo": "bar"}} ) with eng.connect() as conn: eq_(conn._execution_options["foo"], "bar") eq_( conn.execution_options(bat="hoho")._execution_options["foo"], "bar", ) eq_( conn.execution_options(bat="hoho")._execution_options["bat"], "hoho", ) eq_( conn.execution_options(foo="hoho")._execution_options["foo"], "hoho", ) eng.update_execution_options(foo="hoho") conn = eng.connect() eq_(conn._execution_options["foo"], "hoho") @testing.requires.ad_hoc_engines def test_generative_engine_execution_options(self): eng = engines.testing_engine( options={"execution_options": {"base": "x1"}} ) is_(eng.engine, eng) eng1 = eng.execution_options(foo="b1") is_(eng1.engine, eng1) eng2 = eng.execution_options(foo="b2") eng1a = eng1.execution_options(bar="a1") eng2a = eng2.execution_options(foo="b3", bar="a2") is_(eng2a.engine, eng2a) eq_(eng._execution_options, {"base": "x1"}) eq_(eng1._execution_options, {"base": "x1", "foo": "b1"}) eq_(eng2._execution_options, {"base": "x1", "foo": "b2"}) eq_(eng1a._execution_options, {"base": "x1", "foo": "b1", "bar": "a1"}) eq_(eng2a._execution_options, {"base": "x1", "foo": "b3", "bar": "a2"}) is_(eng1a.pool, eng.pool) # test pool is shared eng2.dispose() is_(eng1a.pool, eng2.pool) is_(eng.pool, eng2.pool) @testing.requires.ad_hoc_engines def test_autocommit_option_no_issue_first_connect(self): eng = create_engine(testing.db.url) eng.update_execution_options(autocommit=True) conn = eng.connect() eq_(conn._execution_options, {"autocommit": True}) conn.close() def 
test_initialize_rollback(self):
        """test a rollback happens during first connect"""
        eng = create_engine(testing.db.url)
        with patch.object(eng.dialect, "do_rollback") as do_rollback:
            # NOTE(review): statement grouping reconstructed from a collapsed
            # source; confirm which statements sit inside this `with` block.
            assert do_rollback.call_count == 0
            connection = eng.connect()
            # first connect triggers exactly one rollback
            assert do_rollback.call_count == 1
            connection.close()

    @testing.requires.ad_hoc_engines
    def test_dialect_init_uses_options(self):
        """Execution options set during dialect initialize() do not leak into
        the user-facing connection's options."""
        eng = create_engine(testing.db.url)

        def my_init(connection):
            connection.execution_options(foo="bar").execute(select(1))

        with patch.object(eng.dialect, "initialize", my_init):
            conn = eng.connect()
            # the "foo" option used inside initialize is not present here
            eq_(conn._execution_options, {})
            conn.close()

    @testing.requires.ad_hoc_engines
    def test_generative_engine_event_dispatch_hasevents(self):
        """An engine derived via execution_options() inherits the _has_events
        flag from listeners registered on the parent engine."""
        def l1(*arg, **kw):
            pass

        eng = create_engine(testing.db.url)
        assert not eng._has_events
        event.listen(eng, "before_execute", l1)
        eng2 = eng.execution_options(foo="bar")
        assert eng2._has_events

    def test_works_after_dispose(self):
        """An engine remains usable after each dispose(); new connections are
        created on demand."""
        eng = create_engine(testing.db.url)
        for i in range(3):
            with eng.connect() as conn:
                eq_(conn.scalar(select(1)), 1)
            eng.dispose()

    def test_works_after_dispose_testing_engine(self):
        """Same dispose/reconnect cycle using the testing-engine fixture."""
        eng = engines.testing_engine()
        for i in range(3):
            with eng.connect() as conn:
                eq_(conn.scalar(select(1)), 1)
            eng.dispose()

    def test_scalar(self, connection):
        """Connection.scalar() returns the first column of the first row."""
        conn = connection
        users = self.tables.users
        conn.execute(
            users.insert(),
            [
                {"user_id": 1, "user_name": "sandy"},
                {"user_id": 2, "user_name": "spongebob"},
            ],
        )
        res = conn.scalar(select(users.c.user_name).order_by(users.c.user_id))
        eq_(res, "sandy")

    def test_scalars(self, connection):
        """Connection.scalars() returns a result yielding the first column of
        every row."""
        conn = connection
        users = self.tables.users
        conn.execute(
            users.insert(),
            [
                {"user_id": 1, "user_name": "sandy"},
                {"user_id": 2, "user_name": "spongebob"},
            ],
        )
        res = conn.scalars(select(users.c.user_name).order_by(users.c.user_id))
        eq_(res.all(), ["sandy", "spongebob"])


class UnicodeReturnsTest(fixtures.TestBase):
    @testing.requires.python3
    def test_unicode_test_not_in_python3(self):
        """RETURNS_UNKNOWN unicode-return mode is rejected on Python 3."""
        eng = engines.testing_engine()
eng.dialect.returns_unicode_strings = String.RETURNS_UNKNOWN
        # connect must fail: RETURNS_UNKNOWN requires the py2-era probe
        assert_raises_message(
            tsa.exc.InvalidRequestError,
            "RETURNS_UNKNOWN is unsupported in Python 3",
            eng.connect,
        )

    @testing.requires.python2
    def test_unicode_test_fails_warning(self):
        """If the unicode-detection probe statement itself errors, connect
        emits a warning and falls back to RETURNS_CONDITIONAL."""
        class MockCursor(engines.DBAPIProxyCursor):
            def execute(self, stmt, params=None, **kw):
                # sabotage only the unicode-returns probe statement
                if "test unicode returns" in stmt:
                    raise self.engine.dialect.dbapi.DatabaseError("boom")
                else:
                    return super(MockCursor, self).execute(stmt, params, **kw)

        eng = engines.proxying_engine(cursor_cls=MockCursor)
        with testing.expect_warnings(
            "Exception attempting to detect unicode returns"
        ):
            eng.connect()

        # because plain varchar passed, we don't know the correct answer
        eq_(eng.dialect.returns_unicode_strings, String.RETURNS_CONDITIONAL)
        eng.dispose()


class ConvenienceExecuteTest(fixtures.TablesTest):
    __backend__ = True

    @classmethod
    def define_tables(cls, metadata):
        cls.table = Table(
            "exec_test",
            metadata,
            Column("a", Integer),
            Column("b", Integer),
            test_needs_acid=True,
        )

    def _trans_fn(self, is_transaction=False):
        """Return a callable that inserts (x, value); when is_transaction is
        True the callable receives a Transaction and unwraps its connection."""
        def go(conn, x, value=None):
            if is_transaction:
                conn = conn.connection
            conn.execute(self.table.insert().values(a=x, b=value))

        return go

    def _trans_rollback_fn(self, is_transaction=False):
        """Like _trans_fn, but raises SomeException after the insert so the
        surrounding transaction context must roll back."""
        def go(conn, x, value=None):
            if is_transaction:
                conn = conn.connection
            conn.execute(self.table.insert().values(a=x, b=value))
            raise SomeException("breakage")

        return go

    def _assert_no_data(self):
        # table must be empty (insert was rolled back)
        with testing.db.connect() as conn:
            eq_(
                conn.scalar(select(func.count("*")).select_from(self.table)),
                0,
            )

    def _assert_fn(self, x, value=None):
        # table must contain exactly the one committed row
        with testing.db.connect() as conn:
            eq_(conn.execute(self.table.select()).fetchall(), [(x, value)])

    def test_transaction_engine_ctx_commit(self):
        """engine.begin() as a context manager commits on success."""
        fn = self._trans_fn()
        ctx = testing.db.begin()
        testing.run_as_contextmanager(ctx, fn, 5, value=8)
        self._assert_fn(5, value=8)

    def test_transaction_engine_ctx_begin_fails(self):
        """If begin() raises during engine.begin(), the connection that was
        checked out is still closed."""
        engine = engines.testing_engine()

        mock_connection = Mock(
return_value=Mock(begin=Mock(side_effect=Exception("boom"))) ) engine._connection_cls = mock_connection assert_raises(Exception, engine.begin) eq_(mock_connection.return_value.close.mock_calls, [call()]) def test_transaction_engine_ctx_rollback(self): fn = self._trans_rollback_fn() ctx = testing.db.begin() assert_raises_message( Exception, "breakage", testing.run_as_contextmanager, ctx, fn, 5, value=8, ) self._assert_no_data() def test_transaction_connection_ctx_commit(self): fn = self._trans_fn(True) with testing.db.connect() as conn: ctx = conn.begin() testing.run_as_contextmanager(ctx, fn, 5, value=8) self._assert_fn(5, value=8) def test_transaction_connection_ctx_rollback(self): fn = self._trans_rollback_fn(True) with testing.db.connect() as conn: ctx = conn.begin() assert_raises_message( Exception, "breakage", testing.run_as_contextmanager, ctx, fn, 5, value=8, ) self._assert_no_data() def test_connection_as_ctx(self): fn = self._trans_fn() with testing.db.begin() as conn: fn(conn, 5, value=8) self._assert_fn(5, value=8) @testing.fails_on("mysql+oursql", "oursql bug ? 
getting wrong rowcount")
    def test_connect_as_ctx_noautocommit(self):
        """With autocommit=False on a plain connect() context, the insert is
        not committed and no data remains afterwards."""
        fn = self._trans_fn()
        self._assert_no_data()
        with testing.db.connect() as conn:
            ctx = conn.execution_options(autocommit=False)
            testing.run_as_contextmanager(ctx, fn, 5, value=8)
            # autocommit is off
            self._assert_no_data()


class CompiledCacheTest(fixtures.TestBase):
    __backend__ = True

    def test_cache(self, connection, metadata):
        """Three executions of the same insert compile only once and leave a
        single entry in the user-supplied compiled_cache dict."""
        users = Table(
            "users",
            metadata,
            Column(
                "user_id", INT, primary_key=True, test_needs_autoincrement=True
            ),
            Column("user_name", VARCHAR(20)),
            Column("extra_data", VARCHAR(20)),
        )
        users.create(connection)

        conn = connection
        cache = {}
        cached_conn = conn.execution_options(compiled_cache=cache)

        ins = users.insert()
        # wrap _compiler so compile invocations can be counted
        with patch.object(
            ins, "_compiler", Mock(side_effect=ins._compiler)
        ) as compile_mock:
            cached_conn.execute(ins, {"user_name": "u1"})
            cached_conn.execute(ins, {"user_name": "u2"})
            cached_conn.execute(ins, {"user_name": "u3"})
            # NOTE(review): grouping reconstructed from a collapsed source;
            # confirm whether these assertions sit inside the `with` block.
            eq_(compile_mock.call_count, 1)
            assert len(cache) == 1
        eq_(conn.exec_driver_sql("select count(*) from users").scalar(), 3)

    @testing.only_on(
        ["sqlite", "mysql", "postgresql"],
        "uses blob value that is problematic for some DBAPIs",
    )
    def test_cache_noleak_on_statement_values(self, metadata, connection):
        # This is a non regression test for an object reference leak caused
        # by the compiled_cache.
photo = Table( "photo", metadata, Column( "id", Integer, primary_key=True, test_needs_autoincrement=True ), Column("photo_blob", LargeBinary()), ) metadata.create_all(connection) cache = {} cached_conn = connection.execution_options(compiled_cache=cache) class PhotoBlob(bytearray): pass blob = PhotoBlob(100) ref_blob = weakref.ref(blob) ins = photo.insert() with patch.object( ins, "_compiler", Mock(side_effect=ins._compiler) ) as compile_mock: cached_conn.execute(ins, {"photo_blob": blob}) eq_(compile_mock.call_count, 1) eq_(len(cache), 1) eq_( connection.exec_driver_sql("select count(*) from photo").scalar(), 1, ) del blob gc_collect() # The compiled statement cache should not hold any reference to the # the statement values (only the keys). eq_(ref_blob(), None) def test_keys_independent_of_ordering(self, connection, metadata): users = Table( "users", metadata, Column( "user_id", INT, primary_key=True, test_needs_autoincrement=True ), Column("user_name", VARCHAR(20)), Column("extra_data", VARCHAR(20)), ) users.create(connection) connection.execute( users.insert(), {"user_id": 1, "user_name": "u1", "extra_data": "e1"}, ) cache = {} cached_conn = connection.execution_options(compiled_cache=cache) upd = users.update().where(users.c.user_id == bindparam("b_user_id")) with patch.object( upd, "_compiler", Mock(side_effect=upd._compiler) ) as compile_mock: cached_conn.execute( upd, util.OrderedDict( [ ("b_user_id", 1), ("user_name", "u2"), ("extra_data", "e2"), ] ), ) cached_conn.execute( upd, util.OrderedDict( [ ("b_user_id", 1), ("extra_data", "e3"), ("user_name", "u3"), ] ), ) cached_conn.execute( upd, util.OrderedDict( [ ("extra_data", "e4"), ("user_name", "u4"), ("b_user_id", 1), ] ), ) eq_(compile_mock.call_count, 1) eq_(len(cache), 1) @testing.requires.schemas def test_schema_translate_in_key(self, metadata, connection): Table("x", metadata, Column("q", Integer)) Table("x", metadata, Column("q", Integer), schema=config.test_schema) metadata.create_all(connection) 
m = MetaData() t1 = Table("x", m, Column("q", Integer)) ins = t1.insert() stmt = select(t1.c.q) cache = {} conn = connection.execution_options(compiled_cache=cache) conn.execute(ins, {"q": 1}) eq_(conn.scalar(stmt), 1) conn = connection.execution_options( compiled_cache=cache, schema_translate_map={None: config.test_schema}, ) conn.execute(ins, {"q": 2}) eq_(conn.scalar(stmt), 2) conn = connection.execution_options( compiled_cache=cache, schema_translate_map={None: None}, ) # should use default schema again even though statement # was compiled with test_schema in the map eq_(conn.scalar(stmt), 1) conn = connection.execution_options( compiled_cache=cache, ) eq_(conn.scalar(stmt), 1) class MockStrategyTest(fixtures.TestBase): def _engine_fixture(self): buf = util.StringIO() def dump(sql, *multiparams, **params): buf.write(util.text_type(sql.compile(dialect=engine.dialect))) engine = create_mock_engine("postgresql://", executor=dump) return engine, buf def test_sequence_not_duped(self): engine, buf = self._engine_fixture() metadata = MetaData() t = Table( "testtable", metadata, Column( "pk", Integer, Sequence("testtable_pk_seq"), primary_key=True, ), ) t.create(engine) t.drop(engine) eq_(re.findall(r"CREATE (\w+)", buf.getvalue()), ["SEQUENCE", "TABLE"]) eq_(re.findall(r"DROP (\w+)", buf.getvalue()), ["TABLE", "SEQUENCE"]) class SchemaTranslateTest(fixtures.TestBase, testing.AssertsExecutionResults): __requires__ = ("schemas",) __backend__ = True @testing.fixture def plain_tables(self, metadata): t1 = Table( "t1", metadata, Column("x", Integer), schema=config.test_schema ) t2 = Table( "t2", metadata, Column("x", Integer), schema=config.test_schema ) t3 = Table("t3", metadata, Column("x", Integer), schema=None) return t1, t2, t3 def test_create_table(self, plain_tables, connection): map_ = { None: config.test_schema, "foo": config.test_schema, "bar": None, } metadata = MetaData() t1 = Table("t1", metadata, Column("x", Integer)) t2 = Table("t2", metadata, Column("x", 
Integer), schema="foo") t3 = Table("t3", metadata, Column("x", Integer), schema="bar") with self.sql_execution_asserter(connection) as asserter: conn = connection.execution_options(schema_translate_map=map_) t1.create(conn) t2.create(conn) t3.create(conn) t3.drop(conn) t2.drop(conn) t1.drop(conn) asserter.assert_( CompiledSQL("CREATE TABLE [SCHEMA__none].t1 (x INTEGER)"), CompiledSQL("CREATE TABLE [SCHEMA_foo].t2 (x INTEGER)"), CompiledSQL("CREATE TABLE [SCHEMA_bar].t3 (x INTEGER)"), CompiledSQL("DROP TABLE [SCHEMA_bar].t3"), CompiledSQL("DROP TABLE [SCHEMA_foo].t2"), CompiledSQL("DROP TABLE [SCHEMA__none].t1"), ) def test_ddl_hastable(self, plain_tables, connection): map_ = { None: config.test_schema, "foo": config.test_schema, "bar": None, } metadata = MetaData() Table("t1", metadata, Column("x", Integer)) Table("t2", metadata, Column("x", Integer), schema="foo") Table("t3", metadata, Column("x", Integer), schema="bar") conn = connection.execution_options(schema_translate_map=map_) metadata.create_all(conn) insp = inspect(connection) is_true(insp.has_table("t1", schema=config.test_schema)) is_true(insp.has_table("t2", schema=config.test_schema)) is_true(insp.has_table("t3", schema=None)) conn = connection.execution_options(schema_translate_map=map_) # if this test fails, the tables won't get dropped. 
so need a # more robust fixture for this metadata.drop_all(conn) insp = inspect(connection) is_false(insp.has_table("t1", schema=config.test_schema)) is_false(insp.has_table("t2", schema=config.test_schema)) is_false(insp.has_table("t3", schema=None)) def test_option_on_execute(self, plain_tables, connection): # provided by metadata fixture provided by plain_tables fixture self.metadata.create_all(connection) map_ = { None: config.test_schema, "foo": config.test_schema, "bar": None, } metadata = MetaData() t1 = Table("t1", metadata, Column("x", Integer)) t2 = Table("t2", metadata, Column("x", Integer), schema="foo") t3 = Table("t3", metadata, Column("x", Integer), schema="bar") with self.sql_execution_asserter(connection) as asserter: conn = connection execution_options = {"schema_translate_map": map_} conn._execute_20( t1.insert(), {"x": 1}, execution_options=execution_options ) conn._execute_20( t2.insert(), {"x": 1}, execution_options=execution_options ) conn._execute_20( t3.insert(), {"x": 1}, execution_options=execution_options ) conn._execute_20( t1.update().values(x=1).where(t1.c.x == 1), execution_options=execution_options, ) conn._execute_20( t2.update().values(x=2).where(t2.c.x == 1), execution_options=execution_options, ) conn._execute_20( t3.update().values(x=3).where(t3.c.x == 1), execution_options=execution_options, ) eq_( conn._execute_20( select(t1.c.x), execution_options=execution_options ).scalar(), 1, ) eq_( conn._execute_20( select(t2.c.x), execution_options=execution_options ).scalar(), 2, ) eq_( conn._execute_20( select(t3.c.x), execution_options=execution_options ).scalar(), 3, ) conn._execute_20(t1.delete(), execution_options=execution_options) conn._execute_20(t2.delete(), execution_options=execution_options) conn._execute_20(t3.delete(), execution_options=execution_options) asserter.assert_( CompiledSQL("INSERT INTO [SCHEMA__none].t1 (x) VALUES (:x)"), CompiledSQL("INSERT INTO [SCHEMA_foo].t2 (x) VALUES (:x)"), CompiledSQL("INSERT INTO 
[SCHEMA_bar].t3 (x) VALUES (:x)"), CompiledSQL( "UPDATE [SCHEMA__none].t1 SET x=:x WHERE " "[SCHEMA__none].t1.x = :x_1" ), CompiledSQL( "UPDATE [SCHEMA_foo].t2 SET x=:x WHERE " "[SCHEMA_foo].t2.x = :x_1" ), CompiledSQL( "UPDATE [SCHEMA_bar].t3 SET x=:x WHERE " "[SCHEMA_bar].t3.x = :x_1" ), CompiledSQL("SELECT [SCHEMA__none].t1.x FROM [SCHEMA__none].t1"), CompiledSQL("SELECT [SCHEMA_foo].t2.x FROM [SCHEMA_foo].t2"), CompiledSQL("SELECT [SCHEMA_bar].t3.x FROM [SCHEMA_bar].t3"), CompiledSQL("DELETE FROM [SCHEMA__none].t1"), CompiledSQL("DELETE FROM [SCHEMA_foo].t2"), CompiledSQL("DELETE FROM [SCHEMA_bar].t3"), ) def test_crud(self, plain_tables, connection): # provided by metadata fixture provided by plain_tables fixture self.metadata.create_all(connection) map_ = { None: config.test_schema, "foo": config.test_schema, "bar": None, } metadata = MetaData() t1 = Table("t1", metadata, Column("x", Integer)) t2 = Table("t2", metadata, Column("x", Integer), schema="foo") t3 = Table("t3", metadata, Column("x", Integer), schema="bar") with self.sql_execution_asserter(connection) as asserter: conn = connection.execution_options(schema_translate_map=map_) conn.execute(t1.insert(), {"x": 1}) conn.execute(t2.insert(), {"x": 1}) conn.execute(t3.insert(), {"x": 1}) conn.execute(t1.update().values(x=1).where(t1.c.x == 1)) conn.execute(t2.update().values(x=2).where(t2.c.x == 1)) conn.execute(t3.update().values(x=3).where(t3.c.x == 1)) eq_(conn.scalar(select(t1.c.x)), 1) eq_(conn.scalar(select(t2.c.x)), 2) eq_(conn.scalar(select(t3.c.x)), 3) conn.execute(t1.delete()) conn.execute(t2.delete()) conn.execute(t3.delete()) asserter.assert_( CompiledSQL("INSERT INTO [SCHEMA__none].t1 (x) VALUES (:x)"), CompiledSQL("INSERT INTO [SCHEMA_foo].t2 (x) VALUES (:x)"), CompiledSQL("INSERT INTO [SCHEMA_bar].t3 (x) VALUES (:x)"), CompiledSQL( "UPDATE [SCHEMA__none].t1 SET x=:x WHERE " "[SCHEMA__none].t1.x = :x_1" ), CompiledSQL( "UPDATE [SCHEMA_foo].t2 SET x=:x WHERE " "[SCHEMA_foo].t2.x = :x_1" ), 
CompiledSQL( "UPDATE [SCHEMA_bar].t3 SET x=:x WHERE " "[SCHEMA_bar].t3.x = :x_1" ), CompiledSQL("SELECT [SCHEMA__none].t1.x FROM [SCHEMA__none].t1"), CompiledSQL("SELECT [SCHEMA_foo].t2.x FROM [SCHEMA_foo].t2"), CompiledSQL("SELECT [SCHEMA_bar].t3.x FROM [SCHEMA_bar].t3"), CompiledSQL("DELETE FROM [SCHEMA__none].t1"), CompiledSQL("DELETE FROM [SCHEMA_foo].t2"), CompiledSQL("DELETE FROM [SCHEMA_bar].t3"), ) def test_via_engine(self, plain_tables, metadata): with config.db.begin() as connection: metadata.create_all(connection) map_ = { None: config.test_schema, "foo": config.test_schema, "bar": None, } metadata = MetaData() t2 = Table("t2", metadata, Column("x", Integer), schema="foo") with self.sql_execution_asserter(config.db) as asserter: eng = config.db.execution_options(schema_translate_map=map_) with eng.connect() as conn: conn.execute(select(t2.c.x)) asserter.assert_( CompiledSQL("SELECT [SCHEMA_foo].t2.x FROM [SCHEMA_foo].t2") ) class ExecutionOptionsTest(fixtures.TestBase): def test_dialect_conn_options(self, testing_engine): engine = testing_engine("sqlite://", options=dict(_initialize=False)) engine.dialect = Mock() with engine.connect() as conn: c2 = conn.execution_options(foo="bar") eq_( engine.dialect.set_connection_execution_options.mock_calls, [call(c2, {"foo": "bar"})], ) def test_dialect_engine_options(self, testing_engine): engine = testing_engine("sqlite://") engine.dialect = Mock() e2 = engine.execution_options(foo="bar") eq_( engine.dialect.set_engine_execution_options.mock_calls, [call(e2, {"foo": "bar"})], ) def test_dialect_engine_construction_options(self): dialect = Mock() engine = Engine( Mock(), dialect, Mock(), execution_options={"foo": "bar"} ) eq_( dialect.set_engine_execution_options.mock_calls, [call(engine, {"foo": "bar"})], ) def test_propagate_engine_to_connection(self, testing_engine): engine = testing_engine( "sqlite://", options=dict(execution_options={"foo": "bar"}) ) with engine.connect() as conn: eq_(conn._execution_options, 
{"foo": "bar"}) def test_propagate_option_engine_to_connection(self, testing_engine): e1 = testing_engine( "sqlite://", options=dict(execution_options={"foo": "bar"}) ) e2 = e1.execution_options(bat="hoho") c1 = e1.connect() c2 = e2.connect() eq_(c1._execution_options, {"foo": "bar"}) eq_(c2._execution_options, {"foo": "bar", "bat": "hoho"}) c1.close() c2.close() def test_get_engine_execution_options(self, testing_engine): engine = testing_engine("sqlite://") engine.dialect = Mock() e2 = engine.execution_options(foo="bar") eq_(e2.get_execution_options(), {"foo": "bar"}) def test_get_connection_execution_options(self, testing_engine): engine = testing_engine("sqlite://", options=dict(_initialize=False)) engine.dialect = Mock() with engine.connect() as conn: c = conn.execution_options(foo="bar") eq_(c.get_execution_options(), {"foo": "bar"}) class EngineEventsTest(fixtures.TestBase): __requires__ = ("ad_hoc_engines",) __backend__ = True def teardown_test(self): Engine.dispatch._clear() Engine._has_events = False def _assert_stmts(self, expected, received): list(received) for stmt, params, posn in expected: if not received: assert False, "Nothing available for stmt: %s" % stmt while received: teststmt, testparams, testmultiparams = received.pop(0) teststmt = ( re.compile(r"[\n\t ]+", re.M).sub(" ", teststmt).strip() ) if teststmt.startswith(stmt) and ( testparams == params or testparams == posn ): break def test_per_engine_independence(self, testing_engine): e1 = testing_engine(config.db_url) e2 = testing_engine(config.db_url) canary = Mock() event.listen(e1, "before_execute", canary) s1 = select(1) s2 = select(2) with e1.connect() as conn: conn.execute(s1) with e2.connect() as conn: conn.execute(s2) eq_([arg[1][1] for arg in canary.mock_calls], [s1]) event.listen(e2, "before_execute", canary) with e1.connect() as conn: conn.execute(s1) with e2.connect() as conn: conn.execute(s2) eq_([arg[1][1] for arg in canary.mock_calls], [s1, s1, s2]) def 
test_per_engine_plus_global(self, testing_engine): canary = Mock() event.listen(Engine, "before_execute", canary.be1) e1 = testing_engine(config.db_url) e2 = testing_engine(config.db_url) event.listen(e1, "before_execute", canary.be2) event.listen(Engine, "before_execute", canary.be3) with e1.connect() as conn: conn.execute(select(1)) eq_(canary.be1.call_count, 1) eq_(canary.be2.call_count, 1) with e2.connect() as conn: conn.execute(select(1)) eq_(canary.be1.call_count, 2) eq_(canary.be2.call_count, 1) eq_(canary.be3.call_count, 2) def test_emit_sql_in_autobegin(self, testing_engine): e1 = testing_engine(config.db_url) canary = Mock() @event.listens_for(e1, "begin") def begin(connection): result = connection.execute(select(1)).scalar() canary.got_result(result) with e1.connect() as conn: assert not conn._is_future with conn.begin(): conn.execute(select(1)).scalar() assert conn.in_transaction() assert not conn.in_transaction() eq_(canary.mock_calls, [call.got_result(1)]) def test_per_connection_plus_engine(self, testing_engine): canary = Mock() e1 = testing_engine(config.db_url) event.listen(e1, "before_execute", canary.be1) conn = e1.connect() event.listen(conn, "before_execute", canary.be2) conn.execute(select(1)) eq_(canary.be1.call_count, 1) eq_(canary.be2.call_count, 1) if testing.requires.legacy_engine.enabled: conn._branch().execute(select(1)) eq_(canary.be1.call_count, 2) eq_(canary.be2.call_count, 2) @testing.combinations( (True, False), (True, True), (False, False), argnames="mock_out_on_connect, add_our_own_onconnect", ) def test_insert_connect_is_definitely_first( self, mock_out_on_connect, add_our_own_onconnect, testing_engine ): """test issue #5708. We want to ensure that a single "connect" event may be invoked *before* dialect initialize as well as before dialect on_connects. 
This is also partially reliant on the changes we made as a result of #5497, however here we go further with the changes and remove use of the pool first_connect() event entirely so that the startup for a dialect is fully consistent. """ if mock_out_on_connect: if add_our_own_onconnect: def our_connect(connection): m1.our_connect("our connect event") patcher = mock.patch.object( config.db.dialect.__class__, "on_connect", lambda self: our_connect, ) else: patcher = mock.patch.object( config.db.dialect.__class__, "on_connect", lambda self: None, ) else: patcher = util.nullcontext() with patcher: e1 = testing_engine(config.db_url) initialize = e1.dialect.initialize def init(connection): initialize(connection) with mock.patch.object( e1.dialect, "initialize", side_effect=init ) as m1: @event.listens_for(e1, "connect", insert=True) def go1(dbapi_conn, xyz): m1.foo("custom event first") @event.listens_for(e1, "connect") def go2(dbapi_conn, xyz): m1.foo("custom event last") c1 = e1.connect() m1.bar("ok next connection") c2 = e1.connect() # this happens with sqlite singletonthreadpool. # we can almost use testing.requires.independent_connections # but sqlite file backend will also have independent # connections here. 
its_the_same_connection = ( c1.connection.dbapi_connection is c2.connection.dbapi_connection ) c1.close() c2.close() if add_our_own_onconnect: calls = [ mock.call.foo("custom event first"), mock.call.our_connect("our connect event"), mock.call(mock.ANY), mock.call.foo("custom event last"), mock.call.bar("ok next connection"), ] else: calls = [ mock.call.foo("custom event first"), mock.call(mock.ANY), mock.call.foo("custom event last"), mock.call.bar("ok next connection"), ] if not its_the_same_connection: if add_our_own_onconnect: calls.extend( [ mock.call.foo("custom event first"), mock.call.our_connect("our connect event"), mock.call.foo("custom event last"), ] ) else: calls.extend( [ mock.call.foo("custom event first"), mock.call.foo("custom event last"), ] ) eq_(m1.mock_calls, calls) def test_new_exec_driver_sql_no_events(self): m1 = Mock() def select1(db): return str(select(1).compile(dialect=db.dialect)) with testing.db.connect() as conn: event.listen(conn, "before_execute", m1.before_execute) event.listen(conn, "after_execute", m1.after_execute) conn.exec_driver_sql(select1(testing.db)) eq_(m1.mock_calls, []) def test_add_event_after_connect(self, testing_engine): # new feature as of #2978 canary = Mock() e1 = testing_engine(config.db_url, future=False) assert not e1._has_events conn = e1.connect() event.listen(e1, "before_execute", canary.be1) conn.execute(select(1)) eq_(canary.be1.call_count, 1) conn._branch().execute(select(1)) eq_(canary.be1.call_count, 2) def test_force_conn_events_false(self, testing_engine): canary = Mock() e1 = testing_engine(config.db_url, future=False) assert not e1._has_events event.listen(e1, "before_execute", canary.be1) conn = e1._connection_cls( e1, connection=e1.raw_connection(), _has_events=False ) conn.execute(select(1)) eq_(canary.be1.call_count, 0) conn._branch().execute(select(1)) eq_(canary.be1.call_count, 0) def test_cursor_events_ctx_execute_scalar(self, testing_engine): canary = Mock() e1 = 
testing_engine(config.db_url) event.listen(e1, "before_cursor_execute", canary.bce) event.listen(e1, "after_cursor_execute", canary.ace) stmt = str(select(1).compile(dialect=e1.dialect)) with e1.connect() as conn: dialect = conn.dialect ctx = dialect.execution_ctx_cls._init_statement( dialect, conn, conn.connection, {}, stmt, {} ) ctx._execute_scalar(stmt, Integer()) eq_( canary.bce.mock_calls, [call(conn, ctx.cursor, stmt, ctx.parameters[0], ctx, False)], ) eq_( canary.ace.mock_calls, [call(conn, ctx.cursor, stmt, ctx.parameters[0], ctx, False)], ) def test_cursor_events_execute(self, testing_engine): canary = Mock() e1 = testing_engine(config.db_url) event.listen(e1, "before_cursor_execute", canary.bce) event.listen(e1, "after_cursor_execute", canary.ace) stmt = str(select(1).compile(dialect=e1.dialect)) with e1.connect() as conn: result = conn.exec_driver_sql(stmt) eq_(result.scalar(), 1) ctx = result.context eq_( canary.bce.mock_calls, [call(conn, ctx.cursor, stmt, ctx.parameters[0], ctx, False)], ) eq_( canary.ace.mock_calls, [call(conn, ctx.cursor, stmt, ctx.parameters[0], ctx, False)], ) @testing.combinations( ( ([{"x": 5, "y": 10}, {"x": 8, "y": 9}],), {}, [{"x": 5, "y": 10}, {"x": 8, "y": 9}], {}, ), (({"z": 10},), {}, [], {"z": 10}), argnames="multiparams, params, expected_multiparams, expected_params", ) def test_modify_parameters_from_event_one( self, multiparams, params, expected_multiparams, expected_params, testing_engine, ): # this is testing both the normalization added to parameters # as of I97cb4d06adfcc6b889f10d01cc7775925cffb116 as well as # that the return value from the event is taken as the new set # of parameters. 
def before_execute( conn, clauseelement, multiparams, params, execution_options ): eq_(multiparams, expected_multiparams) eq_(params, expected_params) return clauseelement, (), {"q": "15"} def after_execute( conn, clauseelement, multiparams, params, result, execution_options ): eq_(multiparams, ()) eq_(params, {"q": "15"}) e1 = testing_engine(config.db_url) event.listen(e1, "before_execute", before_execute, retval=True) event.listen(e1, "after_execute", after_execute) with e1.connect() as conn: result = conn.execute( select(bindparam("q", type_=String)), *multiparams, **params ) eq_(result.all(), [("15",)]) @testing.provide_metadata def test_modify_parameters_from_event_two(self, connection): t = Table("t", self.metadata, Column("q", Integer)) t.create(connection) def before_execute( conn, clauseelement, multiparams, params, execution_options ): return clauseelement, [{"q": 15}, {"q": 19}], {} event.listen(connection, "before_execute", before_execute, retval=True) connection.execute(t.insert(), {"q": 12}) event.remove(connection, "before_execute", before_execute) eq_( connection.execute(select(t).order_by(t.c.q)).fetchall(), [(15,), (19,)], ) def test_modify_parameters_from_event_three( self, connection, testing_engine ): def before_execute( conn, clauseelement, multiparams, params, execution_options ): return clauseelement, [{"q": 15}, {"q": 19}], {"q": 7} e1 = testing_engine(config.db_url) event.listen(e1, "before_execute", before_execute, retval=True) with expect_raises_message( tsa.exc.InvalidRequestError, "Event handler can't return non-empty multiparams " "and params at the same time", ): with e1.connect() as conn: conn.execute(select(literal("1"))) @testing.only_on("sqlite") def test_dont_modify_statement_driversql(self, connection): m1 = mock.Mock() @event.listens_for(connection, "before_execute", retval=True) def _modify( conn, clauseelement, multiparams, params, execution_options ): m1.run_event() return clauseelement.replace("hi", "there"), multiparams, 
params # the event does not take effect for the "driver SQL" option eq_(connection.exec_driver_sql("select 'hi'").scalar(), "hi") # event is not called at all eq_(m1.mock_calls, []) @testing.combinations((True,), (False,), argnames="future") @testing.only_on("sqlite") def test_modify_statement_internal_driversql(self, connection, future): m1 = mock.Mock() @event.listens_for(connection, "before_execute", retval=True) def _modify( conn, clauseelement, multiparams, params, execution_options ): m1.run_event() return clauseelement.replace("hi", "there"), multiparams, params eq_( connection._exec_driver_sql( "select 'hi'", [], {}, {}, future=future ).scalar(), "hi" if future else "there", ) if future: eq_(m1.mock_calls, []) else: eq_(m1.mock_calls, [call.run_event()]) def test_modify_statement_clauseelement(self, connection): @event.listens_for(connection, "before_execute", retval=True) def _modify( conn, clauseelement, multiparams, params, execution_options ): return select(literal_column("'there'")), multiparams, params eq_(connection.scalar(select(literal_column("'hi'"))), "there") def test_argument_format_execute(self, testing_engine): def before_execute( conn, clauseelement, multiparams, params, execution_options ): assert isinstance(multiparams, (list, tuple)) assert isinstance(params, collections_abc.Mapping) def after_execute( conn, clauseelement, multiparams, params, result, execution_options ): assert isinstance(multiparams, (list, tuple)) assert isinstance(params, collections_abc.Mapping) e1 = testing_engine(config.db_url) event.listen(e1, "before_execute", before_execute) event.listen(e1, "after_execute", after_execute) with e1.connect() as conn: conn.execute(select(1)) conn.execute(select(1).compile(dialect=e1.dialect).statement) conn.execute(select(1).compile(dialect=e1.dialect)) conn._execute_compiled( select(1).compile(dialect=e1.dialect), (), {}, {} ) def test_execute_events(self): stmts = [] cursor_stmts = [] def execute( conn, clauseelement, 
multiparams, params, execution_options ): stmts.append((str(clauseelement), params, multiparams)) def cursor_execute( conn, cursor, statement, parameters, context, executemany ): cursor_stmts.append((str(statement), parameters, None)) # TODO: this test is kind of a mess for engine in [ engines.testing_engine(options=dict(implicit_returning=False)), engines.testing_engine( options=dict(implicit_returning=False) ).connect(), ]: event.listen(engine, "before_execute", execute) event.listen(engine, "before_cursor_execute", cursor_execute) m = MetaData() t1 = Table( "t1", m, Column("c1", Integer, primary_key=True), Column( "c2", String(50), default=func.lower("Foo"), primary_key=True, ), ) if isinstance(engine, Connection): ctx = None conn = engine else: ctx = conn = engine.connect() trans = conn.begin() try: m.create_all(conn, checkfirst=False) try: conn.execute(t1.insert(), dict(c1=5, c2="some data")) conn.execute(t1.insert(), dict(c1=6)) eq_( conn.execute(text("select * from t1")).fetchall(), [(5, "some data"), (6, "foo")], ) finally: m.drop_all(conn) trans.commit() finally: if ctx: ctx.close() compiled = [ ("CREATE TABLE t1", {}, None), ( "INSERT INTO t1 (c1, c2)", {"c2": "some data", "c1": 5}, (), ), ("INSERT INTO t1 (c1, c2)", {"c1": 6}, ()), ("select * from t1", {}, None), ("DROP TABLE t1", {}, None), ] cursor = [ ("CREATE TABLE t1", {}, ()), ( "INSERT INTO t1 (c1, c2)", {"c2": "some data", "c1": 5}, (5, "some data"), ), ("SELECT lower", {"lower_2": "Foo"}, ("Foo",)), ( "INSERT INTO t1 (c1, c2)", {"c2": "foo", "c1": 6}, (6, "foo"), ), ("select * from t1", {}, ()), ("DROP TABLE t1", {}, ()), ] self._assert_stmts(compiled, stmts) self._assert_stmts(cursor, cursor_stmts) def test_options(self): canary = [] def execute(conn, *args, **kw): canary.append("execute") def cursor_execute(conn, *args, **kw): canary.append("cursor_execute") engine = engines.testing_engine() event.listen(engine, "before_execute", execute) event.listen(engine, "before_cursor_execute", 
cursor_execute) conn = engine.connect() c2 = conn.execution_options(foo="bar") eq_(c2._execution_options, {"foo": "bar"}) c2.execute(select(1)) c3 = c2.execution_options(bar="bat") eq_(c3._execution_options, {"foo": "bar", "bar": "bat"}) eq_(canary, ["execute", "cursor_execute"]) @testing.requires.ad_hoc_engines def test_generative_engine_event_dispatch(self): canary = [] def l1(*arg, **kw): canary.append("l1") def l2(*arg, **kw): canary.append("l2") def l3(*arg, **kw): canary.append("l3") eng = engines.testing_engine( options={"execution_options": {"base": "x1"}} ) event.listen(eng, "before_execute", l1) eng1 = eng.execution_options(foo="b1") event.listen(eng, "before_execute", l2) event.listen(eng1, "before_execute", l3) with eng.connect() as conn: conn.execute(select(1)) eq_(canary, ["l1", "l2"]) with eng1.connect() as conn: conn.execute(select(1)) eq_(canary, ["l1", "l2", "l3", "l1", "l2"]) @testing.requires.ad_hoc_engines def test_clslevel_engine_event_options(self): canary = [] def l1(*arg, **kw): canary.append("l1") def l2(*arg, **kw): canary.append("l2") def l3(*arg, **kw): canary.append("l3") def l4(*arg, **kw): canary.append("l4") event.listen(Engine, "before_execute", l1) eng = engines.testing_engine( options={"execution_options": {"base": "x1"}} ) event.listen(eng, "before_execute", l2) eng1 = eng.execution_options(foo="b1") event.listen(eng, "before_execute", l3) event.listen(eng1, "before_execute", l4) with eng.connect() as conn: conn.execute(select(1)) eq_(canary, ["l1", "l2", "l3"]) with eng1.connect() as conn: conn.execute(select(1)) eq_(canary, ["l1", "l2", "l3", "l4", "l1", "l2", "l3"]) canary[:] = [] event.remove(Engine, "before_execute", l1) event.remove(eng1, "before_execute", l4) event.remove(eng, "before_execute", l3) with eng1.connect() as conn: conn.execute(select(1)) eq_(canary, ["l2"]) @testing.requires.ad_hoc_engines def test_cant_listen_to_option_engine(self): from sqlalchemy.engine import base def evt(*arg, **kw): pass 
assert_raises_message( tsa.exc.InvalidRequestError, r"Can't assign an event directly to the " "<class 'sqlalchemy.engine.base.OptionEngine'> class", event.listen, base.OptionEngine, "before_cursor_execute", evt, ) @testing.requires.ad_hoc_engines def test_dispose_event(self, testing_engine): canary = Mock() eng = testing_engine(testing.db.url) event.listen(eng, "engine_disposed", canary) conn = eng.connect() conn.close() eng.dispose() conn = eng.connect() conn.close() eq_(canary.mock_calls, [call(eng)]) eng.dispose() eq_(canary.mock_calls, [call(eng), call(eng)]) def test_retval_flag(self): canary = [] def tracker(name): def go(conn, *args, **kw): canary.append(name) return go def execute( conn, clauseelement, multiparams, params, execution_options ): canary.append("execute") return clauseelement, multiparams, params def cursor_execute( conn, cursor, statement, parameters, context, executemany ): canary.append("cursor_execute") return statement, parameters engine = engines.testing_engine() assert_raises( tsa.exc.ArgumentError, event.listen, engine, "begin", tracker("begin"), retval=True, ) event.listen(engine, "before_execute", execute, retval=True) event.listen( engine, "before_cursor_execute", cursor_execute, retval=True ) with engine.connect() as conn: conn.execute(select(1)) eq_(canary, ["execute", "cursor_execute"]) @testing.requires.legacy_engine def test_engine_connect(self): engine = engines.testing_engine() tracker = Mock() event.listen(engine, "engine_connect", tracker) c1 = engine.connect() c2 = c1._branch() c1.close() eq_(tracker.mock_calls, [call(c1, False), call(c2, True)]) def test_execution_options(self): engine = engines.testing_engine() engine_tracker = Mock() conn_tracker = Mock() event.listen(engine, "set_engine_execution_options", engine_tracker) event.listen(engine, "set_connection_execution_options", conn_tracker) e2 = engine.execution_options(e1="opt_e1") c1 = engine.connect() c2 = c1.execution_options(c1="opt_c1") c3 = e2.connect() c4 = 
c3.execution_options(c3="opt_c3") eq_(engine_tracker.mock_calls, [call(e2, {"e1": "opt_e1"})]) eq_( conn_tracker.mock_calls, [call(c2, {"c1": "opt_c1"}), call(c4, {"c3": "opt_c3"})], ) @testing.requires.sequences @testing.provide_metadata def test_cursor_execute(self): canary = [] def tracker(name): def go(conn, cursor, statement, parameters, context, executemany): canary.append((statement, context)) return go engine = engines.testing_engine() t = Table( "t", self.metadata, Column( "x", Integer, Sequence("t_id_seq"), primary_key=True, ), implicit_returning=False, ) self.metadata.create_all(engine) with engine.begin() as conn: event.listen( conn, "before_cursor_execute", tracker("cursor_execute") ) conn.execute(t.insert()) # we see the sequence pre-executed in the first call assert "t_id_seq" in canary[0][0] assert "INSERT" in canary[1][0] # same context is_(canary[0][1], canary[1][1]) def test_transactional(self): canary = [] def tracker(name): def go(conn, *args, **kw): canary.append(name) return go engine = engines.testing_engine() event.listen(engine, "before_execute", tracker("execute")) event.listen( engine, "before_cursor_execute", tracker("cursor_execute") ) event.listen(engine, "begin", tracker("begin")) event.listen(engine, "commit", tracker("commit")) event.listen(engine, "rollback", tracker("rollback")) with engine.connect() as conn: trans = conn.begin() conn.execute(select(1)) trans.rollback() trans = conn.begin() conn.execute(select(1)) trans.commit() eq_( canary, [ "begin", "execute", "cursor_execute", "rollback", "begin", "execute", "cursor_execute", "commit", ], ) def test_transactional_named(self): canary = [] def tracker(name): def go(*args, **kw): canary.append((name, set(kw))) return go engine = engines.testing_engine() event.listen(engine, "before_execute", tracker("execute"), named=True) event.listen( engine, "before_cursor_execute", tracker("cursor_execute"), named=True, ) event.listen(engine, "begin", tracker("begin"), named=True) 
event.listen(engine, "commit", tracker("commit"), named=True) event.listen(engine, "rollback", tracker("rollback"), named=True) with engine.connect() as conn: trans = conn.begin() conn.execute(select(1)) trans.rollback() trans = conn.begin() conn.execute(select(1)) trans.commit() eq_( canary, [ ("begin", set(["conn"])), ( "execute", set( [ "conn", "clauseelement", "multiparams", "params", "execution_options", ] ), ), ( "cursor_execute", set( [ "conn", "cursor", "executemany", "statement", "parameters", "context", ] ), ), ("rollback", set(["conn"])), ("begin", set(["conn"])), ( "execute", set( [ "conn", "clauseelement", "multiparams", "params", "execution_options", ] ), ), ( "cursor_execute", set( [ "conn", "cursor", "executemany", "statement", "parameters", "context", ] ), ), ("commit", set(["conn"])), ], ) @testing.requires.savepoints @testing.requires.two_phase_transactions def test_transactional_advanced(self): canary1 = [] def tracker1(name): def go(*args, **kw): canary1.append(name) return go canary2 = [] def tracker2(name): def go(*args, **kw): canary2.append(name) return go engine = engines.testing_engine() for name in [ "begin", "savepoint", "rollback_savepoint", "release_savepoint", "rollback", "begin_twophase", "prepare_twophase", "commit_twophase", ]: event.listen(engine, "%s" % name, tracker1(name)) conn = engine.connect() for name in [ "begin", "savepoint", "rollback_savepoint", "release_savepoint", "rollback", "begin_twophase", "prepare_twophase", "commit_twophase", ]: event.listen(conn, "%s" % name, tracker2(name)) trans = conn.begin() trans2 = conn.begin_nested() conn.execute(select(1)) trans2.rollback() trans2 = conn.begin_nested() conn.execute(select(1)) trans2.commit() trans.rollback() trans = conn.begin_twophase() conn.execute(select(1)) trans.prepare() trans.commit() eq_( canary1, [ "begin", "savepoint", "rollback_savepoint", "savepoint", "release_savepoint", "rollback", "begin_twophase", "prepare_twophase", "commit_twophase", ], ) eq_( 
canary2, [ "begin", "savepoint", "rollback_savepoint", "savepoint", "release_savepoint", "rollback", "begin_twophase", "prepare_twophase", "commit_twophase", ], ) class FutureEngineEventsTest(fixtures.FutureEngineMixin, EngineEventsTest): def test_future_fixture(self, testing_engine): e1 = testing_engine() assert e1._is_future with e1.connect() as conn: assert conn._is_future def test_emit_sql_in_autobegin(self, testing_engine): e1 = testing_engine(config.db_url) canary = Mock() @event.listens_for(e1, "begin") def begin(connection): result = connection.execute(select(1)).scalar() canary.got_result(result) with e1.connect() as conn: assert conn._is_future conn.execute(select(1)).scalar() assert conn.in_transaction() conn.commit() assert not conn.in_transaction() eq_(canary.mock_calls, [call.got_result(1)]) class HandleErrorTest(fixtures.TestBase): __requires__ = ("ad_hoc_engines",) __backend__ = True def teardown_test(self): Engine.dispatch._clear() Engine._has_events = False def test_handle_error(self): engine = engines.testing_engine() canary = Mock(return_value=None) event.listen(engine, "handle_error", canary) with engine.connect() as conn: try: conn.exec_driver_sql("SELECT FOO FROM I_DONT_EXIST") assert False except tsa.exc.DBAPIError as e: ctx = canary.mock_calls[0][1][0] eq_(ctx.original_exception, e.orig) is_(ctx.sqlalchemy_exception, e) eq_(ctx.statement, "SELECT FOO FROM I_DONT_EXIST") def test_exception_event_reraise(self): engine = engines.testing_engine() class MyException(Exception): pass @event.listens_for(engine, "handle_error", retval=True) def err(context): stmt = context.statement exception = context.original_exception if "ERROR ONE" in str(stmt): return MyException("my exception") elif "ERROR TWO" in str(stmt): return exception else: return None conn = engine.connect() # case 1: custom exception assert_raises_message( MyException, "my exception", conn.exec_driver_sql, "SELECT 'ERROR ONE' FROM I_DONT_EXIST", ) # case 2: return the DBAPI exception 
we're given; # no wrapping should occur assert_raises( conn.dialect.dbapi.Error, conn.exec_driver_sql, "SELECT 'ERROR TWO' FROM I_DONT_EXIST", ) # case 3: normal wrapping assert_raises( tsa.exc.DBAPIError, conn.exec_driver_sql, "SELECT 'ERROR THREE' FROM I_DONT_EXIST", ) def test_exception_event_reraise_chaining(self): engine = engines.testing_engine() class MyException1(Exception): pass class MyException2(Exception): pass class MyException3(Exception): pass @event.listens_for(engine, "handle_error", retval=True) def err1(context): stmt = context.statement if ( "ERROR ONE" in str(stmt) or "ERROR TWO" in str(stmt) or "ERROR THREE" in str(stmt) ): return MyException1("my exception") elif "ERROR FOUR" in str(stmt): raise MyException3("my exception short circuit") @event.listens_for(engine, "handle_error", retval=True) def err2(context): stmt = context.statement if ( "ERROR ONE" in str(stmt) or "ERROR FOUR" in str(stmt) ) and isinstance(context.chained_exception, MyException1): raise MyException2("my exception chained") elif "ERROR TWO" in str(stmt): return context.chained_exception else: return None conn = engine.connect() with patch.object( engine.dialect.execution_ctx_cls, "handle_dbapi_exception" ) as patched: assert_raises_message( MyException2, "my exception chained", conn.exec_driver_sql, "SELECT 'ERROR ONE' FROM I_DONT_EXIST", ) eq_(patched.call_count, 1) with patch.object( engine.dialect.execution_ctx_cls, "handle_dbapi_exception" ) as patched: assert_raises( MyException1, conn.exec_driver_sql, "SELECT 'ERROR TWO' FROM I_DONT_EXIST", ) eq_(patched.call_count, 1) with patch.object( engine.dialect.execution_ctx_cls, "handle_dbapi_exception" ) as patched: # test that non None from err1 isn't cancelled out # by err2 assert_raises( MyException1, conn.exec_driver_sql, "SELECT 'ERROR THREE' FROM I_DONT_EXIST", ) eq_(patched.call_count, 1) with patch.object( engine.dialect.execution_ctx_cls, "handle_dbapi_exception" ) as patched: assert_raises( tsa.exc.DBAPIError, 
conn.exec_driver_sql, "SELECT 'ERROR FIVE' FROM I_DONT_EXIST", ) eq_(patched.call_count, 1) with patch.object( engine.dialect.execution_ctx_cls, "handle_dbapi_exception" ) as patched: assert_raises_message( MyException3, "my exception short circuit", conn.exec_driver_sql, "SELECT 'ERROR FOUR' FROM I_DONT_EXIST", ) eq_(patched.call_count, 1) def test_exception_autorollback_fails(self): engine = engines.testing_engine() conn = engine.connect() def boom(connection): raise engine.dialect.dbapi.OperationalError("rollback failed") with expect_warnings( r"An exception has occurred during handling of a previous " r"exception. The previous exception " r"is.*(?:i_dont_exist|does not exist)", py2konly=True, ): with patch.object(conn.dialect, "do_rollback", boom): assert_raises_message( tsa.exc.OperationalError, "rollback failed", conn.exec_driver_sql, "insert into i_dont_exist (x) values ('y')", ) def test_exception_event_ad_hoc_context(self): """test that handle_error is called with a context in cases where _handle_dbapi_error() is normally called without any context. """ engine = engines.testing_engine() listener = Mock(return_value=None) event.listen(engine, "handle_error", listener) nope = SomeException("nope") class MyType(TypeDecorator): impl = Integer cache_ok = True def process_bind_param(self, value, dialect): raise nope with engine.connect() as conn: assert_raises_message( tsa.exc.StatementError, r"\(.*.SomeException\) " r"nope\n\[SQL\: u?SELECT 1 ", conn.execute, select(1).where(column("foo") == literal("bar", MyType())), ) ctx = listener.mock_calls[0][1][0] assert ctx.statement.startswith("SELECT 1 ") is_(ctx.is_disconnect, False) is_(ctx.original_exception, nope) def test_exception_event_non_dbapi_error(self): """test that handle_error is called with a context in cases where DBAPI raises an exception that is not a DBAPI exception, e.g. internal errors or encoding problems. 
""" engine = engines.testing_engine() listener = Mock(return_value=None) event.listen(engine, "handle_error", listener) nope = TypeError("I'm not a DBAPI error") with engine.connect() as c: c.connection.cursor = Mock( return_value=Mock(execute=Mock(side_effect=nope)) ) assert_raises_message( TypeError, "I'm not a DBAPI error", c.exec_driver_sql, "select ", ) ctx = listener.mock_calls[0][1][0] eq_(ctx.statement, "select ") is_(ctx.is_disconnect, False) is_(ctx.original_exception, nope) def test_exception_event_disable_handlers(self): engine = engines.testing_engine() class MyException1(Exception): pass @event.listens_for(engine, "handle_error") def err1(context): stmt = context.statement if "ERROR_ONE" in str(stmt): raise MyException1("my exception short circuit") with engine.connect() as conn: assert_raises( tsa.exc.DBAPIError, conn.execution_options( skip_user_error_events=True ).exec_driver_sql, "SELECT ERROR_ONE FROM I_DONT_EXIST", ) assert_raises( MyException1, conn.execution_options( skip_user_error_events=False ).exec_driver_sql, "SELECT ERROR_ONE FROM I_DONT_EXIST", ) def _test_alter_disconnect(self, orig_error, evt_value): engine = engines.testing_engine() @event.listens_for(engine, "handle_error") def evt(ctx): ctx.is_disconnect = evt_value with patch.object( engine.dialect, "is_disconnect", Mock(return_value=orig_error) ): with engine.connect() as c: try: c.exec_driver_sql("SELECT x FROM nonexistent") assert False except tsa.exc.StatementError as st: eq_(st.connection_invalidated, evt_value) def test_alter_disconnect_to_true(self): self._test_alter_disconnect(False, True) self._test_alter_disconnect(True, True) def test_alter_disconnect_to_false(self): self._test_alter_disconnect(True, False) self._test_alter_disconnect(False, False) @testing.requires.independent_connections def _test_alter_invalidate_pool_to_false(self, set_to_false): orig_error = True engine = engines.testing_engine() @event.listens_for(engine, "handle_error") def evt(ctx): if 
set_to_false: ctx.invalidate_pool_on_disconnect = False c1, c2, c3 = ( engine.pool.connect(), engine.pool.connect(), engine.pool.connect(), ) crecs = [conn._connection_record for conn in (c1, c2, c3)] c1.close() c2.close() c3.close() with patch.object( engine.dialect, "is_disconnect", Mock(return_value=orig_error) ): with engine.connect() as c: target_crec = c.connection._connection_record try: c.exec_driver_sql("SELECT x FROM nonexistent") assert False except tsa.exc.StatementError as st: eq_(st.connection_invalidated, True) for crec in crecs: if crec is target_crec or not set_to_false: is_not(crec.dbapi_connection, crec.get_connection()) else: is_(crec.dbapi_connection, crec.get_connection()) def test_alter_invalidate_pool_to_false(self): self._test_alter_invalidate_pool_to_false(True) def test_alter_invalidate_pool_stays_true(self): self._test_alter_invalidate_pool_to_false(False) def test_handle_error_event_connect_isolation_level(self): engine = engines.testing_engine() class MySpecialException(Exception): pass @event.listens_for(engine, "handle_error") def handle_error(ctx): raise MySpecialException("failed operation") ProgrammingError = engine.dialect.dbapi.ProgrammingError with engine.connect() as conn: with patch.object( conn.dialect, "get_isolation_level", Mock(side_effect=ProgrammingError("random error")), ): assert_raises(MySpecialException, conn.get_isolation_level) @testing.only_on("sqlite+pysqlite") def test_cursor_close_resultset_failed_connectionless(self): engine = engines.testing_engine() the_conn = [] the_cursor = [] @event.listens_for(engine, "after_cursor_execute") def go( connection, cursor, statement, parameters, context, executemany ): the_cursor.append(cursor) the_conn.append(connection) with mock.patch( "sqlalchemy.engine.cursor.BaseCursorResult.__init__", Mock(side_effect=tsa.exc.InvalidRequestError("duplicate col")), ): with engine.connect() as conn: assert_raises( tsa.exc.InvalidRequestError, conn.execute, text("select 1"), ) # cursor 
is closed assert_raises_message( engine.dialect.dbapi.ProgrammingError, "Cannot operate on a closed cursor", the_cursor[0].execute, "select 1", ) # connection is closed assert the_conn[0].closed @testing.only_on("sqlite+pysqlite") def test_cursor_close_resultset_failed_explicit(self): engine = engines.testing_engine() the_cursor = [] @event.listens_for(engine, "after_cursor_execute") def go( connection, cursor, statement, parameters, context, executemany ): the_cursor.append(cursor) conn = engine.connect() with mock.patch( "sqlalchemy.engine.cursor.BaseCursorResult.__init__", Mock(side_effect=tsa.exc.InvalidRequestError("duplicate col")), ): assert_raises( tsa.exc.InvalidRequestError, conn.execute, text("select 1"), ) # cursor is closed assert_raises_message( engine.dialect.dbapi.ProgrammingError, "Cannot operate on a closed cursor", the_cursor[0].execute, "select 1", ) # connection not closed assert not conn.closed conn.close() class OnConnectTest(fixtures.TestBase): __requires__ = ("sqlite",) def setup_test(self): e = create_engine("sqlite://") connection = Mock(get_server_version_info=Mock(return_value="5.0")) def connect(*args, **kwargs): return connection dbapi = Mock( sqlite_version_info=(99, 9, 9), version_info=(99, 9, 9), sqlite_version="99.9.9", paramstyle="named", connect=Mock(side_effect=connect), ) sqlite3 = e.dialect.dbapi dbapi.Error = (sqlite3.Error,) dbapi.ProgrammingError = sqlite3.ProgrammingError self.dbapi = dbapi self.ProgrammingError = sqlite3.ProgrammingError def test_wraps_connect_in_dbapi(self): dbapi = self.dbapi dbapi.connect = Mock(side_effect=self.ProgrammingError("random error")) try: create_engine("sqlite://", module=dbapi).connect() assert False except tsa.exc.DBAPIError as de: assert not de.connection_invalidated def test_handle_error_event_connect(self): dbapi = self.dbapi dbapi.connect = Mock(side_effect=self.ProgrammingError("random error")) class MySpecialException(Exception): pass eng = create_engine("sqlite://", module=dbapi) 
@event.listens_for(eng, "handle_error") def handle_error(ctx): assert ctx.engine is eng assert ctx.connection is None raise MySpecialException("failed operation") assert_raises(MySpecialException, eng.connect) def test_handle_error_event_revalidate(self): dbapi = self.dbapi class MySpecialException(Exception): pass eng = create_engine("sqlite://", module=dbapi, _initialize=False) @event.listens_for(eng, "handle_error") def handle_error(ctx): assert ctx.engine is eng assert ctx.connection is conn assert isinstance( ctx.sqlalchemy_exception, tsa.exc.ProgrammingError ) raise MySpecialException("failed operation") conn = eng.connect() conn.invalidate() dbapi.connect = Mock(side_effect=self.ProgrammingError("random error")) assert_raises(MySpecialException, getattr, conn, "connection") def test_handle_error_event_implicit_revalidate(self): dbapi = self.dbapi class MySpecialException(Exception): pass eng = create_engine("sqlite://", module=dbapi, _initialize=False) @event.listens_for(eng, "handle_error") def handle_error(ctx): assert ctx.engine is eng assert ctx.connection is conn assert isinstance( ctx.sqlalchemy_exception, tsa.exc.ProgrammingError ) raise MySpecialException("failed operation") conn = eng.connect() conn.invalidate() dbapi.connect = Mock(side_effect=self.ProgrammingError("random error")) assert_raises(MySpecialException, conn.execute, select(1)) def test_handle_error_custom_connect(self): dbapi = self.dbapi class MySpecialException(Exception): pass def custom_connect(): raise self.ProgrammingError("random error") eng = create_engine("sqlite://", module=dbapi, creator=custom_connect) @event.listens_for(eng, "handle_error") def handle_error(ctx): assert ctx.engine is eng assert ctx.connection is None raise MySpecialException("failed operation") assert_raises(MySpecialException, eng.connect) def test_handle_error_event_connect_invalidate_flag(self): dbapi = self.dbapi dbapi.connect = Mock( side_effect=self.ProgrammingError( "Cannot operate on a closed 
database." ) ) class MySpecialException(Exception): pass eng = create_engine("sqlite://", module=dbapi) @event.listens_for(eng, "handle_error") def handle_error(ctx): assert ctx.is_disconnect ctx.is_disconnect = False try: eng.connect() assert False except tsa.exc.DBAPIError as de: assert not de.connection_invalidated def test_cant_connect_stay_invalidated(self): class MySpecialException(Exception): pass eng = create_engine("sqlite://") @event.listens_for(eng, "handle_error") def handle_error(ctx): assert ctx.is_disconnect conn = eng.connect() conn.invalidate() eng.pool._creator = Mock( side_effect=self.ProgrammingError( "Cannot operate on a closed database." ) ) try: conn.connection assert False except tsa.exc.DBAPIError: assert conn.invalidated def test_dont_touch_non_dbapi_exception_on_connect(self): dbapi = self.dbapi dbapi.connect = Mock(side_effect=TypeError("I'm not a DBAPI error")) e = create_engine("sqlite://", module=dbapi) e.dialect.is_disconnect = is_disconnect = Mock() assert_raises_message(TypeError, "I'm not a DBAPI error", e.connect) eq_(is_disconnect.call_count, 0) def test_ensure_dialect_does_is_disconnect_no_conn(self): """test that is_disconnect() doesn't choke if no connection, cursor given.""" dialect = testing.db.dialect dbapi = dialect.dbapi assert not dialect.is_disconnect( dbapi.OperationalError("test"), None, None ) def test_invalidate_on_connect(self): """test that is_disconnect() is called during connect. interpretation of connection failures are not supported by every backend. """ dbapi = self.dbapi dbapi.connect = Mock( side_effect=self.ProgrammingError( "Cannot operate on a closed database." 
) ) e = create_engine("sqlite://", module=dbapi) try: e.connect() assert False except tsa.exc.DBAPIError as de: assert de.connection_invalidated @testing.only_on("sqlite+pysqlite") def test_initialize_connect_calls(self): """test for :ticket:`5497`, on_connect not called twice""" m1 = Mock() cls_ = testing.db.dialect.__class__ class SomeDialect(cls_): def initialize(self, connection): super(SomeDialect, self).initialize(connection) m1.initialize(connection) def on_connect(self): oc = super(SomeDialect, self).on_connect() def my_on_connect(conn): if oc: oc(conn) m1.on_connect(conn) return my_on_connect u1 = Mock( username=None, password=None, host=None, port=None, query={}, database=None, _instantiate_plugins=lambda kw: (u1, [], kw), _get_entrypoint=Mock( return_value=Mock(get_dialect_cls=lambda u: SomeDialect) ), ) eng = create_engine(u1, poolclass=QueuePool) # make sure other dialects aren't getting pulled in here eq_(eng.name, "sqlite") c = eng.connect() dbapi_conn_one = c.connection.dbapi_connection c.close() eq_( m1.mock_calls, [call.on_connect(dbapi_conn_one), call.initialize(mock.ANY)], ) c = eng.connect() eq_( m1.mock_calls, [call.on_connect(dbapi_conn_one), call.initialize(mock.ANY)], ) c2 = eng.connect() dbapi_conn_two = c2.connection.dbapi_connection is_not(dbapi_conn_one, dbapi_conn_two) eq_( m1.mock_calls, [ call.on_connect(dbapi_conn_one), call.initialize(mock.ANY), call.on_connect(dbapi_conn_two), ], ) c.close() c2.close() @testing.only_on("sqlite+pysqlite") def test_initialize_connect_race(self): """test for :ticket:`6337` fixing the regression in :ticket:`5497`, dialect init is mutexed""" m1 = [] cls_ = testing.db.dialect.__class__ class SomeDialect(cls_): def initialize(self, connection): super(SomeDialect, self).initialize(connection) m1.append("initialize") def on_connect(self): oc = super(SomeDialect, self).on_connect() def my_on_connect(conn): if oc: oc(conn) m1.append("on_connect") return my_on_connect u1 = Mock( username=None, password=None, 
host=None, port=None, query={}, database=None, _instantiate_plugins=lambda kw: (u1, [], kw), _get_entrypoint=Mock( return_value=Mock(get_dialect_cls=lambda u: SomeDialect) ), ) for j in range(5): m1[:] = [] eng = create_engine( u1, poolclass=NullPool, connect_args={"check_same_thread": False}, ) def go(): c = eng.connect() c.execute(text("select 1")) c.close() threads = [threading.Thread(target=go) for i in range(10)] for t in threads: t.start() for t in threads: t.join() eq_(m1, ["on_connect", "initialize"] + ["on_connect"] * 9) class DialectEventTest(fixtures.TestBase): @contextmanager def _run_test(self, retval): m1 = Mock() m1.do_execute.return_value = retval m1.do_executemany.return_value = retval m1.do_execute_no_params.return_value = retval e = engines.testing_engine(options={"_initialize": False}) event.listen(e, "do_execute", m1.do_execute) event.listen(e, "do_executemany", m1.do_executemany) event.listen(e, "do_execute_no_params", m1.do_execute_no_params) e.dialect.do_execute = m1.real_do_execute e.dialect.do_executemany = m1.real_do_executemany e.dialect.do_execute_no_params = m1.real_do_execute_no_params def mock_the_cursor(cursor, *arg): arg[-1].get_result_proxy = Mock(return_value=Mock(context=arg[-1])) return retval m1.real_do_execute.side_effect = ( m1.do_execute.side_effect ) = mock_the_cursor m1.real_do_executemany.side_effect = ( m1.do_executemany.side_effect ) = mock_the_cursor m1.real_do_execute_no_params.side_effect = ( m1.do_execute_no_params.side_effect ) = mock_the_cursor with e.begin() as conn: yield conn, m1 def _assert(self, retval, m1, m2, mock_calls): eq_(m1.mock_calls, mock_calls) if retval: eq_(m2.mock_calls, []) else: eq_(m2.mock_calls, mock_calls) def _test_do_execute(self, retval): with self._run_test(retval) as (conn, m1): result = conn.exec_driver_sql( "insert into table foo", {"foo": "bar"} ) self._assert( retval, m1.do_execute, m1.real_do_execute, [ call( result.context.cursor, "insert into table foo", {"foo": "bar"}, 
result.context, ) ], ) def _test_do_executemany(self, retval): with self._run_test(retval) as (conn, m1): result = conn.exec_driver_sql( "insert into table foo", [{"foo": "bar"}, {"foo": "bar"}] ) self._assert( retval, m1.do_executemany, m1.real_do_executemany, [ call( result.context.cursor, "insert into table foo", [{"foo": "bar"}, {"foo": "bar"}], result.context, ) ], ) def _test_do_execute_no_params(self, retval): with self._run_test(retval) as (conn, m1): result = conn.execution_options( no_parameters=True ).exec_driver_sql("insert into table foo") self._assert( retval, m1.do_execute_no_params, m1.real_do_execute_no_params, [ call( result.context.cursor, "insert into table foo", result.context, ) ], ) def _test_cursor_execute(self, retval): with self._run_test(retval) as (conn, m1): dialect = conn.dialect stmt = "insert into table foo" params = {"foo": "bar"} ctx = dialect.execution_ctx_cls._init_statement( dialect, conn, conn.connection, {}, stmt, [params], ) conn._cursor_execute(ctx.cursor, stmt, params, ctx) self._assert( retval, m1.do_execute, m1.real_do_execute, [call(ctx.cursor, "insert into table foo", {"foo": "bar"}, ctx)], ) def test_do_execute_w_replace(self): self._test_do_execute(True) def test_do_execute_wo_replace(self): self._test_do_execute(False) def test_do_executemany_w_replace(self): self._test_do_executemany(True) def test_do_executemany_wo_replace(self): self._test_do_executemany(False) def test_do_execute_no_params_w_replace(self): self._test_do_execute_no_params(True) def test_do_execute_no_params_wo_replace(self): self._test_do_execute_no_params(False) def test_cursor_execute_w_replace(self): self._test_cursor_execute(True) def test_cursor_execute_wo_replace(self): self._test_cursor_execute(False) def test_connect_replace_params(self): e = engines.testing_engine(options={"_initialize": False}) @event.listens_for(e, "do_connect") def evt(dialect, conn_rec, cargs, cparams): cargs[:] = ["foo", "hoho"] cparams.clear() cparams["bar"] = "bat" 
conn_rec.info["boom"] = "bap" m1 = Mock() e.dialect.connect = m1.real_connect with e.connect() as conn: eq_(m1.mock_calls, [call.real_connect("foo", "hoho", bar="bat")]) eq_(conn.info["boom"], "bap") def test_connect_do_connect(self): e = engines.testing_engine(options={"_initialize": False}) m1 = Mock() @event.listens_for(e, "do_connect") def evt1(dialect, conn_rec, cargs, cparams): cargs[:] = ["foo", "hoho"] cparams.clear() cparams["bar"] = "bat" conn_rec.info["boom"] = "one" @event.listens_for(e, "do_connect") def evt2(dialect, conn_rec, cargs, cparams): conn_rec.info["bap"] = "two" return m1.our_connect(cargs, cparams) with e.connect() as conn: # called with args eq_( m1.mock_calls, [call.our_connect(["foo", "hoho"], {"bar": "bat"})], ) eq_(conn.info["boom"], "one") eq_(conn.info["bap"], "two") # returned our mock connection is_(conn.connection.dbapi_connection, m1.our_connect()) def test_connect_do_connect_info_there_after_recycle(self): # test that info is maintained after the do_connect() # event for a soft invalidation. e = engines.testing_engine(options={"_initialize": False}) @event.listens_for(e, "do_connect") def evt1(dialect, conn_rec, cargs, cparams): conn_rec.info["boom"] = "one" conn = e.connect() eq_(conn.info["boom"], "one") conn.connection.invalidate(soft=True) conn.close() conn = e.connect() eq_(conn.info["boom"], "one") def test_connect_do_connect_info_there_after_invalidate(self): # test that info is maintained after the do_connect() # event for a hard invalidation. 
e = engines.testing_engine(options={"_initialize": False}) @event.listens_for(e, "do_connect") def evt1(dialect, conn_rec, cargs, cparams): assert not conn_rec.info conn_rec.info["boom"] = "one" conn = e.connect() eq_(conn.info["boom"], "one") conn.connection.invalidate() conn = e.connect() eq_(conn.info["boom"], "one") class FutureExecuteTest(fixtures.FutureEngineMixin, fixtures.TablesTest): __backend__ = True @classmethod def define_tables(cls, metadata): Table( "users", metadata, Column("user_id", INT, primary_key=True, autoincrement=False), Column("user_name", VARCHAR(20)), test_needs_acid=True, ) Table( "users_autoinc", metadata, Column( "user_id", INT, primary_key=True, test_needs_autoincrement=True ), Column("user_name", VARCHAR(20)), test_needs_acid=True, ) @testing.combinations( ({}, {}, {}), ({"a": "b"}, {}, {"a": "b"}), ({"a": "b", "d": "e"}, {"a": "c"}, {"a": "c", "d": "e"}), argnames="conn_opts, exec_opts, expected", ) def test_execution_opts_per_invoke( self, connection, conn_opts, exec_opts, expected ): opts = [] @event.listens_for(connection, "before_cursor_execute") def before_cursor_execute( conn, cursor, statement, parameters, context, executemany ): opts.append(context.execution_options) if conn_opts: connection = connection.execution_options(**conn_opts) if exec_opts: connection.execute(select(1), execution_options=exec_opts) else: connection.execute(select(1)) eq_(opts, [expected]) @testing.combinations( ({}, {}, {}, {}), ({}, {"a": "b"}, {}, {"a": "b"}), ({}, {"a": "b", "d": "e"}, {"a": "c"}, {"a": "c", "d": "e"}), ( {"q": "z", "p": "r"}, {"a": "b", "p": "x", "d": "e"}, {"a": "c"}, {"q": "z", "p": "x", "a": "c", "d": "e"}, ), argnames="stmt_opts, conn_opts, exec_opts, expected", ) def test_execution_opts_per_invoke_execute_events( self, connection, stmt_opts, conn_opts, exec_opts, expected ): opts = [] @event.listens_for(connection, "before_execute") def before_execute( conn, clauseelement, multiparams, params, execution_options ): 
opts.append(("before", execution_options)) @event.listens_for(connection, "after_execute") def after_execute( conn, clauseelement, multiparams, params, execution_options, result, ): opts.append(("after", execution_options)) stmt = select(1) if stmt_opts: stmt = stmt.execution_options(**stmt_opts) if conn_opts: connection = connection.execution_options(**conn_opts) if exec_opts: connection.execute(stmt, execution_options=exec_opts) else: connection.execute(stmt) eq_(opts, [("before", expected), ("after", expected)]) def test_no_branching(self, connection): with testing.expect_deprecated( r"The Connection.connect\(\) method is considered legacy" ): assert_raises_message( NotImplementedError, "sqlalchemy.future.Connection does not support " "'branching' of new connections.", connection.connect, ) class SetInputSizesTest(fixtures.TablesTest): __backend__ = True __requires__ = ("independent_connections",) @classmethod def define_tables(cls, metadata): Table( "users", metadata, Column("user_id", INT, primary_key=True, autoincrement=False), Column("user_name", VARCHAR(20)), ) @testing.fixture def input_sizes_fixture(self, testing_engine): canary = mock.Mock() def do_set_input_sizes(cursor, list_of_tuples, context): if not engine.dialect.positional: # sort by "user_id", "user_name", or otherwise # param name for a non-positional dialect, so that we can # confirm the ordering. mostly a py2 thing probably can't # occur on py3.6+ since we are passing dictionaries with # "user_id", "user_name" list_of_tuples = sorted( list_of_tuples, key=lambda elem: elem[0] ) canary.do_set_input_sizes(cursor, list_of_tuples, context) def pre_exec(self): self.translate_set_input_sizes = None self.include_set_input_sizes = None self.exclude_set_input_sizes = None engine = testing_engine() engine.connect().close() # the idea of this test is we fully replace the dialect # do_set_input_sizes with a mock, and we can then intercept # the setting passed to the dialect. 
the test table uses very # "safe" datatypes so that the DBAPI does not actually need # setinputsizes() called in order to work. with mock.patch.object( engine.dialect, "use_setinputsizes", True ), mock.patch.object( engine.dialect, "do_set_input_sizes", do_set_input_sizes ), mock.patch.object( engine.dialect.execution_ctx_cls, "pre_exec", pre_exec ): yield engine, canary def test_set_input_sizes_no_event(self, input_sizes_fixture): engine, canary = input_sizes_fixture with engine.begin() as conn: conn.execute( self.tables.users.insert(), [ {"user_id": 1, "user_name": "n1"}, {"user_id": 2, "user_name": "n2"}, ], ) eq_( canary.mock_calls, [ call.do_set_input_sizes( mock.ANY, [ ( "user_id", mock.ANY, testing.eq_type_affinity(Integer), ), ( "user_name", mock.ANY, testing.eq_type_affinity(String), ), ], mock.ANY, ) ], ) def test_set_input_sizes_expanding_param(self, input_sizes_fixture): engine, canary = input_sizes_fixture with engine.connect() as conn: conn.execute( select(self.tables.users).where( self.tables.users.c.user_name.in_(["x", "y", "z"]) ) ) eq_( canary.mock_calls, [ call.do_set_input_sizes( mock.ANY, [ ( "user_name_1_1", mock.ANY, testing.eq_type_affinity(String), ), ( "user_name_1_2", mock.ANY, testing.eq_type_affinity(String), ), ( "user_name_1_3", mock.ANY, testing.eq_type_affinity(String), ), ], mock.ANY, ) ], ) @testing.requires.tuple_in def test_set_input_sizes_expanding_tuple_param(self, input_sizes_fixture): engine, canary = input_sizes_fixture from sqlalchemy import tuple_ with engine.connect() as conn: conn.execute( select(self.tables.users).where( tuple_( self.tables.users.c.user_id, self.tables.users.c.user_name, ).in_([(1, "x"), (2, "y")]) ) ) eq_( canary.mock_calls, [ call.do_set_input_sizes( mock.ANY, [ ( "param_1_1_1", mock.ANY, testing.eq_type_affinity(Integer), ), ( "param_1_1_2", mock.ANY, testing.eq_type_affinity(String), ), ( "param_1_2_1", mock.ANY, testing.eq_type_affinity(Integer), ), ( "param_1_2_2", mock.ANY, 
testing.eq_type_affinity(String), ), ], mock.ANY, ) ], ) def test_set_input_sizes_event(self, input_sizes_fixture): engine, canary = input_sizes_fixture SPECIAL_STRING = mock.Mock() @event.listens_for(engine, "do_setinputsizes") def do_setinputsizes( inputsizes, cursor, statement, parameters, context ): for k in inputsizes: if k.type._type_affinity is String: inputsizes[k] = ( SPECIAL_STRING, None, 0, ) with engine.begin() as conn: conn.execute( self.tables.users.insert(), [ {"user_id": 1, "user_name": "n1"}, {"user_id": 2, "user_name": "n2"}, ], ) eq_( canary.mock_calls, [ call.do_set_input_sizes( mock.ANY, [ ( "user_id", mock.ANY, testing.eq_type_affinity(Integer), ), ( "user_name", (SPECIAL_STRING, None, 0), testing.eq_type_affinity(String), ), ], mock.ANY, ) ], ) class DialectDoesntSupportCachingTest(fixtures.TestBase): """test the opt-in caching flag added in :ticket:`6184`.""" __only_on__ = "sqlite+pysqlite" __requires__ = ("sqlite_memory",) @testing.fixture() def sqlite_no_cache_dialect(self, testing_engine): from sqlalchemy.dialects.sqlite.pysqlite import SQLiteDialect_pysqlite from sqlalchemy.dialects.sqlite.base import SQLiteCompiler from sqlalchemy.sql import visitors class MyCompiler(SQLiteCompiler): def translate_select_structure(self, select_stmt, **kwargs): select = select_stmt if not getattr(select, "_mydialect_visit", None): select = visitors.cloned_traverse(select_stmt, {}, {}) if select._limit_clause is not None: # create a bindparam with a fixed name and hardcode # it to the given limit. this breaks caching. 
select._limit_clause = bindparam( "limit", value=select._limit, literal_execute=True ) select._mydialect_visit = True return select class MyDialect(SQLiteDialect_pysqlite): statement_compiler = MyCompiler from sqlalchemy.dialects import registry def go(name): return MyDialect with mock.patch.object(registry, "load", go): eng = testing_engine() yield eng @testing.fixture def data_fixture(self, sqlite_no_cache_dialect): m = MetaData() t = Table("t1", m, Column("x", Integer)) with sqlite_no_cache_dialect.begin() as conn: t.create(conn) conn.execute(t.insert(), [{"x": 1}, {"x": 2}, {"x": 3}, {"x": 4}]) return t def test_no_cache(self, sqlite_no_cache_dialect, data_fixture): eng = sqlite_no_cache_dialect def go(lim): with eng.connect() as conn: result = conn.execute( select(data_fixture).order_by(data_fixture.c.x).limit(lim) ) return result r1 = go(2) r2 = go(3) eq_(r1.all(), [(1,), (2,)]) eq_(r2.all(), [(1,), (2,), (3,)]) def test_it_caches(self, sqlite_no_cache_dialect, data_fixture): eng = sqlite_no_cache_dialect eng.dialect.__class__.supports_statement_cache = True del eng.dialect.__dict__["_supports_statement_cache"] def go(lim): with eng.connect() as conn: result = conn.execute( select(data_fixture).order_by(data_fixture.c.x).limit(lim) ) return result r1 = go(2) r2 = go(3) eq_(r1.all(), [(1,), (2,)]) # wrong answer eq_( r2.all(), [ (1,), (2,), ], )
32.190378
79
0.536827
4a14cbe25eabadf5e2dc84db1261781b8cffa6ed
640
py
Python
editor_tests/scm_tests/case_built-in-map,-filter,-reduce.py
Cal-CS-61A-Staff/scheme_interpreter_personal
ae6650362df566b15b364f0935aeb423cfca9f13
[ "MIT" ]
5
2019-03-31T23:25:44.000Z
2019-04-02T06:47:33.000Z
editor_tests/scm_tests/case_built-in-map,-filter,-reduce.py
Cal-CS-61A-Staff/scheme_interpreter_personal
ae6650362df566b15b364f0935aeb423cfca9f13
[ "MIT" ]
59
2019-02-16T10:36:51.000Z
2019-04-02T21:50:29.000Z
editor_tests/scm_tests/case_built-in-map,-filter,-reduce.py
Cal-CS-61A-Staff/scheme_editor
ae6650362df566b15b364f0935aeb423cfca9f13
[ "MIT" ]
1
2019-04-19T21:20:59.000Z
2019-04-19T21:20:59.000Z
from scheme_runner import SchemeTestCase, Query cases = [ SchemeTestCase([Query(code=['(define (square x) (* x x))'], expected={}), Query(code=["(map square '(1 2 3))"], expected={'out': ['(1 4 9)\n']}), Query(code=["(filter even? '(1 2 3 4 5))"], expected={'out': ['(2 4)\n']}), Query(code=["(reduce + '(1 2 3 4 5))"], expected={'out': ['15\n']}), Query(code=['(define (sum-of-squares x y)', '(+ (square x) (square y)))'], expected={}), Query(code=['(sum-of-squares 3 4)'], expected={'out': ['25\n']}), Query(code=['(define (f a)', '(sum-of-squares (+ a 1) (* a 2)))'], expected={}), Query(code=['(f 5)'], expected={'out': ['136\n']})]) ]
128
579
0.55625
4a14cc74d7dbdf4fce5d0c27571c562ce8ffc5a4
882,513
py
Python
accumulo/proxy/AccumuloProxy.py
faruken/python-accumulo
6bca3610fc8c712d86b5639f2baa7841f0f872a7
[ "Apache-2.0" ]
null
null
null
accumulo/proxy/AccumuloProxy.py
faruken/python-accumulo
6bca3610fc8c712d86b5639f2baa7841f0f872a7
[ "Apache-2.0" ]
1
2019-07-24T22:51:38.000Z
2019-07-24T22:51:38.000Z
accumulo/proxy/AccumuloProxy.py
faruken/python-accumulo
6bca3610fc8c712d86b5639f2baa7841f0f872a7
[ "Apache-2.0" ]
1
2020-02-27T01:39:01.000Z
2020-02-27T01:39:01.000Z
# # Autogenerated by Thrift Compiler (0.10.0) # # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING # # options string: py # from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException from thrift.protocol.TProtocol import TProtocolException import sys import logging from .ttypes import * from thrift.Thrift import TProcessor from thrift.transport import TTransport class Iface(object): def login(self, principal, loginProperties): """ Parameters: - principal - loginProperties """ pass def addConstraint(self, login, tableName, constraintClassName): """ Parameters: - login - tableName - constraintClassName """ pass def addSplits(self, login, tableName, splits): """ Parameters: - login - tableName - splits """ pass def attachIterator(self, login, tableName, setting, scopes): """ Parameters: - login - tableName - setting - scopes """ pass def checkIteratorConflicts(self, login, tableName, setting, scopes): """ Parameters: - login - tableName - setting - scopes """ pass def clearLocatorCache(self, login, tableName): """ Parameters: - login - tableName """ pass def cloneTable(self, login, tableName, newTableName, flush, propertiesToSet, propertiesToExclude): """ Parameters: - login - tableName - newTableName - flush - propertiesToSet - propertiesToExclude """ pass def compactTable(self, login, tableName, startRow, endRow, iterators, flush, wait, compactionStrategy): """ Parameters: - login - tableName - startRow - endRow - iterators - flush - wait - compactionStrategy """ pass def cancelCompaction(self, login, tableName): """ Parameters: - login - tableName """ pass def createTable(self, login, tableName, versioningIter, type): """ Parameters: - login - tableName - versioningIter - type """ pass def deleteTable(self, login, tableName): """ Parameters: - login - tableName """ pass def deleteRows(self, login, tableName, startRow, endRow): """ Parameters: - login - tableName - startRow - endRow """ pass def exportTable(self, 
login, tableName, exportDir): """ Parameters: - login - tableName - exportDir """ pass def flushTable(self, login, tableName, startRow, endRow, wait): """ Parameters: - login - tableName - startRow - endRow - wait """ pass def getDiskUsage(self, login, tables): """ Parameters: - login - tables """ pass def getLocalityGroups(self, login, tableName): """ Parameters: - login - tableName """ pass def getIteratorSetting(self, login, tableName, iteratorName, scope): """ Parameters: - login - tableName - iteratorName - scope """ pass def getMaxRow(self, login, tableName, auths, startRow, startInclusive, endRow, endInclusive): """ Parameters: - login - tableName - auths - startRow - startInclusive - endRow - endInclusive """ pass def getTableProperties(self, login, tableName): """ Parameters: - login - tableName """ pass def importDirectory(self, login, tableName, importDir, failureDir, setTime): """ Parameters: - login - tableName - importDir - failureDir - setTime """ pass def importTable(self, login, tableName, importDir): """ Parameters: - login - tableName - importDir """ pass def listSplits(self, login, tableName, maxSplits): """ Parameters: - login - tableName - maxSplits """ pass def listTables(self, login): """ Parameters: - login """ pass def listIterators(self, login, tableName): """ Parameters: - login - tableName """ pass def listConstraints(self, login, tableName): """ Parameters: - login - tableName """ pass def mergeTablets(self, login, tableName, startRow, endRow): """ Parameters: - login - tableName - startRow - endRow """ pass def offlineTable(self, login, tableName, wait): """ Parameters: - login - tableName - wait """ pass def onlineTable(self, login, tableName, wait): """ Parameters: - login - tableName - wait """ pass def removeConstraint(self, login, tableName, constraint): """ Parameters: - login - tableName - constraint """ pass def removeIterator(self, login, tableName, iterName, scopes): """ Parameters: - login - tableName - iterName - scopes 
""" pass def removeTableProperty(self, login, tableName, property): """ Parameters: - login - tableName - property """ pass def renameTable(self, login, oldTableName, newTableName): """ Parameters: - login - oldTableName - newTableName """ pass def setLocalityGroups(self, login, tableName, groups): """ Parameters: - login - tableName - groups """ pass def setTableProperty(self, login, tableName, property, value): """ Parameters: - login - tableName - property - value """ pass def splitRangeByTablets(self, login, tableName, range, maxSplits): """ Parameters: - login - tableName - range - maxSplits """ pass def tableExists(self, login, tableName): """ Parameters: - login - tableName """ pass def tableIdMap(self, login): """ Parameters: - login """ pass def testTableClassLoad(self, login, tableName, className, asTypeName): """ Parameters: - login - tableName - className - asTypeName """ pass def pingTabletServer(self, login, tserver): """ Parameters: - login - tserver """ pass def getActiveScans(self, login, tserver): """ Parameters: - login - tserver """ pass def getActiveCompactions(self, login, tserver): """ Parameters: - login - tserver """ pass def getSiteConfiguration(self, login): """ Parameters: - login """ pass def getSystemConfiguration(self, login): """ Parameters: - login """ pass def getTabletServers(self, login): """ Parameters: - login """ pass def removeProperty(self, login, property): """ Parameters: - login - property """ pass def setProperty(self, login, property, value): """ Parameters: - login - property - value """ pass def testClassLoad(self, login, className, asTypeName): """ Parameters: - login - className - asTypeName """ pass def authenticateUser(self, login, user, properties): """ Parameters: - login - user - properties """ pass def changeUserAuthorizations(self, login, user, authorizations): """ Parameters: - login - user - authorizations """ pass def changeLocalUserPassword(self, login, user, password): """ Parameters: - login - user - 
password """ pass def createLocalUser(self, login, user, password): """ Parameters: - login - user - password """ pass def dropLocalUser(self, login, user): """ Parameters: - login - user """ pass def getUserAuthorizations(self, login, user): """ Parameters: - login - user """ pass def grantSystemPermission(self, login, user, perm): """ Parameters: - login - user - perm """ pass def grantTablePermission(self, login, user, table, perm): """ Parameters: - login - user - table - perm """ pass def hasSystemPermission(self, login, user, perm): """ Parameters: - login - user - perm """ pass def hasTablePermission(self, login, user, table, perm): """ Parameters: - login - user - table - perm """ pass def listLocalUsers(self, login): """ Parameters: - login """ pass def revokeSystemPermission(self, login, user, perm): """ Parameters: - login - user - perm """ pass def revokeTablePermission(self, login, user, table, perm): """ Parameters: - login - user - table - perm """ pass def grantNamespacePermission(self, login, user, namespaceName, perm): """ Parameters: - login - user - namespaceName - perm """ pass def hasNamespacePermission(self, login, user, namespaceName, perm): """ Parameters: - login - user - namespaceName - perm """ pass def revokeNamespacePermission(self, login, user, namespaceName, perm): """ Parameters: - login - user - namespaceName - perm """ pass def createBatchScanner(self, login, tableName, options): """ Parameters: - login - tableName - options """ pass def createScanner(self, login, tableName, options): """ Parameters: - login - tableName - options """ pass def hasNext(self, scanner): """ Parameters: - scanner """ pass def nextEntry(self, scanner): """ Parameters: - scanner """ pass def nextK(self, scanner, k): """ Parameters: - scanner - k """ pass def closeScanner(self, scanner): """ Parameters: - scanner """ pass def updateAndFlush(self, login, tableName, cells): """ Parameters: - login - tableName - cells """ pass def createWriter(self, login, 
tableName, opts): """ Parameters: - login - tableName - opts """ pass def update(self, writer, cells): """ Parameters: - writer - cells """ pass def flush(self, writer): """ Parameters: - writer """ pass def closeWriter(self, writer): """ Parameters: - writer """ pass def updateRowConditionally(self, login, tableName, row, updates): """ Parameters: - login - tableName - row - updates """ pass def createConditionalWriter(self, login, tableName, options): """ Parameters: - login - tableName - options """ pass def updateRowsConditionally(self, conditionalWriter, updates): """ Parameters: - conditionalWriter - updates """ pass def closeConditionalWriter(self, conditionalWriter): """ Parameters: - conditionalWriter """ pass def getRowRange(self, row): """ Parameters: - row """ pass def getFollowing(self, key, part): """ Parameters: - key - part """ pass def systemNamespace(self): pass def defaultNamespace(self): pass def listNamespaces(self, login): """ Parameters: - login """ pass def namespaceExists(self, login, namespaceName): """ Parameters: - login - namespaceName """ pass def createNamespace(self, login, namespaceName): """ Parameters: - login - namespaceName """ pass def deleteNamespace(self, login, namespaceName): """ Parameters: - login - namespaceName """ pass def renameNamespace(self, login, oldNamespaceName, newNamespaceName): """ Parameters: - login - oldNamespaceName - newNamespaceName """ pass def setNamespaceProperty(self, login, namespaceName, property, value): """ Parameters: - login - namespaceName - property - value """ pass def removeNamespaceProperty(self, login, namespaceName, property): """ Parameters: - login - namespaceName - property """ pass def getNamespaceProperties(self, login, namespaceName): """ Parameters: - login - namespaceName """ pass def namespaceIdMap(self, login): """ Parameters: - login """ pass def attachNamespaceIterator(self, login, namespaceName, setting, scopes): """ Parameters: - login - namespaceName - setting - scopes 
""" pass def removeNamespaceIterator(self, login, namespaceName, name, scopes): """ Parameters: - login - namespaceName - name - scopes """ pass def getNamespaceIteratorSetting(self, login, namespaceName, name, scope): """ Parameters: - login - namespaceName - name - scope """ pass def listNamespaceIterators(self, login, namespaceName): """ Parameters: - login - namespaceName """ pass def checkNamespaceIteratorConflicts(self, login, namespaceName, setting, scopes): """ Parameters: - login - namespaceName - setting - scopes """ pass def addNamespaceConstraint(self, login, namespaceName, constraintClassName): """ Parameters: - login - namespaceName - constraintClassName """ pass def removeNamespaceConstraint(self, login, namespaceName, id): """ Parameters: - login - namespaceName - id """ pass def listNamespaceConstraints(self, login, namespaceName): """ Parameters: - login - namespaceName """ pass def testNamespaceClassLoad(self, login, namespaceName, className, asTypeName): """ Parameters: - login - namespaceName - className - asTypeName """ pass class Client(Iface): def __init__(self, iprot, oprot=None): self._iprot = self._oprot = iprot if oprot is not None: self._oprot = oprot self._seqid = 0 def login(self, principal, loginProperties): """ Parameters: - principal - loginProperties """ self.send_login(principal, loginProperties) return self.recv_login() def send_login(self, principal, loginProperties): self._oprot.writeMessageBegin('login', TMessageType.CALL, self._seqid) args = login_args() args.principal = principal args.loginProperties = loginProperties args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_login(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = login_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if 
result.ouch2 is not None: raise result.ouch2 raise TApplicationException(TApplicationException.MISSING_RESULT, "login failed: unknown result") def addConstraint(self, login, tableName, constraintClassName): """ Parameters: - login - tableName - constraintClassName """ self.send_addConstraint(login, tableName, constraintClassName) return self.recv_addConstraint() def send_addConstraint(self, login, tableName, constraintClassName): self._oprot.writeMessageBegin('addConstraint', TMessageType.CALL, self._seqid) args = addConstraint_args() args.login = login args.tableName = tableName args.constraintClassName = constraintClassName args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_addConstraint(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = addConstraint_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 raise TApplicationException(TApplicationException.MISSING_RESULT, "addConstraint failed: unknown result") def addSplits(self, login, tableName, splits): """ Parameters: - login - tableName - splits """ self.send_addSplits(login, tableName, splits) self.recv_addSplits() def send_addSplits(self, login, tableName, splits): self._oprot.writeMessageBegin('addSplits', TMessageType.CALL, self._seqid) args = addSplits_args() args.login = login args.tableName = tableName args.splits = splits args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_addSplits(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = addSplits_result() 
result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 return def attachIterator(self, login, tableName, setting, scopes): """ Parameters: - login - tableName - setting - scopes """ self.send_attachIterator(login, tableName, setting, scopes) self.recv_attachIterator() def send_attachIterator(self, login, tableName, setting, scopes): self._oprot.writeMessageBegin('attachIterator', TMessageType.CALL, self._seqid) args = attachIterator_args() args.login = login args.tableName = tableName args.setting = setting args.scopes = scopes args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_attachIterator(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = attachIterator_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 return def checkIteratorConflicts(self, login, tableName, setting, scopes): """ Parameters: - login - tableName - setting - scopes """ self.send_checkIteratorConflicts(login, tableName, setting, scopes) self.recv_checkIteratorConflicts() def send_checkIteratorConflicts(self, login, tableName, setting, scopes): self._oprot.writeMessageBegin('checkIteratorConflicts', TMessageType.CALL, self._seqid) args = checkIteratorConflicts_args() args.login = login args.tableName = tableName args.setting = setting args.scopes = scopes args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_checkIteratorConflicts(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x 
result = checkIteratorConflicts_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 return def clearLocatorCache(self, login, tableName): """ Parameters: - login - tableName """ self.send_clearLocatorCache(login, tableName) self.recv_clearLocatorCache() def send_clearLocatorCache(self, login, tableName): self._oprot.writeMessageBegin('clearLocatorCache', TMessageType.CALL, self._seqid) args = clearLocatorCache_args() args.login = login args.tableName = tableName args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_clearLocatorCache(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = clearLocatorCache_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 return def cloneTable(self, login, tableName, newTableName, flush, propertiesToSet, propertiesToExclude): """ Parameters: - login - tableName - newTableName - flush - propertiesToSet - propertiesToExclude """ self.send_cloneTable(login, tableName, newTableName, flush, propertiesToSet, propertiesToExclude) self.recv_cloneTable() def send_cloneTable(self, login, tableName, newTableName, flush, propertiesToSet, propertiesToExclude): self._oprot.writeMessageBegin('cloneTable', TMessageType.CALL, self._seqid) args = cloneTable_args() args.login = login args.tableName = tableName args.newTableName = newTableName args.flush = flush args.propertiesToSet = propertiesToSet args.propertiesToExclude = propertiesToExclude args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_cloneTable(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() 
x.read(iprot) iprot.readMessageEnd() raise x result = cloneTable_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 if result.ouch4 is not None: raise result.ouch4 return def compactTable(self, login, tableName, startRow, endRow, iterators, flush, wait, compactionStrategy): """ Parameters: - login - tableName - startRow - endRow - iterators - flush - wait - compactionStrategy """ self.send_compactTable(login, tableName, startRow, endRow, iterators, flush, wait, compactionStrategy) self.recv_compactTable() def send_compactTable(self, login, tableName, startRow, endRow, iterators, flush, wait, compactionStrategy): self._oprot.writeMessageBegin('compactTable', TMessageType.CALL, self._seqid) args = compactTable_args() args.login = login args.tableName = tableName args.startRow = startRow args.endRow = endRow args.iterators = iterators args.flush = flush args.wait = wait args.compactionStrategy = compactionStrategy args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_compactTable(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = compactTable_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 return def cancelCompaction(self, login, tableName): """ Parameters: - login - tableName """ self.send_cancelCompaction(login, tableName) self.recv_cancelCompaction() def send_cancelCompaction(self, login, tableName): self._oprot.writeMessageBegin('cancelCompaction', TMessageType.CALL, self._seqid) args = cancelCompaction_args() args.login = login args.tableName = tableName args.write(self._oprot) 
self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_cancelCompaction(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = cancelCompaction_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 return def createTable(self, login, tableName, versioningIter, type): """ Parameters: - login - tableName - versioningIter - type """ self.send_createTable(login, tableName, versioningIter, type) self.recv_createTable() def send_createTable(self, login, tableName, versioningIter, type): self._oprot.writeMessageBegin('createTable', TMessageType.CALL, self._seqid) args = createTable_args() args.login = login args.tableName = tableName args.versioningIter = versioningIter args.type = type args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_createTable(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = createTable_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 return def deleteTable(self, login, tableName): """ Parameters: - login - tableName """ self.send_deleteTable(login, tableName) self.recv_deleteTable() def send_deleteTable(self, login, tableName): self._oprot.writeMessageBegin('deleteTable', TMessageType.CALL, self._seqid) args = deleteTable_args() args.login = login args.tableName = tableName args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_deleteTable(self): iprot = self._iprot (fname, mtype, rseqid) = 
iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = deleteTable_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 return def deleteRows(self, login, tableName, startRow, endRow): """ Parameters: - login - tableName - startRow - endRow """ self.send_deleteRows(login, tableName, startRow, endRow) self.recv_deleteRows() def send_deleteRows(self, login, tableName, startRow, endRow): self._oprot.writeMessageBegin('deleteRows', TMessageType.CALL, self._seqid) args = deleteRows_args() args.login = login args.tableName = tableName args.startRow = startRow args.endRow = endRow args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_deleteRows(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = deleteRows_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 return def exportTable(self, login, tableName, exportDir): """ Parameters: - login - tableName - exportDir """ self.send_exportTable(login, tableName, exportDir) self.recv_exportTable() def send_exportTable(self, login, tableName, exportDir): self._oprot.writeMessageBegin('exportTable', TMessageType.CALL, self._seqid) args = exportTable_args() args.login = login args.tableName = tableName args.exportDir = exportDir args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_exportTable(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) 
iprot.readMessageEnd() raise x result = exportTable_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 return def flushTable(self, login, tableName, startRow, endRow, wait): """ Parameters: - login - tableName - startRow - endRow - wait """ self.send_flushTable(login, tableName, startRow, endRow, wait) self.recv_flushTable() def send_flushTable(self, login, tableName, startRow, endRow, wait): self._oprot.writeMessageBegin('flushTable', TMessageType.CALL, self._seqid) args = flushTable_args() args.login = login args.tableName = tableName args.startRow = startRow args.endRow = endRow args.wait = wait args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_flushTable(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = flushTable_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 return def getDiskUsage(self, login, tables): """ Parameters: - login - tables """ self.send_getDiskUsage(login, tables) return self.recv_getDiskUsage() def send_getDiskUsage(self, login, tables): self._oprot.writeMessageBegin('getDiskUsage', TMessageType.CALL, self._seqid) args = getDiskUsage_args() args.login = login args.tables = tables args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_getDiskUsage(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = getDiskUsage_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return 
result.success if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 raise TApplicationException(TApplicationException.MISSING_RESULT, "getDiskUsage failed: unknown result") def getLocalityGroups(self, login, tableName): """ Parameters: - login - tableName """ self.send_getLocalityGroups(login, tableName) return self.recv_getLocalityGroups() def send_getLocalityGroups(self, login, tableName): self._oprot.writeMessageBegin('getLocalityGroups', TMessageType.CALL, self._seqid) args = getLocalityGroups_args() args.login = login args.tableName = tableName args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_getLocalityGroups(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = getLocalityGroups_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 raise TApplicationException(TApplicationException.MISSING_RESULT, "getLocalityGroups failed: unknown result") def getIteratorSetting(self, login, tableName, iteratorName, scope): """ Parameters: - login - tableName - iteratorName - scope """ self.send_getIteratorSetting(login, tableName, iteratorName, scope) return self.recv_getIteratorSetting() def send_getIteratorSetting(self, login, tableName, iteratorName, scope): self._oprot.writeMessageBegin('getIteratorSetting', TMessageType.CALL, self._seqid) args = getIteratorSetting_args() args.login = login args.tableName = tableName args.iteratorName = iteratorName args.scope = scope args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_getIteratorSetting(self): iprot = self._iprot (fname, 
mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = getIteratorSetting_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 raise TApplicationException(TApplicationException.MISSING_RESULT, "getIteratorSetting failed: unknown result") def getMaxRow(self, login, tableName, auths, startRow, startInclusive, endRow, endInclusive): """ Parameters: - login - tableName - auths - startRow - startInclusive - endRow - endInclusive """ self.send_getMaxRow(login, tableName, auths, startRow, startInclusive, endRow, endInclusive) return self.recv_getMaxRow() def send_getMaxRow(self, login, tableName, auths, startRow, startInclusive, endRow, endInclusive): self._oprot.writeMessageBegin('getMaxRow', TMessageType.CALL, self._seqid) args = getMaxRow_args() args.login = login args.tableName = tableName args.auths = auths args.startRow = startRow args.startInclusive = startInclusive args.endRow = endRow args.endInclusive = endInclusive args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_getMaxRow(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = getMaxRow_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 raise TApplicationException(TApplicationException.MISSING_RESULT, "getMaxRow failed: unknown result") def getTableProperties(self, login, tableName): """ Parameters: - login - tableName """ 
self.send_getTableProperties(login, tableName) return self.recv_getTableProperties() def send_getTableProperties(self, login, tableName): self._oprot.writeMessageBegin('getTableProperties', TMessageType.CALL, self._seqid) args = getTableProperties_args() args.login = login args.tableName = tableName args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_getTableProperties(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = getTableProperties_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 raise TApplicationException(TApplicationException.MISSING_RESULT, "getTableProperties failed: unknown result") def importDirectory(self, login, tableName, importDir, failureDir, setTime): """ Parameters: - login - tableName - importDir - failureDir - setTime """ self.send_importDirectory(login, tableName, importDir, failureDir, setTime) self.recv_importDirectory() def send_importDirectory(self, login, tableName, importDir, failureDir, setTime): self._oprot.writeMessageBegin('importDirectory', TMessageType.CALL, self._seqid) args = importDirectory_args() args.login = login args.tableName = tableName args.importDir = importDir args.failureDir = failureDir args.setTime = setTime args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_importDirectory(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = importDirectory_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch3 is not None: 
raise result.ouch3 if result.ouch4 is not None: raise result.ouch4 return def importTable(self, login, tableName, importDir): """ Parameters: - login - tableName - importDir """ self.send_importTable(login, tableName, importDir) self.recv_importTable() def send_importTable(self, login, tableName, importDir): self._oprot.writeMessageBegin('importTable', TMessageType.CALL, self._seqid) args = importTable_args() args.login = login args.tableName = tableName args.importDir = importDir args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_importTable(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = importTable_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 return def listSplits(self, login, tableName, maxSplits): """ Parameters: - login - tableName - maxSplits """ self.send_listSplits(login, tableName, maxSplits) return self.recv_listSplits() def send_listSplits(self, login, tableName, maxSplits): self._oprot.writeMessageBegin('listSplits', TMessageType.CALL, self._seqid) args = listSplits_args() args.login = login args.tableName = tableName args.maxSplits = maxSplits args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_listSplits(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = listSplits_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 raise 
TApplicationException(TApplicationException.MISSING_RESULT, "listSplits failed: unknown result") def listTables(self, login): """ Parameters: - login """ self.send_listTables(login) return self.recv_listTables() def send_listTables(self, login): self._oprot.writeMessageBegin('listTables', TMessageType.CALL, self._seqid) args = listTables_args() args.login = login args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_listTables(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = listTables_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success raise TApplicationException(TApplicationException.MISSING_RESULT, "listTables failed: unknown result") def listIterators(self, login, tableName): """ Parameters: - login - tableName """ self.send_listIterators(login, tableName) return self.recv_listIterators() def send_listIterators(self, login, tableName): self._oprot.writeMessageBegin('listIterators', TMessageType.CALL, self._seqid) args = listIterators_args() args.login = login args.tableName = tableName args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_listIterators(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = listIterators_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 raise TApplicationException(TApplicationException.MISSING_RESULT, "listIterators failed: unknown result") def listConstraints(self, login, tableName): """ Parameters: - login - tableName """ 
self.send_listConstraints(login, tableName) return self.recv_listConstraints() def send_listConstraints(self, login, tableName): self._oprot.writeMessageBegin('listConstraints', TMessageType.CALL, self._seqid) args = listConstraints_args() args.login = login args.tableName = tableName args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_listConstraints(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = listConstraints_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 raise TApplicationException(TApplicationException.MISSING_RESULT, "listConstraints failed: unknown result") def mergeTablets(self, login, tableName, startRow, endRow): """ Parameters: - login - tableName - startRow - endRow """ self.send_mergeTablets(login, tableName, startRow, endRow) self.recv_mergeTablets() def send_mergeTablets(self, login, tableName, startRow, endRow): self._oprot.writeMessageBegin('mergeTablets', TMessageType.CALL, self._seqid) args = mergeTablets_args() args.login = login args.tableName = tableName args.startRow = startRow args.endRow = endRow args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_mergeTablets(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = mergeTablets_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 return def offlineTable(self, login, tableName, wait): """ Parameters: 
- login - tableName - wait """ self.send_offlineTable(login, tableName, wait) self.recv_offlineTable() def send_offlineTable(self, login, tableName, wait): self._oprot.writeMessageBegin('offlineTable', TMessageType.CALL, self._seqid) args = offlineTable_args() args.login = login args.tableName = tableName args.wait = wait args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_offlineTable(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = offlineTable_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 return def onlineTable(self, login, tableName, wait): """ Parameters: - login - tableName - wait """ self.send_onlineTable(login, tableName, wait) self.recv_onlineTable() def send_onlineTable(self, login, tableName, wait): self._oprot.writeMessageBegin('onlineTable', TMessageType.CALL, self._seqid) args = onlineTable_args() args.login = login args.tableName = tableName args.wait = wait args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_onlineTable(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = onlineTable_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 return def removeConstraint(self, login, tableName, constraint): """ Parameters: - login - tableName - constraint """ self.send_removeConstraint(login, tableName, constraint) self.recv_removeConstraint() def send_removeConstraint(self, login, tableName, constraint): 
self._oprot.writeMessageBegin('removeConstraint', TMessageType.CALL, self._seqid) args = removeConstraint_args() args.login = login args.tableName = tableName args.constraint = constraint args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_removeConstraint(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = removeConstraint_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 return def removeIterator(self, login, tableName, iterName, scopes): """ Parameters: - login - tableName - iterName - scopes """ self.send_removeIterator(login, tableName, iterName, scopes) self.recv_removeIterator() def send_removeIterator(self, login, tableName, iterName, scopes): self._oprot.writeMessageBegin('removeIterator', TMessageType.CALL, self._seqid) args = removeIterator_args() args.login = login args.tableName = tableName args.iterName = iterName args.scopes = scopes args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_removeIterator(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = removeIterator_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 return def removeTableProperty(self, login, tableName, property): """ Parameters: - login - tableName - property """ self.send_removeTableProperty(login, tableName, property) self.recv_removeTableProperty() def send_removeTableProperty(self, login, tableName, property): 
self._oprot.writeMessageBegin('removeTableProperty', TMessageType.CALL, self._seqid) args = removeTableProperty_args() args.login = login args.tableName = tableName args.property = property args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_removeTableProperty(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = removeTableProperty_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 return def renameTable(self, login, oldTableName, newTableName): """ Parameters: - login - oldTableName - newTableName """ self.send_renameTable(login, oldTableName, newTableName) self.recv_renameTable() def send_renameTable(self, login, oldTableName, newTableName): self._oprot.writeMessageBegin('renameTable', TMessageType.CALL, self._seqid) args = renameTable_args() args.login = login args.oldTableName = oldTableName args.newTableName = newTableName args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_renameTable(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = renameTable_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 if result.ouch4 is not None: raise result.ouch4 return def setLocalityGroups(self, login, tableName, groups): """ Parameters: - login - tableName - groups """ self.send_setLocalityGroups(login, tableName, groups) self.recv_setLocalityGroups() def send_setLocalityGroups(self, login, tableName, groups): 
self._oprot.writeMessageBegin('setLocalityGroups', TMessageType.CALL, self._seqid) args = setLocalityGroups_args() args.login = login args.tableName = tableName args.groups = groups args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_setLocalityGroups(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = setLocalityGroups_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 return def setTableProperty(self, login, tableName, property, value): """ Parameters: - login - tableName - property - value """ self.send_setTableProperty(login, tableName, property, value) self.recv_setTableProperty() def send_setTableProperty(self, login, tableName, property, value): self._oprot.writeMessageBegin('setTableProperty', TMessageType.CALL, self._seqid) args = setTableProperty_args() args.login = login args.tableName = tableName args.property = property args.value = value args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_setTableProperty(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = setTableProperty_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 return def splitRangeByTablets(self, login, tableName, range, maxSplits): """ Parameters: - login - tableName - range - maxSplits """ self.send_splitRangeByTablets(login, tableName, range, maxSplits) return self.recv_splitRangeByTablets() def send_splitRangeByTablets(self, login, tableName, range, 
maxSplits): self._oprot.writeMessageBegin('splitRangeByTablets', TMessageType.CALL, self._seqid) args = splitRangeByTablets_args() args.login = login args.tableName = tableName args.range = range args.maxSplits = maxSplits args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_splitRangeByTablets(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = splitRangeByTablets_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 raise TApplicationException(TApplicationException.MISSING_RESULT, "splitRangeByTablets failed: unknown result") def tableExists(self, login, tableName): """ Parameters: - login - tableName """ self.send_tableExists(login, tableName) return self.recv_tableExists() def send_tableExists(self, login, tableName): self._oprot.writeMessageBegin('tableExists', TMessageType.CALL, self._seqid) args = tableExists_args() args.login = login args.tableName = tableName args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_tableExists(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = tableExists_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success raise TApplicationException(TApplicationException.MISSING_RESULT, "tableExists failed: unknown result") def tableIdMap(self, login): """ Parameters: - login """ self.send_tableIdMap(login) return self.recv_tableIdMap() def send_tableIdMap(self, login): self._oprot.writeMessageBegin('tableIdMap', TMessageType.CALL, self._seqid) args 
= tableIdMap_args() args.login = login args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_tableIdMap(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = tableIdMap_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success raise TApplicationException(TApplicationException.MISSING_RESULT, "tableIdMap failed: unknown result") def testTableClassLoad(self, login, tableName, className, asTypeName): """ Parameters: - login - tableName - className - asTypeName """ self.send_testTableClassLoad(login, tableName, className, asTypeName) return self.recv_testTableClassLoad() def send_testTableClassLoad(self, login, tableName, className, asTypeName): self._oprot.writeMessageBegin('testTableClassLoad', TMessageType.CALL, self._seqid) args = testTableClassLoad_args() args.login = login args.tableName = tableName args.className = className args.asTypeName = asTypeName args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_testTableClassLoad(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = testTableClassLoad_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 raise TApplicationException(TApplicationException.MISSING_RESULT, "testTableClassLoad failed: unknown result") def pingTabletServer(self, login, tserver): """ Parameters: - login - tserver """ self.send_pingTabletServer(login, tserver) self.recv_pingTabletServer() def send_pingTabletServer(self, login, tserver): 
self._oprot.writeMessageBegin('pingTabletServer', TMessageType.CALL, self._seqid) args = pingTabletServer_args() args.login = login args.tserver = tserver args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_pingTabletServer(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = pingTabletServer_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 return def getActiveScans(self, login, tserver): """ Parameters: - login - tserver """ self.send_getActiveScans(login, tserver) return self.recv_getActiveScans() def send_getActiveScans(self, login, tserver): self._oprot.writeMessageBegin('getActiveScans', TMessageType.CALL, self._seqid) args = getActiveScans_args() args.login = login args.tserver = tserver args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_getActiveScans(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = getActiveScans_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 raise TApplicationException(TApplicationException.MISSING_RESULT, "getActiveScans failed: unknown result") def getActiveCompactions(self, login, tserver): """ Parameters: - login - tserver """ self.send_getActiveCompactions(login, tserver) return self.recv_getActiveCompactions() def send_getActiveCompactions(self, login, tserver): self._oprot.writeMessageBegin('getActiveCompactions', TMessageType.CALL, self._seqid) args = getActiveCompactions_args() args.login = login args.tserver = tserver 
args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_getActiveCompactions(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = getActiveCompactions_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 raise TApplicationException(TApplicationException.MISSING_RESULT, "getActiveCompactions failed: unknown result") def getSiteConfiguration(self, login): """ Parameters: - login """ self.send_getSiteConfiguration(login) return self.recv_getSiteConfiguration() def send_getSiteConfiguration(self, login): self._oprot.writeMessageBegin('getSiteConfiguration', TMessageType.CALL, self._seqid) args = getSiteConfiguration_args() args.login = login args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_getSiteConfiguration(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = getSiteConfiguration_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 raise TApplicationException(TApplicationException.MISSING_RESULT, "getSiteConfiguration failed: unknown result") def getSystemConfiguration(self, login): """ Parameters: - login """ self.send_getSystemConfiguration(login) return self.recv_getSystemConfiguration() def send_getSystemConfiguration(self, login): self._oprot.writeMessageBegin('getSystemConfiguration', TMessageType.CALL, self._seqid) args = getSystemConfiguration_args() args.login = login args.write(self._oprot) 
self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_getSystemConfiguration(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = getSystemConfiguration_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 raise TApplicationException(TApplicationException.MISSING_RESULT, "getSystemConfiguration failed: unknown result") def getTabletServers(self, login): """ Parameters: - login """ self.send_getTabletServers(login) return self.recv_getTabletServers() def send_getTabletServers(self, login): self._oprot.writeMessageBegin('getTabletServers', TMessageType.CALL, self._seqid) args = getTabletServers_args() args.login = login args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_getTabletServers(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = getTabletServers_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success raise TApplicationException(TApplicationException.MISSING_RESULT, "getTabletServers failed: unknown result") def removeProperty(self, login, property): """ Parameters: - login - property """ self.send_removeProperty(login, property) self.recv_removeProperty() def send_removeProperty(self, login, property): self._oprot.writeMessageBegin('removeProperty', TMessageType.CALL, self._seqid) args = removeProperty_args() args.login = login args.property = property args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_removeProperty(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype 
== TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = removeProperty_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 return def setProperty(self, login, property, value): """ Parameters: - login - property - value """ self.send_setProperty(login, property, value) self.recv_setProperty() def send_setProperty(self, login, property, value): self._oprot.writeMessageBegin('setProperty', TMessageType.CALL, self._seqid) args = setProperty_args() args.login = login args.property = property args.value = value args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_setProperty(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = setProperty_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 return def testClassLoad(self, login, className, asTypeName): """ Parameters: - login - className - asTypeName """ self.send_testClassLoad(login, className, asTypeName) return self.recv_testClassLoad() def send_testClassLoad(self, login, className, asTypeName): self._oprot.writeMessageBegin('testClassLoad', TMessageType.CALL, self._seqid) args = testClassLoad_args() args.login = login args.className = className args.asTypeName = asTypeName args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_testClassLoad(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = testClassLoad_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if result.ouch1 is not 
None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 raise TApplicationException(TApplicationException.MISSING_RESULT, "testClassLoad failed: unknown result") def authenticateUser(self, login, user, properties): """ Parameters: - login - user - properties """ self.send_authenticateUser(login, user, properties) return self.recv_authenticateUser() def send_authenticateUser(self, login, user, properties): self._oprot.writeMessageBegin('authenticateUser', TMessageType.CALL, self._seqid) args = authenticateUser_args() args.login = login args.user = user args.properties = properties args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_authenticateUser(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = authenticateUser_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 raise TApplicationException(TApplicationException.MISSING_RESULT, "authenticateUser failed: unknown result") def changeUserAuthorizations(self, login, user, authorizations): """ Parameters: - login - user - authorizations """ self.send_changeUserAuthorizations(login, user, authorizations) self.recv_changeUserAuthorizations() def send_changeUserAuthorizations(self, login, user, authorizations): self._oprot.writeMessageBegin('changeUserAuthorizations', TMessageType.CALL, self._seqid) args = changeUserAuthorizations_args() args.login = login args.user = user args.authorizations = authorizations args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_changeUserAuthorizations(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) 
iprot.readMessageEnd() raise x result = changeUserAuthorizations_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 return def changeLocalUserPassword(self, login, user, password): """ Parameters: - login - user - password """ self.send_changeLocalUserPassword(login, user, password) self.recv_changeLocalUserPassword() def send_changeLocalUserPassword(self, login, user, password): self._oprot.writeMessageBegin('changeLocalUserPassword', TMessageType.CALL, self._seqid) args = changeLocalUserPassword_args() args.login = login args.user = user args.password = password args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_changeLocalUserPassword(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = changeLocalUserPassword_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 return def createLocalUser(self, login, user, password): """ Parameters: - login - user - password """ self.send_createLocalUser(login, user, password) self.recv_createLocalUser() def send_createLocalUser(self, login, user, password): self._oprot.writeMessageBegin('createLocalUser', TMessageType.CALL, self._seqid) args = createLocalUser_args() args.login = login args.user = user args.password = password args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_createLocalUser(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = createLocalUser_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: 
raise result.ouch2 return def dropLocalUser(self, login, user): """ Parameters: - login - user """ self.send_dropLocalUser(login, user) self.recv_dropLocalUser() def send_dropLocalUser(self, login, user): self._oprot.writeMessageBegin('dropLocalUser', TMessageType.CALL, self._seqid) args = dropLocalUser_args() args.login = login args.user = user args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_dropLocalUser(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = dropLocalUser_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 return def getUserAuthorizations(self, login, user): """ Parameters: - login - user """ self.send_getUserAuthorizations(login, user) return self.recv_getUserAuthorizations() def send_getUserAuthorizations(self, login, user): self._oprot.writeMessageBegin('getUserAuthorizations', TMessageType.CALL, self._seqid) args = getUserAuthorizations_args() args.login = login args.user = user args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_getUserAuthorizations(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = getUserAuthorizations_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 raise TApplicationException(TApplicationException.MISSING_RESULT, "getUserAuthorizations failed: unknown result") def grantSystemPermission(self, login, user, perm): """ Parameters: - login - user - perm """ self.send_grantSystemPermission(login, user, perm) 
self.recv_grantSystemPermission() def send_grantSystemPermission(self, login, user, perm): self._oprot.writeMessageBegin('grantSystemPermission', TMessageType.CALL, self._seqid) args = grantSystemPermission_args() args.login = login args.user = user args.perm = perm args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_grantSystemPermission(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = grantSystemPermission_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 return def grantTablePermission(self, login, user, table, perm): """ Parameters: - login - user - table - perm """ self.send_grantTablePermission(login, user, table, perm) self.recv_grantTablePermission() def send_grantTablePermission(self, login, user, table, perm): self._oprot.writeMessageBegin('grantTablePermission', TMessageType.CALL, self._seqid) args = grantTablePermission_args() args.login = login args.user = user args.table = table args.perm = perm args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_grantTablePermission(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = grantTablePermission_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 return def hasSystemPermission(self, login, user, perm): """ Parameters: - login - user - perm """ self.send_hasSystemPermission(login, user, perm) return self.recv_hasSystemPermission() def send_hasSystemPermission(self, login, user, perm): 
self._oprot.writeMessageBegin('hasSystemPermission', TMessageType.CALL, self._seqid) args = hasSystemPermission_args() args.login = login args.user = user args.perm = perm args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_hasSystemPermission(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = hasSystemPermission_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 raise TApplicationException(TApplicationException.MISSING_RESULT, "hasSystemPermission failed: unknown result") def hasTablePermission(self, login, user, table, perm): """ Parameters: - login - user - table - perm """ self.send_hasTablePermission(login, user, table, perm) return self.recv_hasTablePermission() def send_hasTablePermission(self, login, user, table, perm): self._oprot.writeMessageBegin('hasTablePermission', TMessageType.CALL, self._seqid) args = hasTablePermission_args() args.login = login args.user = user args.table = table args.perm = perm args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_hasTablePermission(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = hasTablePermission_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 raise TApplicationException(TApplicationException.MISSING_RESULT, "hasTablePermission failed: unknown result") def listLocalUsers(self, login): """ Parameters: - login """ 
self.send_listLocalUsers(login) return self.recv_listLocalUsers() def send_listLocalUsers(self, login): self._oprot.writeMessageBegin('listLocalUsers', TMessageType.CALL, self._seqid) args = listLocalUsers_args() args.login = login args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_listLocalUsers(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = listLocalUsers_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 raise TApplicationException(TApplicationException.MISSING_RESULT, "listLocalUsers failed: unknown result") def revokeSystemPermission(self, login, user, perm): """ Parameters: - login - user - perm """ self.send_revokeSystemPermission(login, user, perm) self.recv_revokeSystemPermission() def send_revokeSystemPermission(self, login, user, perm): self._oprot.writeMessageBegin('revokeSystemPermission', TMessageType.CALL, self._seqid) args = revokeSystemPermission_args() args.login = login args.user = user args.perm = perm args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_revokeSystemPermission(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = revokeSystemPermission_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 return def revokeTablePermission(self, login, user, table, perm): """ Parameters: - login - user - table - perm """ self.send_revokeTablePermission(login, user, table, perm) 
self.recv_revokeTablePermission() def send_revokeTablePermission(self, login, user, table, perm): self._oprot.writeMessageBegin('revokeTablePermission', TMessageType.CALL, self._seqid) args = revokeTablePermission_args() args.login = login args.user = user args.table = table args.perm = perm args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_revokeTablePermission(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = revokeTablePermission_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 return def grantNamespacePermission(self, login, user, namespaceName, perm): """ Parameters: - login - user - namespaceName - perm """ self.send_grantNamespacePermission(login, user, namespaceName, perm) self.recv_grantNamespacePermission() def send_grantNamespacePermission(self, login, user, namespaceName, perm): self._oprot.writeMessageBegin('grantNamespacePermission', TMessageType.CALL, self._seqid) args = grantNamespacePermission_args() args.login = login args.user = user args.namespaceName = namespaceName args.perm = perm args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_grantNamespacePermission(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = grantNamespacePermission_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 return def hasNamespacePermission(self, login, user, namespaceName, perm): """ Parameters: - login - user - namespaceName - perm """ 
self.send_hasNamespacePermission(login, user, namespaceName, perm) return self.recv_hasNamespacePermission() def send_hasNamespacePermission(self, login, user, namespaceName, perm): self._oprot.writeMessageBegin('hasNamespacePermission', TMessageType.CALL, self._seqid) args = hasNamespacePermission_args() args.login = login args.user = user args.namespaceName = namespaceName args.perm = perm args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_hasNamespacePermission(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = hasNamespacePermission_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 raise TApplicationException(TApplicationException.MISSING_RESULT, "hasNamespacePermission failed: unknown result") def revokeNamespacePermission(self, login, user, namespaceName, perm): """ Parameters: - login - user - namespaceName - perm """ self.send_revokeNamespacePermission(login, user, namespaceName, perm) self.recv_revokeNamespacePermission() def send_revokeNamespacePermission(self, login, user, namespaceName, perm): self._oprot.writeMessageBegin('revokeNamespacePermission', TMessageType.CALL, self._seqid) args = revokeNamespacePermission_args() args.login = login args.user = user args.namespaceName = namespaceName args.perm = perm args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_revokeNamespacePermission(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = revokeNamespacePermission_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise 
result.ouch1 if result.ouch2 is not None: raise result.ouch2 return def createBatchScanner(self, login, tableName, options): """ Parameters: - login - tableName - options """ self.send_createBatchScanner(login, tableName, options) return self.recv_createBatchScanner() def send_createBatchScanner(self, login, tableName, options): self._oprot.writeMessageBegin('createBatchScanner', TMessageType.CALL, self._seqid) args = createBatchScanner_args() args.login = login args.tableName = tableName args.options = options args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_createBatchScanner(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = createBatchScanner_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 raise TApplicationException(TApplicationException.MISSING_RESULT, "createBatchScanner failed: unknown result") def createScanner(self, login, tableName, options): """ Parameters: - login - tableName - options """ self.send_createScanner(login, tableName, options) return self.recv_createScanner() def send_createScanner(self, login, tableName, options): self._oprot.writeMessageBegin('createScanner', TMessageType.CALL, self._seqid) args = createScanner_args() args.login = login args.tableName = tableName args.options = options args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_createScanner(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = createScanner_result() result.read(iprot) iprot.readMessageEnd() if result.success 
is not None: return result.success if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 raise TApplicationException(TApplicationException.MISSING_RESULT, "createScanner failed: unknown result") def hasNext(self, scanner): """ Parameters: - scanner """ self.send_hasNext(scanner) return self.recv_hasNext() def send_hasNext(self, scanner): self._oprot.writeMessageBegin('hasNext', TMessageType.CALL, self._seqid) args = hasNext_args() args.scanner = scanner args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_hasNext(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = hasNext_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if result.ouch1 is not None: raise result.ouch1 raise TApplicationException(TApplicationException.MISSING_RESULT, "hasNext failed: unknown result") def nextEntry(self, scanner): """ Parameters: - scanner """ self.send_nextEntry(scanner) return self.recv_nextEntry() def send_nextEntry(self, scanner): self._oprot.writeMessageBegin('nextEntry', TMessageType.CALL, self._seqid) args = nextEntry_args() args.scanner = scanner args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_nextEntry(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = nextEntry_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 raise TApplicationException(TApplicationException.MISSING_RESULT, 
"nextEntry failed: unknown result") def nextK(self, scanner, k): """ Parameters: - scanner - k """ self.send_nextK(scanner, k) return self.recv_nextK() def send_nextK(self, scanner, k): self._oprot.writeMessageBegin('nextK', TMessageType.CALL, self._seqid) args = nextK_args() args.scanner = scanner args.k = k args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_nextK(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = nextK_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 raise TApplicationException(TApplicationException.MISSING_RESULT, "nextK failed: unknown result") def closeScanner(self, scanner): """ Parameters: - scanner """ self.send_closeScanner(scanner) self.recv_closeScanner() def send_closeScanner(self, scanner): self._oprot.writeMessageBegin('closeScanner', TMessageType.CALL, self._seqid) args = closeScanner_args() args.scanner = scanner args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_closeScanner(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = closeScanner_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 return def updateAndFlush(self, login, tableName, cells): """ Parameters: - login - tableName - cells """ self.send_updateAndFlush(login, tableName, cells) self.recv_updateAndFlush() def send_updateAndFlush(self, login, tableName, cells): self._oprot.writeMessageBegin('updateAndFlush', TMessageType.CALL, self._seqid) args = updateAndFlush_args() 
args.login = login args.tableName = tableName args.cells = cells args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_updateAndFlush(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = updateAndFlush_result() result.read(iprot) iprot.readMessageEnd() if result.outch1 is not None: raise result.outch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 if result.ouch4 is not None: raise result.ouch4 return def createWriter(self, login, tableName, opts): """ Parameters: - login - tableName - opts """ self.send_createWriter(login, tableName, opts) return self.recv_createWriter() def send_createWriter(self, login, tableName, opts): self._oprot.writeMessageBegin('createWriter', TMessageType.CALL, self._seqid) args = createWriter_args() args.login = login args.tableName = tableName args.opts = opts args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_createWriter(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = createWriter_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if result.outch1 is not None: raise result.outch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 raise TApplicationException(TApplicationException.MISSING_RESULT, "createWriter failed: unknown result") def update(self, writer, cells): """ Parameters: - writer - cells """ self.send_update(writer, cells) def send_update(self, writer, cells): self._oprot.writeMessageBegin('update', TMessageType.ONEWAY, self._seqid) args = update_args() args.writer = writer args.cells = cells args.write(self._oprot) 
self._oprot.writeMessageEnd() self._oprot.trans.flush() def flush(self, writer): """ Parameters: - writer """ self.send_flush(writer) self.recv_flush() def send_flush(self, writer): self._oprot.writeMessageBegin('flush', TMessageType.CALL, self._seqid) args = flush_args() args.writer = writer args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_flush(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = flush_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 return def closeWriter(self, writer): """ Parameters: - writer """ self.send_closeWriter(writer) self.recv_closeWriter() def send_closeWriter(self, writer): self._oprot.writeMessageBegin('closeWriter', TMessageType.CALL, self._seqid) args = closeWriter_args() args.writer = writer args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_closeWriter(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = closeWriter_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 return def updateRowConditionally(self, login, tableName, row, updates): """ Parameters: - login - tableName - row - updates """ self.send_updateRowConditionally(login, tableName, row, updates) return self.recv_updateRowConditionally() def send_updateRowConditionally(self, login, tableName, row, updates): self._oprot.writeMessageBegin('updateRowConditionally', TMessageType.CALL, self._seqid) args = updateRowConditionally_args() args.login = login args.tableName = tableName args.row = row args.updates = updates 
args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_updateRowConditionally(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = updateRowConditionally_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 raise TApplicationException(TApplicationException.MISSING_RESULT, "updateRowConditionally failed: unknown result") def createConditionalWriter(self, login, tableName, options): """ Parameters: - login - tableName - options """ self.send_createConditionalWriter(login, tableName, options) return self.recv_createConditionalWriter() def send_createConditionalWriter(self, login, tableName, options): self._oprot.writeMessageBegin('createConditionalWriter', TMessageType.CALL, self._seqid) args = createConditionalWriter_args() args.login = login args.tableName = tableName args.options = options args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_createConditionalWriter(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = createConditionalWriter_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 raise TApplicationException(TApplicationException.MISSING_RESULT, "createConditionalWriter failed: unknown result") def updateRowsConditionally(self, conditionalWriter, updates): """ Parameters: - conditionalWriter - updates """ 
self.send_updateRowsConditionally(conditionalWriter, updates) return self.recv_updateRowsConditionally() def send_updateRowsConditionally(self, conditionalWriter, updates): self._oprot.writeMessageBegin('updateRowsConditionally', TMessageType.CALL, self._seqid) args = updateRowsConditionally_args() args.conditionalWriter = conditionalWriter args.updates = updates args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_updateRowsConditionally(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = updateRowsConditionally_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 raise TApplicationException(TApplicationException.MISSING_RESULT, "updateRowsConditionally failed: unknown result") def closeConditionalWriter(self, conditionalWriter): """ Parameters: - conditionalWriter """ self.send_closeConditionalWriter(conditionalWriter) self.recv_closeConditionalWriter() def send_closeConditionalWriter(self, conditionalWriter): self._oprot.writeMessageBegin('closeConditionalWriter', TMessageType.CALL, self._seqid) args = closeConditionalWriter_args() args.conditionalWriter = conditionalWriter args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_closeConditionalWriter(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = closeConditionalWriter_result() result.read(iprot) iprot.readMessageEnd() return def getRowRange(self, row): """ Parameters: - row """ self.send_getRowRange(row) return self.recv_getRowRange() def send_getRowRange(self, 
row): self._oprot.writeMessageBegin('getRowRange', TMessageType.CALL, self._seqid) args = getRowRange_args() args.row = row args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_getRowRange(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = getRowRange_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success raise TApplicationException(TApplicationException.MISSING_RESULT, "getRowRange failed: unknown result") def getFollowing(self, key, part): """ Parameters: - key - part """ self.send_getFollowing(key, part) return self.recv_getFollowing() def send_getFollowing(self, key, part): self._oprot.writeMessageBegin('getFollowing', TMessageType.CALL, self._seqid) args = getFollowing_args() args.key = key args.part = part args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_getFollowing(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = getFollowing_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success raise TApplicationException(TApplicationException.MISSING_RESULT, "getFollowing failed: unknown result") def systemNamespace(self): self.send_systemNamespace() return self.recv_systemNamespace() def send_systemNamespace(self): self._oprot.writeMessageBegin('systemNamespace', TMessageType.CALL, self._seqid) args = systemNamespace_args() args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_systemNamespace(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x 
result = systemNamespace_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success raise TApplicationException(TApplicationException.MISSING_RESULT, "systemNamespace failed: unknown result") def defaultNamespace(self): self.send_defaultNamespace() return self.recv_defaultNamespace() def send_defaultNamespace(self): self._oprot.writeMessageBegin('defaultNamespace', TMessageType.CALL, self._seqid) args = defaultNamespace_args() args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_defaultNamespace(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = defaultNamespace_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success raise TApplicationException(TApplicationException.MISSING_RESULT, "defaultNamespace failed: unknown result") def listNamespaces(self, login): """ Parameters: - login """ self.send_listNamespaces(login) return self.recv_listNamespaces() def send_listNamespaces(self, login): self._oprot.writeMessageBegin('listNamespaces', TMessageType.CALL, self._seqid) args = listNamespaces_args() args.login = login args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_listNamespaces(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = listNamespaces_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 raise TApplicationException(TApplicationException.MISSING_RESULT, "listNamespaces failed: unknown result") def namespaceExists(self, login, namespaceName): """ Parameters: - login - 
namespaceName """ self.send_namespaceExists(login, namespaceName) return self.recv_namespaceExists() def send_namespaceExists(self, login, namespaceName): self._oprot.writeMessageBegin('namespaceExists', TMessageType.CALL, self._seqid) args = namespaceExists_args() args.login = login args.namespaceName = namespaceName args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_namespaceExists(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = namespaceExists_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 raise TApplicationException(TApplicationException.MISSING_RESULT, "namespaceExists failed: unknown result") def createNamespace(self, login, namespaceName): """ Parameters: - login - namespaceName """ self.send_createNamespace(login, namespaceName) self.recv_createNamespace() def send_createNamespace(self, login, namespaceName): self._oprot.writeMessageBegin('createNamespace', TMessageType.CALL, self._seqid) args = createNamespace_args() args.login = login args.namespaceName = namespaceName args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_createNamespace(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = createNamespace_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 return def deleteNamespace(self, login, namespaceName): """ Parameters: - login - namespaceName """ self.send_deleteNamespace(login, namespaceName) 
self.recv_deleteNamespace() def send_deleteNamespace(self, login, namespaceName): self._oprot.writeMessageBegin('deleteNamespace', TMessageType.CALL, self._seqid) args = deleteNamespace_args() args.login = login args.namespaceName = namespaceName args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_deleteNamespace(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = deleteNamespace_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 if result.ouch4 is not None: raise result.ouch4 return def renameNamespace(self, login, oldNamespaceName, newNamespaceName): """ Parameters: - login - oldNamespaceName - newNamespaceName """ self.send_renameNamespace(login, oldNamespaceName, newNamespaceName) self.recv_renameNamespace() def send_renameNamespace(self, login, oldNamespaceName, newNamespaceName): self._oprot.writeMessageBegin('renameNamespace', TMessageType.CALL, self._seqid) args = renameNamespace_args() args.login = login args.oldNamespaceName = oldNamespaceName args.newNamespaceName = newNamespaceName args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_renameNamespace(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = renameNamespace_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 if result.ouch4 is not None: raise result.ouch4 return def setNamespaceProperty(self, login, namespaceName, property, value): """ Parameters: - login - 
namespaceName - property - value """ self.send_setNamespaceProperty(login, namespaceName, property, value) self.recv_setNamespaceProperty() def send_setNamespaceProperty(self, login, namespaceName, property, value): self._oprot.writeMessageBegin('setNamespaceProperty', TMessageType.CALL, self._seqid) args = setNamespaceProperty_args() args.login = login args.namespaceName = namespaceName args.property = property args.value = value args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_setNamespaceProperty(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = setNamespaceProperty_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 return def removeNamespaceProperty(self, login, namespaceName, property): """ Parameters: - login - namespaceName - property """ self.send_removeNamespaceProperty(login, namespaceName, property) self.recv_removeNamespaceProperty() def send_removeNamespaceProperty(self, login, namespaceName, property): self._oprot.writeMessageBegin('removeNamespaceProperty', TMessageType.CALL, self._seqid) args = removeNamespaceProperty_args() args.login = login args.namespaceName = namespaceName args.property = property args.write(self._oprot) self._oprot.writeMessageEnd() self._oprot.trans.flush() def recv_removeNamespaceProperty(self): iprot = self._iprot (fname, mtype, rseqid) = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = removeNamespaceProperty_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 if result.ouch3 is not None: raise result.ouch3 return 
    # NOTE(review): everything below is Thrift-generated RPC boilerplate
    # (Accumulo proxy service). Do not hand-edit the serialization logic;
    # regenerate from the .thrift IDL instead.
    def getNamespaceProperties(self, login, namespaceName):
        """
        Parameters:
         - login
         - namespaceName

        """
        self.send_getNamespaceProperties(login, namespaceName)
        return self.recv_getNamespaceProperties()

    def send_getNamespaceProperties(self, login, namespaceName):
        self._oprot.writeMessageBegin('getNamespaceProperties', TMessageType.CALL, self._seqid)
        args = getNamespaceProperties_args()
        args.login = login
        args.namespaceName = namespaceName
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_getNamespaceProperties(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = getNamespaceProperties_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.ouch1 is not None:
            raise result.ouch1
        if result.ouch2 is not None:
            raise result.ouch2
        if result.ouch3 is not None:
            raise result.ouch3
        raise TApplicationException(TApplicationException.MISSING_RESULT, "getNamespaceProperties failed: unknown result")

    def namespaceIdMap(self, login):
        """
        Parameters:
         - login

        """
        self.send_namespaceIdMap(login)
        return self.recv_namespaceIdMap()

    def send_namespaceIdMap(self, login):
        self._oprot.writeMessageBegin('namespaceIdMap', TMessageType.CALL, self._seqid)
        args = namespaceIdMap_args()
        args.login = login
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_namespaceIdMap(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = namespaceIdMap_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        # NOTE: this RPC declares only two service exceptions (ouch1/ouch2).
        if result.ouch1 is not None:
            raise result.ouch1
        if result.ouch2 is not None:
            raise result.ouch2
        raise TApplicationException(TApplicationException.MISSING_RESULT, "namespaceIdMap failed: unknown result")

    def attachNamespaceIterator(self, login, namespaceName, setting, scopes):
        """
        Parameters:
         - login
         - namespaceName
         - setting
         - scopes

        """
        self.send_attachNamespaceIterator(login, namespaceName, setting, scopes)
        self.recv_attachNamespaceIterator()

    def send_attachNamespaceIterator(self, login, namespaceName, setting, scopes):
        self._oprot.writeMessageBegin('attachNamespaceIterator', TMessageType.CALL, self._seqid)
        args = attachNamespaceIterator_args()
        args.login = login
        args.namespaceName = namespaceName
        args.setting = setting
        args.scopes = scopes
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_attachNamespaceIterator(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = attachNamespaceIterator_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.ouch1 is not None:
            raise result.ouch1
        if result.ouch2 is not None:
            raise result.ouch2
        if result.ouch3 is not None:
            raise result.ouch3
        return

    def removeNamespaceIterator(self, login, namespaceName, name, scopes):
        """
        Parameters:
         - login
         - namespaceName
         - name
         - scopes

        """
        self.send_removeNamespaceIterator(login, namespaceName, name, scopes)
        self.recv_removeNamespaceIterator()

    def send_removeNamespaceIterator(self, login, namespaceName, name, scopes):
        self._oprot.writeMessageBegin('removeNamespaceIterator', TMessageType.CALL, self._seqid)
        args = removeNamespaceIterator_args()
        args.login = login
        args.namespaceName = namespaceName
        args.name = name
        args.scopes = scopes
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_removeNamespaceIterator(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = removeNamespaceIterator_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.ouch1 is not None:
            raise result.ouch1
        if result.ouch2 is not None:
            raise result.ouch2
        if result.ouch3 is not None:
            raise result.ouch3
        return

    def getNamespaceIteratorSetting(self, login, namespaceName, name, scope):
        """
        Parameters:
         - login
         - namespaceName
         - name
         - scope

        """
        self.send_getNamespaceIteratorSetting(login, namespaceName, name, scope)
        return self.recv_getNamespaceIteratorSetting()

    def send_getNamespaceIteratorSetting(self, login, namespaceName, name, scope):
        self._oprot.writeMessageBegin('getNamespaceIteratorSetting', TMessageType.CALL, self._seqid)
        args = getNamespaceIteratorSetting_args()
        args.login = login
        args.namespaceName = namespaceName
        args.name = name
        args.scope = scope
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_getNamespaceIteratorSetting(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = getNamespaceIteratorSetting_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.ouch1 is not None:
            raise result.ouch1
        if result.ouch2 is not None:
            raise result.ouch2
        if result.ouch3 is not None:
            raise result.ouch3
        raise TApplicationException(TApplicationException.MISSING_RESULT, "getNamespaceIteratorSetting failed: unknown result")

    def listNamespaceIterators(self, login, namespaceName):
        """
        Parameters:
         - login
         - namespaceName

        """
        self.send_listNamespaceIterators(login, namespaceName)
        return self.recv_listNamespaceIterators()

    def send_listNamespaceIterators(self, login, namespaceName):
        self._oprot.writeMessageBegin('listNamespaceIterators', TMessageType.CALL, self._seqid)
        args = listNamespaceIterators_args()
        args.login = login
        args.namespaceName = namespaceName
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_listNamespaceIterators(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = listNamespaceIterators_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.ouch1 is not None:
            raise result.ouch1
        if result.ouch2 is not None:
            raise result.ouch2
        if result.ouch3 is not None:
            raise result.ouch3
        raise TApplicationException(TApplicationException.MISSING_RESULT, "listNamespaceIterators failed: unknown result")

    def checkNamespaceIteratorConflicts(self, login, namespaceName, setting, scopes):
        """
        Parameters:
         - login
         - namespaceName
         - setting
         - scopes

        """
        self.send_checkNamespaceIteratorConflicts(login, namespaceName, setting, scopes)
        self.recv_checkNamespaceIteratorConflicts()

    def send_checkNamespaceIteratorConflicts(self, login, namespaceName, setting, scopes):
        self._oprot.writeMessageBegin('checkNamespaceIteratorConflicts', TMessageType.CALL, self._seqid)
        args = checkNamespaceIteratorConflicts_args()
        args.login = login
        args.namespaceName = namespaceName
        args.setting = setting
        args.scopes = scopes
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_checkNamespaceIteratorConflicts(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = checkNamespaceIteratorConflicts_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.ouch1 is not None:
            raise result.ouch1
        if result.ouch2 is not None:
            raise result.ouch2
        if result.ouch3 is not None:
            raise result.ouch3
        return

    def addNamespaceConstraint(self, login, namespaceName, constraintClassName):
        """
        Parameters:
         - login
         - namespaceName
         - constraintClassName

        """
        self.send_addNamespaceConstraint(login, namespaceName, constraintClassName)
        return self.recv_addNamespaceConstraint()

    def send_addNamespaceConstraint(self, login, namespaceName, constraintClassName):
        self._oprot.writeMessageBegin('addNamespaceConstraint', TMessageType.CALL, self._seqid)
        args = addNamespaceConstraint_args()
        args.login = login
        args.namespaceName = namespaceName
        args.constraintClassName = constraintClassName
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_addNamespaceConstraint(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = addNamespaceConstraint_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.ouch1 is not None:
            raise result.ouch1
        if result.ouch2 is not None:
            raise result.ouch2
        if result.ouch3 is not None:
            raise result.ouch3
        raise TApplicationException(TApplicationException.MISSING_RESULT, "addNamespaceConstraint failed: unknown result")

    def removeNamespaceConstraint(self, login, namespaceName, id):
        """
        Parameters:
         - login
         - namespaceName
         - id

        """
        self.send_removeNamespaceConstraint(login, namespaceName, id)
        self.recv_removeNamespaceConstraint()

    def send_removeNamespaceConstraint(self, login, namespaceName, id):
        self._oprot.writeMessageBegin('removeNamespaceConstraint', TMessageType.CALL, self._seqid)
        args = removeNamespaceConstraint_args()
        args.login = login
        args.namespaceName = namespaceName
        args.id = id
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_removeNamespaceConstraint(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = removeNamespaceConstraint_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.ouch1 is not None:
            raise result.ouch1
        if result.ouch2 is not None:
            raise result.ouch2
        if result.ouch3 is not None:
            raise result.ouch3
        return

    def listNamespaceConstraints(self, login, namespaceName):
        """
        Parameters:
         - login
         - namespaceName

        """
        self.send_listNamespaceConstraints(login, namespaceName)
        return self.recv_listNamespaceConstraints()

    def send_listNamespaceConstraints(self, login, namespaceName):
        self._oprot.writeMessageBegin('listNamespaceConstraints', TMessageType.CALL, self._seqid)
        args = listNamespaceConstraints_args()
        args.login = login
        args.namespaceName = namespaceName
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_listNamespaceConstraints(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = listNamespaceConstraints_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.ouch1 is not None:
            raise result.ouch1
        if result.ouch2 is not None:
            raise result.ouch2
        if result.ouch3 is not None:
            raise result.ouch3
        raise TApplicationException(TApplicationException.MISSING_RESULT, "listNamespaceConstraints failed: unknown result")

    def testNamespaceClassLoad(self, login, namespaceName, className, asTypeName):
        """
        Parameters:
         - login
         - namespaceName
         - className
         - asTypeName

        """
        self.send_testNamespaceClassLoad(login, namespaceName, className, asTypeName)
        return self.recv_testNamespaceClassLoad()

    def send_testNamespaceClassLoad(self, login, namespaceName, className, asTypeName):
        self._oprot.writeMessageBegin('testNamespaceClassLoad', TMessageType.CALL, self._seqid)
        args = testNamespaceClassLoad_args()
        args.login = login
        args.namespaceName = namespaceName
        args.className = className
        args.asTypeName = asTypeName
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_testNamespaceClassLoad(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = testNamespaceClassLoad_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.ouch1 is not None:
            raise result.ouch1
        if result.ouch2 is not None:
            raise result.ouch2
        if result.ouch3 is not None:
            raise result.ouch3
        raise TApplicationException(TApplicationException.MISSING_RESULT, "testNamespaceClassLoad failed: unknown result")


class Processor(Iface, TProcessor):
    """Server-side dispatcher: maps incoming RPC names to handler methods."""

    def __init__(self, handler):
        # handler implements Iface; _processMap routes each wire-level
        # function name to the matching process_* deserialize/dispatch method.
        self._handler = handler
        self._processMap = {}
        self._processMap["login"] = Processor.process_login
        self._processMap["addConstraint"] = Processor.process_addConstraint
        self._processMap["addSplits"] = Processor.process_addSplits
        self._processMap["attachIterator"] = Processor.process_attachIterator
        self._processMap["checkIteratorConflicts"] = Processor.process_checkIteratorConflicts
        self._processMap["clearLocatorCache"] = Processor.process_clearLocatorCache
        self._processMap["cloneTable"] = Processor.process_cloneTable
        self._processMap["compactTable"] = Processor.process_compactTable
        self._processMap["cancelCompaction"] = Processor.process_cancelCompaction
        self._processMap["createTable"] = Processor.process_createTable
        self._processMap["deleteTable"] = Processor.process_deleteTable
        self._processMap["deleteRows"] = Processor.process_deleteRows
        self._processMap["exportTable"] = Processor.process_exportTable
        self._processMap["flushTable"] = Processor.process_flushTable
        self._processMap["getDiskUsage"] = Processor.process_getDiskUsage
        self._processMap["getLocalityGroups"] = Processor.process_getLocalityGroups
        self._processMap["getIteratorSetting"] = Processor.process_getIteratorSetting
        self._processMap["getMaxRow"] = Processor.process_getMaxRow
        self._processMap["getTableProperties"] = Processor.process_getTableProperties
        self._processMap["importDirectory"] = Processor.process_importDirectory
        self._processMap["importTable"] = Processor.process_importTable
        self._processMap["listSplits"] = Processor.process_listSplits
        self._processMap["listTables"] = Processor.process_listTables
        self._processMap["listIterators"] = Processor.process_listIterators
        self._processMap["listConstraints"] = Processor.process_listConstraints
        self._processMap["mergeTablets"] = Processor.process_mergeTablets
        self._processMap["offlineTable"] = Processor.process_offlineTable
        self._processMap["onlineTable"] = Processor.process_onlineTable
        self._processMap["removeConstraint"] = Processor.process_removeConstraint
        self._processMap["removeIterator"] = Processor.process_removeIterator
        self._processMap["removeTableProperty"] = Processor.process_removeTableProperty
        self._processMap["renameTable"] = Processor.process_renameTable
        self._processMap["setLocalityGroups"] = Processor.process_setLocalityGroups
        self._processMap["setTableProperty"] = Processor.process_setTableProperty
        self._processMap["splitRangeByTablets"] = Processor.process_splitRangeByTablets
        self._processMap["tableExists"] = Processor.process_tableExists
        self._processMap["tableIdMap"] = Processor.process_tableIdMap
        self._processMap["testTableClassLoad"] = Processor.process_testTableClassLoad
        self._processMap["pingTabletServer"] = Processor.process_pingTabletServer
        self._processMap["getActiveScans"] = Processor.process_getActiveScans
        self._processMap["getActiveCompactions"] = Processor.process_getActiveCompactions
        self._processMap["getSiteConfiguration"] = Processor.process_getSiteConfiguration
        self._processMap["getSystemConfiguration"] = Processor.process_getSystemConfiguration
        self._processMap["getTabletServers"] = Processor.process_getTabletServers
        self._processMap["removeProperty"] = Processor.process_removeProperty
        self._processMap["setProperty"] = Processor.process_setProperty
        self._processMap["testClassLoad"] = Processor.process_testClassLoad
        self._processMap["authenticateUser"] = Processor.process_authenticateUser
        self._processMap["changeUserAuthorizations"] = Processor.process_changeUserAuthorizations
        self._processMap["changeLocalUserPassword"] = Processor.process_changeLocalUserPassword
        self._processMap["createLocalUser"] = Processor.process_createLocalUser
        self._processMap["dropLocalUser"] = Processor.process_dropLocalUser
        self._processMap["getUserAuthorizations"] = Processor.process_getUserAuthorizations
        self._processMap["grantSystemPermission"] = Processor.process_grantSystemPermission
        self._processMap["grantTablePermission"] = Processor.process_grantTablePermission
        self._processMap["hasSystemPermission"] = Processor.process_hasSystemPermission
        self._processMap["hasTablePermission"] = Processor.process_hasTablePermission
        self._processMap["listLocalUsers"] = Processor.process_listLocalUsers
        self._processMap["revokeSystemPermission"] = Processor.process_revokeSystemPermission
        self._processMap["revokeTablePermission"] = Processor.process_revokeTablePermission
        self._processMap["grantNamespacePermission"] = Processor.process_grantNamespacePermission
        self._processMap["hasNamespacePermission"] = Processor.process_hasNamespacePermission
        self._processMap["revokeNamespacePermission"] = Processor.process_revokeNamespacePermission
        self._processMap["createBatchScanner"] = Processor.process_createBatchScanner
        self._processMap["createScanner"] = Processor.process_createScanner
        self._processMap["hasNext"] = Processor.process_hasNext
        self._processMap["nextEntry"] = Processor.process_nextEntry
        self._processMap["nextK"] = Processor.process_nextK
        self._processMap["closeScanner"] = Processor.process_closeScanner
        self._processMap["updateAndFlush"] = Processor.process_updateAndFlush
        self._processMap["createWriter"] = Processor.process_createWriter
        self._processMap["update"] = Processor.process_update
        self._processMap["flush"] = Processor.process_flush
        self._processMap["closeWriter"] = Processor.process_closeWriter
        self._processMap["updateRowConditionally"] = Processor.process_updateRowConditionally
        self._processMap["createConditionalWriter"] = Processor.process_createConditionalWriter
        self._processMap["updateRowsConditionally"] = Processor.process_updateRowsConditionally
        self._processMap["closeConditionalWriter"] = Processor.process_closeConditionalWriter
        self._processMap["getRowRange"] = Processor.process_getRowRange
        self._processMap["getFollowing"] = Processor.process_getFollowing
        self._processMap["systemNamespace"] = Processor.process_systemNamespace
        self._processMap["defaultNamespace"] = Processor.process_defaultNamespace
        self._processMap["listNamespaces"] = Processor.process_listNamespaces
        self._processMap["namespaceExists"] = Processor.process_namespaceExists
        self._processMap["createNamespace"] = Processor.process_createNamespace
        self._processMap["deleteNamespace"] = Processor.process_deleteNamespace
        self._processMap["renameNamespace"] = Processor.process_renameNamespace
        self._processMap["setNamespaceProperty"] = Processor.process_setNamespaceProperty
        self._processMap["removeNamespaceProperty"] = Processor.process_removeNamespaceProperty
        self._processMap["getNamespaceProperties"] = Processor.process_getNamespaceProperties
        self._processMap["namespaceIdMap"] = Processor.process_namespaceIdMap
        self._processMap["attachNamespaceIterator"] = Processor.process_attachNamespaceIterator
        self._processMap["removeNamespaceIterator"] = Processor.process_removeNamespaceIterator
        self._processMap["getNamespaceIteratorSetting"] = Processor.process_getNamespaceIteratorSetting
        self._processMap["listNamespaceIterators"] = Processor.process_listNamespaceIterators
        self._processMap["checkNamespaceIteratorConflicts"] = Processor.process_checkNamespaceIteratorConflicts
        self._processMap["addNamespaceConstraint"] = Processor.process_addNamespaceConstraint
        self._processMap["removeNamespaceConstraint"] = Processor.process_removeNamespaceConstraint
        self._processMap["listNamespaceConstraints"] = Processor.process_listNamespaceConstraints
        self._processMap["testNamespaceClassLoad"] = Processor.process_testNamespaceClassLoad

    def process(self, iprot, oprot):
        # Read the next message and dispatch by function name; unknown names
        # get an UNKNOWN_METHOD TApplicationException reply.
        (name, type, seqid) = iprot.readMessageBegin()
        if name not in self._processMap:
            iprot.skip(TType.STRUCT)
            iprot.readMessageEnd()
            x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
            oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
            x.write(oprot)
            oprot.writeMessageEnd()
            oprot.trans.flush()
            return
        else:
            self._processMap[name](self, seqid, iprot, oprot)
        return True

    # Each process_* method below follows the same generated pattern:
    # deserialize args, invoke the handler, map declared service exceptions
    # into the result struct (REPLY), and map anything unexpected to an
    # INTERNAL_ERROR TApplicationException. Transport errors and
    # KeyboardInterrupt/SystemExit always propagate.
    def process_login(self, seqid, iprot, oprot):
        args = login_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = login_result()
        try:
            result.success = self._handler.login(args.principal, args.loginProperties)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except AccumuloSecurityException as ouch2:
            msg_type = TMessageType.REPLY
            result.ouch2 = ouch2
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("login", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_addConstraint(self, seqid, iprot, oprot):
        args = addConstraint_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = addConstraint_result()
        try:
            result.success = self._handler.addConstraint(args.login, args.tableName, args.constraintClassName)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except AccumuloException as ouch1:
            msg_type = TMessageType.REPLY
            result.ouch1 = ouch1
        except AccumuloSecurityException as ouch2:
            msg_type = TMessageType.REPLY
            result.ouch2 = ouch2
        except TableNotFoundException as ouch3:
            msg_type = TMessageType.REPLY
            result.ouch3 = ouch3
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("addConstraint", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_addSplits(self, seqid, iprot, oprot):
        args = addSplits_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = addSplits_result()
        try:
            self._handler.addSplits(args.login, args.tableName, args.splits)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except AccumuloException as ouch1:
            msg_type = TMessageType.REPLY
            result.ouch1 = ouch1
        except AccumuloSecurityException as ouch2:
            msg_type = TMessageType.REPLY
            result.ouch2 = ouch2
        except TableNotFoundException as ouch3:
            msg_type = TMessageType.REPLY
            result.ouch3 = ouch3
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("addSplits", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_attachIterator(self, seqid, iprot, oprot):
        args = attachIterator_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = attachIterator_result()
        try:
            self._handler.attachIterator(args.login, args.tableName, args.setting, args.scopes)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        # NOTE: exception slot order differs here (ouch1 is the security
        # exception), matching this RPC's IDL declaration.
        except AccumuloSecurityException as ouch1:
            msg_type = TMessageType.REPLY
            result.ouch1 = ouch1
        except AccumuloException as ouch2:
            msg_type = TMessageType.REPLY
            result.ouch2 = ouch2
        except TableNotFoundException as ouch3:
            msg_type = TMessageType.REPLY
            result.ouch3 = ouch3
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("attachIterator", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_checkIteratorConflicts(self, seqid, iprot, oprot):
        args = checkIteratorConflicts_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = checkIteratorConflicts_result()
        try:
            self._handler.checkIteratorConflicts(args.login, args.tableName, args.setting, args.scopes)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except AccumuloSecurityException as ouch1:
            msg_type = TMessageType.REPLY
            result.ouch1 = ouch1
        except AccumuloException as ouch2:
            msg_type = TMessageType.REPLY
            result.ouch2 = ouch2
        except TableNotFoundException as ouch3:
            msg_type = TMessageType.REPLY
            result.ouch3 = ouch3
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("checkIteratorConflicts", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_clearLocatorCache(self, seqid, iprot, oprot):
        args = clearLocatorCache_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = clearLocatorCache_result()
        try:
            self._handler.clearLocatorCache(args.login, args.tableName)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except TableNotFoundException as ouch1:
            msg_type = TMessageType.REPLY
            result.ouch1 = ouch1
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("clearLocatorCache", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_cloneTable(self, seqid, iprot, oprot):
        args = cloneTable_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = cloneTable_result()
        try:
            self._handler.cloneTable(args.login, args.tableName, args.newTableName, args.flush, args.propertiesToSet, args.propertiesToExclude)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except AccumuloException as ouch1:
            msg_type = TMessageType.REPLY
            result.ouch1 = ouch1
        except AccumuloSecurityException as ouch2:
            msg_type = TMessageType.REPLY
            result.ouch2 = ouch2
        except TableNotFoundException as ouch3:
            msg_type = TMessageType.REPLY
            result.ouch3 = ouch3
        except TableExistsException as ouch4:
            msg_type = TMessageType.REPLY
            result.ouch4 = ouch4
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("cloneTable", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_compactTable(self, seqid, iprot, oprot):
        args = compactTable_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = compactTable_result()
        try:
            self._handler.compactTable(args.login, args.tableName, args.startRow, args.endRow, args.iterators, args.flush, args.wait, args.compactionStrategy)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except AccumuloSecurityException as ouch1:
            msg_type = TMessageType.REPLY
            result.ouch1 = ouch1
        except TableNotFoundException as ouch2:
            msg_type = TMessageType.REPLY
            result.ouch2 = ouch2
        except AccumuloException as ouch3:
            msg_type = TMessageType.REPLY
            result.ouch3 = ouch3
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("compactTable", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_cancelCompaction(self, seqid, iprot, oprot):
        args = cancelCompaction_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = cancelCompaction_result()
        try:
            self._handler.cancelCompaction(args.login, args.tableName)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except AccumuloSecurityException as ouch1:
            msg_type = TMessageType.REPLY
            result.ouch1 = ouch1
        except TableNotFoundException as ouch2:
            msg_type = TMessageType.REPLY
            result.ouch2 = ouch2
        except AccumuloException as ouch3:
            msg_type = TMessageType.REPLY
            result.ouch3 = ouch3
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("cancelCompaction", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_createTable(self, seqid, iprot, oprot):
        args = createTable_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = createTable_result()
        try:
            self._handler.createTable(args.login, args.tableName, args.versioningIter, args.type)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except AccumuloException as ouch1:
            msg_type = TMessageType.REPLY
            result.ouch1 = ouch1
        except AccumuloSecurityException as ouch2:
            msg_type = TMessageType.REPLY
            result.ouch2 = ouch2
        except TableExistsException as ouch3:
            msg_type = TMessageType.REPLY
            result.ouch3 = ouch3
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("createTable", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_deleteTable(self, seqid, iprot, oprot):
        args = deleteTable_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = deleteTable_result()
        try:
            self._handler.deleteTable(args.login, args.tableName)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except AccumuloException as ouch1:
            msg_type = TMessageType.REPLY
            result.ouch1 = ouch1
        except AccumuloSecurityException as ouch2:
            msg_type = TMessageType.REPLY
            result.ouch2 = ouch2
        except TableNotFoundException as ouch3:
            msg_type = TMessageType.REPLY
            result.ouch3 = ouch3
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("deleteTable", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_deleteRows(self, seqid, iprot, oprot):
        args = deleteRows_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = deleteRows_result()
        try:
            self._handler.deleteRows(args.login, args.tableName, args.startRow, args.endRow)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except AccumuloException as ouch1:
            msg_type = TMessageType.REPLY
            result.ouch1 = ouch1
        except AccumuloSecurityException as ouch2:
            msg_type = TMessageType.REPLY
            result.ouch2 = ouch2
        except TableNotFoundException as ouch3:
            msg_type = TMessageType.REPLY
            result.ouch3 = ouch3
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("deleteRows", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_exportTable(self, seqid, iprot, oprot):
        args = exportTable_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = exportTable_result()
        try:
            self._handler.exportTable(args.login, args.tableName, args.exportDir)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except AccumuloException as ouch1:
            msg_type = TMessageType.REPLY
            result.ouch1 = ouch1
        except AccumuloSecurityException as ouch2:
            msg_type = TMessageType.REPLY
            result.ouch2 = ouch2
        except TableNotFoundException as ouch3:
            msg_type = TMessageType.REPLY
            result.ouch3 = ouch3
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("exportTable", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_flushTable(self, seqid, iprot, oprot):
        args = flushTable_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = flushTable_result()
        try:
            self._handler.flushTable(args.login, args.tableName, args.startRow, args.endRow, args.wait)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except AccumuloException as ouch1:
            msg_type = TMessageType.REPLY
            result.ouch1 = ouch1
        except AccumuloSecurityException as ouch2:
            msg_type = TMessageType.REPLY
            result.ouch2 = ouch2
        except TableNotFoundException as ouch3:
            msg_type = TMessageType.REPLY
            result.ouch3 = ouch3
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("flushTable", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_getDiskUsage(self, seqid, iprot, oprot):
        args = getDiskUsage_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = getDiskUsage_result()
        try:
            result.success = self._handler.getDiskUsage(args.login, args.tables)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except AccumuloException as ouch1:
            msg_type = TMessageType.REPLY
            result.ouch1 = ouch1
        except AccumuloSecurityException as ouch2:
            msg_type = TMessageType.REPLY
            result.ouch2 = ouch2
        except TableNotFoundException as ouch3:
            msg_type = TMessageType.REPLY
            result.ouch3 = ouch3
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("getDiskUsage", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
def process_getLocalityGroups(self, seqid, iprot, oprot): args = getLocalityGroups_args() args.read(iprot) iprot.readMessageEnd() result = getLocalityGroups_result() try: result.success = self._handler.getLocalityGroups(args.login, args.tableName) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except TableNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("getLocalityGroups", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_getIteratorSetting(self, seqid, iprot, oprot): args = getIteratorSetting_args() args.read(iprot) iprot.readMessageEnd() result = getIteratorSetting_result() try: result.success = self._handler.getIteratorSetting(args.login, args.tableName, args.iteratorName, args.scope) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except TableNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("getIteratorSetting", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_getMaxRow(self, seqid, iprot, oprot): args = getMaxRow_args() args.read(iprot) iprot.readMessageEnd() result = 
getMaxRow_result() try: result.success = self._handler.getMaxRow(args.login, args.tableName, args.auths, args.startRow, args.startInclusive, args.endRow, args.endInclusive) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except TableNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("getMaxRow", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_getTableProperties(self, seqid, iprot, oprot): args = getTableProperties_args() args.read(iprot) iprot.readMessageEnd() result = getTableProperties_result() try: result.success = self._handler.getTableProperties(args.login, args.tableName) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except TableNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("getTableProperties", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_importDirectory(self, seqid, iprot, oprot): args = importDirectory_args() args.read(iprot) iprot.readMessageEnd() result = importDirectory_result() try: self._handler.importDirectory(args.login, args.tableName, 
args.importDir, args.failureDir, args.setTime) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except TableNotFoundException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except AccumuloSecurityException as ouch4: msg_type = TMessageType.REPLY result.ouch4 = ouch4 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("importDirectory", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_importTable(self, seqid, iprot, oprot): args = importTable_args() args.read(iprot) iprot.readMessageEnd() result = importTable_result() try: self._handler.importTable(args.login, args.tableName, args.importDir) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except TableExistsException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except AccumuloSecurityException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("importTable", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_listSplits(self, seqid, iprot, oprot): args = listSplits_args() args.read(iprot) iprot.readMessageEnd() result = listSplits_result() try: result.success = self._handler.listSplits(args.login, args.tableName, args.maxSplits) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = 
TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except TableNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("listSplits", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_listTables(self, seqid, iprot, oprot): args = listTables_args() args.read(iprot) iprot.readMessageEnd() result = listTables_result() try: result.success = self._handler.listTables(args.login) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("listTables", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_listIterators(self, seqid, iprot, oprot): args = listIterators_args() args.read(iprot) iprot.readMessageEnd() result = listIterators_result() try: result.success = self._handler.listIterators(args.login, args.tableName) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except TableNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("listIterators", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() 
oprot.trans.flush() def process_listConstraints(self, seqid, iprot, oprot): args = listConstraints_args() args.read(iprot) iprot.readMessageEnd() result = listConstraints_result() try: result.success = self._handler.listConstraints(args.login, args.tableName) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except TableNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("listConstraints", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_mergeTablets(self, seqid, iprot, oprot): args = mergeTablets_args() args.read(iprot) iprot.readMessageEnd() result = mergeTablets_result() try: self._handler.mergeTablets(args.login, args.tableName, args.startRow, args.endRow) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except TableNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("mergeTablets", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_offlineTable(self, seqid, iprot, oprot): args = offlineTable_args() args.read(iprot) iprot.readMessageEnd() result = offlineTable_result() try: 
self._handler.offlineTable(args.login, args.tableName, args.wait) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except TableNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("offlineTable", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_onlineTable(self, seqid, iprot, oprot): args = onlineTable_args() args.read(iprot) iprot.readMessageEnd() result = onlineTable_result() try: self._handler.onlineTable(args.login, args.tableName, args.wait) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except TableNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("onlineTable", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_removeConstraint(self, seqid, iprot, oprot): args = removeConstraint_args() args.read(iprot) iprot.readMessageEnd() result = removeConstraint_result() try: self._handler.removeConstraint(args.login, args.tableName, args.constraint) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as 
ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except TableNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("removeConstraint", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_removeIterator(self, seqid, iprot, oprot): args = removeIterator_args() args.read(iprot) iprot.readMessageEnd() result = removeIterator_result() try: self._handler.removeIterator(args.login, args.tableName, args.iterName, args.scopes) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except TableNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("removeIterator", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_removeTableProperty(self, seqid, iprot, oprot): args = removeTableProperty_args() args.read(iprot) iprot.readMessageEnd() result = removeTableProperty_result() try: self._handler.removeTableProperty(args.login, args.tableName, args.property) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except 
TableNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("removeTableProperty", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_renameTable(self, seqid, iprot, oprot): args = renameTable_args() args.read(iprot) iprot.readMessageEnd() result = renameTable_result() try: self._handler.renameTable(args.login, args.oldTableName, args.newTableName) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except TableNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except TableExistsException as ouch4: msg_type = TMessageType.REPLY result.ouch4 = ouch4 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("renameTable", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_setLocalityGroups(self, seqid, iprot, oprot): args = setLocalityGroups_args() args.read(iprot) iprot.readMessageEnd() result = setLocalityGroups_result() try: self._handler.setLocalityGroups(args.login, args.tableName, args.groups) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except TableNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except 
Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("setLocalityGroups", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_setTableProperty(self, seqid, iprot, oprot): args = setTableProperty_args() args.read(iprot) iprot.readMessageEnd() result = setTableProperty_result() try: self._handler.setTableProperty(args.login, args.tableName, args.property, args.value) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except TableNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("setTableProperty", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_splitRangeByTablets(self, seqid, iprot, oprot): args = splitRangeByTablets_args() args.read(iprot) iprot.readMessageEnd() result = splitRangeByTablets_result() try: result.success = self._handler.splitRangeByTablets(args.login, args.tableName, args.range, args.maxSplits) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except TableNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = 
TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("splitRangeByTablets", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_tableExists(self, seqid, iprot, oprot): args = tableExists_args() args.read(iprot) iprot.readMessageEnd() result = tableExists_result() try: result.success = self._handler.tableExists(args.login, args.tableName) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("tableExists", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_tableIdMap(self, seqid, iprot, oprot): args = tableIdMap_args() args.read(iprot) iprot.readMessageEnd() result = tableIdMap_result() try: result.success = self._handler.tableIdMap(args.login) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("tableIdMap", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_testTableClassLoad(self, seqid, iprot, oprot): args = testTableClassLoad_args() args.read(iprot) iprot.readMessageEnd() result = testTableClassLoad_result() try: result.success = self._handler.testTableClassLoad(args.login, args.tableName, args.className, args.asTypeName) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 
except TableNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("testTableClassLoad", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_pingTabletServer(self, seqid, iprot, oprot): args = pingTabletServer_args() args.read(iprot) iprot.readMessageEnd() result = pingTabletServer_result() try: self._handler.pingTabletServer(args.login, args.tserver) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("pingTabletServer", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_getActiveScans(self, seqid, iprot, oprot): args = getActiveScans_args() args.read(iprot) iprot.readMessageEnd() result = getActiveScans_result() try: result.success = self._handler.getActiveScans(args.login, args.tserver) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("getActiveScans", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() 
oprot.trans.flush() def process_getActiveCompactions(self, seqid, iprot, oprot): args = getActiveCompactions_args() args.read(iprot) iprot.readMessageEnd() result = getActiveCompactions_result() try: result.success = self._handler.getActiveCompactions(args.login, args.tserver) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("getActiveCompactions", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_getSiteConfiguration(self, seqid, iprot, oprot): args = getSiteConfiguration_args() args.read(iprot) iprot.readMessageEnd() result = getSiteConfiguration_result() try: result.success = self._handler.getSiteConfiguration(args.login) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("getSiteConfiguration", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_getSystemConfiguration(self, seqid, iprot, oprot): args = getSystemConfiguration_args() args.read(iprot) iprot.readMessageEnd() result = getSystemConfiguration_result() try: result.success = self._handler.getSystemConfiguration(args.login) msg_type = TMessageType.REPLY except 
(TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("getSystemConfiguration", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_getTabletServers(self, seqid, iprot, oprot): args = getTabletServers_args() args.read(iprot) iprot.readMessageEnd() result = getTabletServers_result() try: result.success = self._handler.getTabletServers(args.login) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("getTabletServers", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_removeProperty(self, seqid, iprot, oprot): args = removeProperty_args() args.read(iprot) iprot.readMessageEnd() result = removeProperty_result() try: self._handler.removeProperty(args.login, args.property) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("removeProperty", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def 
process_setProperty(self, seqid, iprot, oprot): args = setProperty_args() args.read(iprot) iprot.readMessageEnd() result = setProperty_result() try: self._handler.setProperty(args.login, args.property, args.value) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("setProperty", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_testClassLoad(self, seqid, iprot, oprot): args = testClassLoad_args() args.read(iprot) iprot.readMessageEnd() result = testClassLoad_result() try: result.success = self._handler.testClassLoad(args.login, args.className, args.asTypeName) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("testClassLoad", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_authenticateUser(self, seqid, iprot, oprot): args = authenticateUser_args() args.read(iprot) iprot.readMessageEnd() result = authenticateUser_result() try: result.success = self._handler.authenticateUser(args.login, args.user, args.properties) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except 
AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("authenticateUser", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_changeUserAuthorizations(self, seqid, iprot, oprot): args = changeUserAuthorizations_args() args.read(iprot) iprot.readMessageEnd() result = changeUserAuthorizations_result() try: self._handler.changeUserAuthorizations(args.login, args.user, args.authorizations) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("changeUserAuthorizations", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_changeLocalUserPassword(self, seqid, iprot, oprot): args = changeLocalUserPassword_args() args.read(iprot) iprot.readMessageEnd() result = changeLocalUserPassword_result() try: self._handler.changeLocalUserPassword(args.login, args.user, args.password) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = 
TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("changeLocalUserPassword", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_createLocalUser(self, seqid, iprot, oprot): args = createLocalUser_args() args.read(iprot) iprot.readMessageEnd() result = createLocalUser_result() try: self._handler.createLocalUser(args.login, args.user, args.password) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("createLocalUser", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_dropLocalUser(self, seqid, iprot, oprot): args = dropLocalUser_args() args.read(iprot) iprot.readMessageEnd() result = dropLocalUser_result() try: self._handler.dropLocalUser(args.login, args.user) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("dropLocalUser", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_getUserAuthorizations(self, seqid, iprot, oprot): args = getUserAuthorizations_args() args.read(iprot) iprot.readMessageEnd() result = getUserAuthorizations_result() try: 
result.success = self._handler.getUserAuthorizations(args.login, args.user) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("getUserAuthorizations", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_grantSystemPermission(self, seqid, iprot, oprot): args = grantSystemPermission_args() args.read(iprot) iprot.readMessageEnd() result = grantSystemPermission_result() try: self._handler.grantSystemPermission(args.login, args.user, args.perm) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("grantSystemPermission", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_grantTablePermission(self, seqid, iprot, oprot): args = grantTablePermission_args() args.read(iprot) iprot.readMessageEnd() result = grantTablePermission_result() try: self._handler.grantTablePermission(args.login, args.user, args.table, args.perm) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: 
msg_type = TMessageType.REPLY result.ouch2 = ouch2 except TableNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("grantTablePermission", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_hasSystemPermission(self, seqid, iprot, oprot): args = hasSystemPermission_args() args.read(iprot) iprot.readMessageEnd() result = hasSystemPermission_result() try: result.success = self._handler.hasSystemPermission(args.login, args.user, args.perm) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("hasSystemPermission", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_hasTablePermission(self, seqid, iprot, oprot): args = hasTablePermission_args() args.read(iprot) iprot.readMessageEnd() result = hasTablePermission_result() try: result.success = self._handler.hasTablePermission(args.login, args.user, args.table, args.perm) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except TableNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION 
logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("hasTablePermission", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_listLocalUsers(self, seqid, iprot, oprot): args = listLocalUsers_args() args.read(iprot) iprot.readMessageEnd() result = listLocalUsers_result() try: result.success = self._handler.listLocalUsers(args.login) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except TableNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("listLocalUsers", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_revokeSystemPermission(self, seqid, iprot, oprot): args = revokeSystemPermission_args() args.read(iprot) iprot.readMessageEnd() result = revokeSystemPermission_result() try: self._handler.revokeSystemPermission(args.login, args.user, args.perm) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("revokeSystemPermission", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def 
process_revokeTablePermission(self, seqid, iprot, oprot): args = revokeTablePermission_args() args.read(iprot) iprot.readMessageEnd() result = revokeTablePermission_result() try: self._handler.revokeTablePermission(args.login, args.user, args.table, args.perm) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except TableNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("revokeTablePermission", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_grantNamespacePermission(self, seqid, iprot, oprot): args = grantNamespacePermission_args() args.read(iprot) iprot.readMessageEnd() result = grantNamespacePermission_result() try: self._handler.grantNamespacePermission(args.login, args.user, args.namespaceName, args.perm) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("grantNamespacePermission", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_hasNamespacePermission(self, seqid, iprot, oprot): args = hasNamespacePermission_args() args.read(iprot) iprot.readMessageEnd() result = hasNamespacePermission_result() try: 
result.success = self._handler.hasNamespacePermission(args.login, args.user, args.namespaceName, args.perm) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("hasNamespacePermission", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_revokeNamespacePermission(self, seqid, iprot, oprot): args = revokeNamespacePermission_args() args.read(iprot) iprot.readMessageEnd() result = revokeNamespacePermission_result() try: self._handler.revokeNamespacePermission(args.login, args.user, args.namespaceName, args.perm) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("revokeNamespacePermission", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_createBatchScanner(self, seqid, iprot, oprot): args = createBatchScanner_args() args.read(iprot) iprot.readMessageEnd() result = createBatchScanner_result() try: result.success = self._handler.createBatchScanner(args.login, args.tableName, args.options) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = 
TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except TableNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("createBatchScanner", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_createScanner(self, seqid, iprot, oprot): args = createScanner_args() args.read(iprot) iprot.readMessageEnd() result = createScanner_result() try: result.success = self._handler.createScanner(args.login, args.tableName, args.options) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except TableNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("createScanner", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_hasNext(self, seqid, iprot, oprot): args = hasNext_args() args.read(iprot) iprot.readMessageEnd() result = hasNext_result() try: result.success = self._handler.hasNext(args.scanner) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except UnknownScanner as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') 
oprot.writeMessageBegin("hasNext", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_nextEntry(self, seqid, iprot, oprot): args = nextEntry_args() args.read(iprot) iprot.readMessageEnd() result = nextEntry_result() try: result.success = self._handler.nextEntry(args.scanner) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except NoMoreEntriesException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except UnknownScanner as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except AccumuloSecurityException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("nextEntry", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_nextK(self, seqid, iprot, oprot): args = nextK_args() args.read(iprot) iprot.readMessageEnd() result = nextK_result() try: result.success = self._handler.nextK(args.scanner, args.k) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except NoMoreEntriesException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except UnknownScanner as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except AccumuloSecurityException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("nextK", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_closeScanner(self, seqid, iprot, oprot): args = closeScanner_args() args.read(iprot) iprot.readMessageEnd() result = closeScanner_result() try: 
self._handler.closeScanner(args.scanner) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except UnknownScanner as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("closeScanner", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_updateAndFlush(self, seqid, iprot, oprot): args = updateAndFlush_args() args.read(iprot) iprot.readMessageEnd() result = updateAndFlush_result() try: self._handler.updateAndFlush(args.login, args.tableName, args.cells) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as outch1: msg_type = TMessageType.REPLY result.outch1 = outch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except TableNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except MutationsRejectedException as ouch4: msg_type = TMessageType.REPLY result.ouch4 = ouch4 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("updateAndFlush", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_createWriter(self, seqid, iprot, oprot): args = createWriter_args() args.read(iprot) iprot.readMessageEnd() result = createWriter_result() try: result.success = self._handler.createWriter(args.login, args.tableName, args.opts) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as outch1: msg_type = TMessageType.REPLY result.outch1 = outch1 except AccumuloSecurityException as ouch2: 
msg_type = TMessageType.REPLY result.ouch2 = ouch2 except TableNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("createWriter", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_update(self, seqid, iprot, oprot): args = update_args() args.read(iprot) iprot.readMessageEnd() try: self._handler.update(args.writer, args.cells) except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except: pass def process_flush(self, seqid, iprot, oprot): args = flush_args() args.read(iprot) iprot.readMessageEnd() result = flush_result() try: self._handler.flush(args.writer) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except UnknownWriter as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except MutationsRejectedException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("flush", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_closeWriter(self, seqid, iprot, oprot): args = closeWriter_args() args.read(iprot) iprot.readMessageEnd() result = closeWriter_result() try: self._handler.closeWriter(args.writer) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except UnknownWriter as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except MutationsRejectedException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = 
TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("closeWriter", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_updateRowConditionally(self, seqid, iprot, oprot): args = updateRowConditionally_args() args.read(iprot) iprot.readMessageEnd() result = updateRowConditionally_result() try: result.success = self._handler.updateRowConditionally(args.login, args.tableName, args.row, args.updates) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except TableNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("updateRowConditionally", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_createConditionalWriter(self, seqid, iprot, oprot): args = createConditionalWriter_args() args.read(iprot) iprot.readMessageEnd() result = createConditionalWriter_result() try: result.success = self._handler.createConditionalWriter(args.login, args.tableName, args.options) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except TableNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') 
oprot.writeMessageBegin("createConditionalWriter", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_updateRowsConditionally(self, seqid, iprot, oprot): args = updateRowsConditionally_args() args.read(iprot) iprot.readMessageEnd() result = updateRowsConditionally_result() try: result.success = self._handler.updateRowsConditionally(args.conditionalWriter, args.updates) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except UnknownWriter as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except AccumuloSecurityException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("updateRowsConditionally", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_closeConditionalWriter(self, seqid, iprot, oprot): args = closeConditionalWriter_args() args.read(iprot) iprot.readMessageEnd() result = closeConditionalWriter_result() try: self._handler.closeConditionalWriter(args.conditionalWriter) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("closeConditionalWriter", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_getRowRange(self, seqid, iprot, oprot): args = getRowRange_args() args.read(iprot) iprot.readMessageEnd() result = getRowRange_result() try: result.success = self._handler.getRowRange(args.row) msg_type = TMessageType.REPLY except 
(TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("getRowRange", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_getFollowing(self, seqid, iprot, oprot): args = getFollowing_args() args.read(iprot) iprot.readMessageEnd() result = getFollowing_result() try: result.success = self._handler.getFollowing(args.key, args.part) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("getFollowing", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_systemNamespace(self, seqid, iprot, oprot): args = systemNamespace_args() args.read(iprot) iprot.readMessageEnd() result = systemNamespace_result() try: result.success = self._handler.systemNamespace() msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("systemNamespace", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_defaultNamespace(self, seqid, iprot, oprot): args = defaultNamespace_args() args.read(iprot) iprot.readMessageEnd() result = defaultNamespace_result() try: result.success = self._handler.defaultNamespace() msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) 
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("defaultNamespace", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_listNamespaces(self, seqid, iprot, oprot): args = listNamespaces_args() args.read(iprot) iprot.readMessageEnd() result = listNamespaces_result() try: result.success = self._handler.listNamespaces(args.login) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("listNamespaces", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_namespaceExists(self, seqid, iprot, oprot): args = namespaceExists_args() args.read(iprot) iprot.readMessageEnd() result = namespaceExists_result() try: result.success = self._handler.namespaceExists(args.login, args.namespaceName) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("namespaceExists", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_createNamespace(self, seqid, iprot, oprot): args = createNamespace_args() args.read(iprot) iprot.readMessageEnd() result = createNamespace_result() try: 
self._handler.createNamespace(args.login, args.namespaceName) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except NamespaceExistsException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("createNamespace", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_deleteNamespace(self, seqid, iprot, oprot): args = deleteNamespace_args() args.read(iprot) iprot.readMessageEnd() result = deleteNamespace_result() try: self._handler.deleteNamespace(args.login, args.namespaceName) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except NamespaceNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except NamespaceNotEmptyException as ouch4: msg_type = TMessageType.REPLY result.ouch4 = ouch4 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("deleteNamespace", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_renameNamespace(self, seqid, iprot, oprot): args = renameNamespace_args() args.read(iprot) iprot.readMessageEnd() result = renameNamespace_result() try: self._handler.renameNamespace(args.login, args.oldNamespaceName, args.newNamespaceName) msg_type = 
TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except NamespaceNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except NamespaceExistsException as ouch4: msg_type = TMessageType.REPLY result.ouch4 = ouch4 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("renameNamespace", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_setNamespaceProperty(self, seqid, iprot, oprot): args = setNamespaceProperty_args() args.read(iprot) iprot.readMessageEnd() result = setNamespaceProperty_result() try: self._handler.setNamespaceProperty(args.login, args.namespaceName, args.property, args.value) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except NamespaceNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("setNamespaceProperty", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_removeNamespaceProperty(self, seqid, iprot, oprot): args = removeNamespaceProperty_args() args.read(iprot) iprot.readMessageEnd() result = removeNamespaceProperty_result() try: self._handler.removeNamespaceProperty(args.login, args.namespaceName, args.property) msg_type = 
TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except NamespaceNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("removeNamespaceProperty", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_getNamespaceProperties(self, seqid, iprot, oprot): args = getNamespaceProperties_args() args.read(iprot) iprot.readMessageEnd() result = getNamespaceProperties_result() try: result.success = self._handler.getNamespaceProperties(args.login, args.namespaceName) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except NamespaceNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("getNamespaceProperties", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_namespaceIdMap(self, seqid, iprot, oprot): args = namespaceIdMap_args() args.read(iprot) iprot.readMessageEnd() result = namespaceIdMap_result() try: result.success = self._handler.namespaceIdMap(args.login) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = 
TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("namespaceIdMap", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_attachNamespaceIterator(self, seqid, iprot, oprot): args = attachNamespaceIterator_args() args.read(iprot) iprot.readMessageEnd() result = attachNamespaceIterator_result() try: self._handler.attachNamespaceIterator(args.login, args.namespaceName, args.setting, args.scopes) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except NamespaceNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("attachNamespaceIterator", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_removeNamespaceIterator(self, seqid, iprot, oprot): args = removeNamespaceIterator_args() args.read(iprot) iprot.readMessageEnd() result = removeNamespaceIterator_result() try: self._handler.removeNamespaceIterator(args.login, args.namespaceName, args.name, args.scopes) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except NamespaceNotFoundException as 
ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("removeNamespaceIterator", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_getNamespaceIteratorSetting(self, seqid, iprot, oprot): args = getNamespaceIteratorSetting_args() args.read(iprot) iprot.readMessageEnd() result = getNamespaceIteratorSetting_result() try: result.success = self._handler.getNamespaceIteratorSetting(args.login, args.namespaceName, args.name, args.scope) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except NamespaceNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("getNamespaceIteratorSetting", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_listNamespaceIterators(self, seqid, iprot, oprot): args = listNamespaceIterators_args() args.read(iprot) iprot.readMessageEnd() result = listNamespaceIterators_result() try: result.success = self._handler.listNamespaceIterators(args.login, args.namespaceName) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except NamespaceNotFoundException as ouch3: msg_type = TMessageType.REPLY 
result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("listNamespaceIterators", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_checkNamespaceIteratorConflicts(self, seqid, iprot, oprot): args = checkNamespaceIteratorConflicts_args() args.read(iprot) iprot.readMessageEnd() result = checkNamespaceIteratorConflicts_result() try: self._handler.checkNamespaceIteratorConflicts(args.login, args.namespaceName, args.setting, args.scopes) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except NamespaceNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("checkNamespaceIteratorConflicts", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_addNamespaceConstraint(self, seqid, iprot, oprot): args = addNamespaceConstraint_args() args.read(iprot) iprot.readMessageEnd() result = addNamespaceConstraint_result() try: result.success = self._handler.addNamespaceConstraint(args.login, args.namespaceName, args.constraintClassName) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except NamespaceNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 
= ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("addNamespaceConstraint", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_removeNamespaceConstraint(self, seqid, iprot, oprot): args = removeNamespaceConstraint_args() args.read(iprot) iprot.readMessageEnd() result = removeNamespaceConstraint_result() try: self._handler.removeNamespaceConstraint(args.login, args.namespaceName, args.id) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except NamespaceNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("removeNamespaceConstraint", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_listNamespaceConstraints(self, seqid, iprot, oprot): args = listNamespaceConstraints_args() args.read(iprot) iprot.readMessageEnd() result = listNamespaceConstraints_result() try: result.success = self._handler.listNamespaceConstraints(args.login, args.namespaceName) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except NamespaceNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION 
logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("listNamespaceConstraints", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def process_testNamespaceClassLoad(self, seqid, iprot, oprot): args = testNamespaceClassLoad_args() args.read(iprot) iprot.readMessageEnd() result = testNamespaceClassLoad_result() try: result.success = self._handler.testNamespaceClassLoad(args.login, args.namespaceName, args.className, args.asTypeName) msg_type = TMessageType.REPLY except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): raise except AccumuloException as ouch1: msg_type = TMessageType.REPLY result.ouch1 = ouch1 except AccumuloSecurityException as ouch2: msg_type = TMessageType.REPLY result.ouch2 = ouch2 except NamespaceNotFoundException as ouch3: msg_type = TMessageType.REPLY result.ouch3 = ouch3 except Exception as ex: msg_type = TMessageType.EXCEPTION logging.exception(ex) result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') oprot.writeMessageBegin("testNamespaceClassLoad", msg_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() # HELPER FUNCTIONS AND STRUCTURES class login_args(object): """ Attributes: - principal - loginProperties """ thrift_spec = ( None, # 0 (1, TType.STRING, 'principal', 'UTF8', None, ), # 1 (2, TType.MAP, 'loginProperties', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 2 ) def __init__(self, principal=None, loginProperties=None,): self.principal = principal self.loginProperties = loginProperties def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if 
class login_result(object):
    """Thrift result struct for the login RPC.

    Attributes:
     - success: opaque binary login token returned on success
     - ouch2: AccumuloSecurityException raised by the server, if any
    """
    thrift_spec = (
        (0, TType.STRING, 'success', 'BINARY', None, ),  # 0
        (1, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, ouch2=None,):
        self.success = success
        self.ouch2 = ouch2

    def read(self, iprot):
        # Prefer the C-accelerated decoder when the protocol provides one.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0 and ftype == TType.STRING:
                self.success = iprot.readBinary()
            elif fid == 1 and ftype == TType.STRUCT:
                self.ouch2 = AccumuloSecurityException()
                self.ouch2.read(iprot)
            else:
                # Unknown field id or mismatched wire type: skip it.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Prefer the C-accelerated encoder when the protocol provides one.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('login_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRING, 0)
            oprot.writeBinary(self.success)
            oprot.writeFieldEnd()
        if self.ouch2 is not None:
            oprot.writeFieldBegin('ouch2', TType.STRUCT, 1)
            self.ouch2.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        parts = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(parts))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class addConstraint_args(object):
    """Thrift argument struct for the addConstraint RPC.

    Attributes:
     - login: opaque binary login token
     - tableName: name of the target table
     - constraintClassName: fully qualified constraint class to install
    """
    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'login', 'BINARY', None, ),  # 1
        (2, TType.STRING, 'tableName', 'UTF8', None, ),  # 2
        (3, TType.STRING, 'constraintClassName', 'UTF8', None, ),  # 3
    )

    def __init__(self, login=None, tableName=None, constraintClassName=None,):
        self.login = login
        self.tableName = tableName
        self.constraintClassName = constraintClassName

    def read(self, iprot):
        # Prefer the C-accelerated decoder when the protocol provides one.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRING:
                self.login = iprot.readBinary()
            elif fid == 2 and ftype == TType.STRING:
                self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
            elif fid == 3 and ftype == TType.STRING:
                self.constraintClassName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
            else:
                # Unknown field id or mismatched wire type: skip it.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Prefer the C-accelerated encoder when the protocol provides one.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('addConstraint_args')
        if self.login is not None:
            oprot.writeFieldBegin('login', TType.STRING, 1)
            oprot.writeBinary(self.login)
            oprot.writeFieldEnd()
        if self.tableName is not None:
            oprot.writeFieldBegin('tableName', TType.STRING, 2)
            oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName)
            oprot.writeFieldEnd()
        if self.constraintClassName is not None:
            oprot.writeFieldBegin('constraintClassName', TType.STRING, 3)
            oprot.writeString(self.constraintClassName.encode('utf-8') if sys.version_info[0] == 2 else self.constraintClassName)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        parts = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(parts))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class addSplits_args(object):
    """Thrift argument struct for the addSplits RPC.

    Attributes:
     - login: opaque binary login token
     - tableName: name of the target table
     - splits: set of binary split points to add
    """
    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'login', 'BINARY', None, ),  # 1
        (2, TType.STRING, 'tableName', 'UTF8', None, ),  # 2
        (3, TType.SET, 'splits', (TType.STRING, 'BINARY', False), None, ),  # 3
    )

    def __init__(self, login=None, tableName=None, splits=None,):
        self.login = login
        self.tableName = tableName
        self.splits = splits

    def read(self, iprot):
        # Prefer the C-accelerated decoder when the protocol provides one.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRING:
                self.login = iprot.readBinary()
            elif fid == 2 and ftype == TType.STRING:
                self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
            elif fid == 3 and ftype == TType.SET:
                self.splits = set()
                (elem_type, elem_count) = iprot.readSetBegin()
                for _ in range(elem_count):
                    self.splits.add(iprot.readBinary())
                iprot.readSetEnd()
            else:
                # Unknown field id or mismatched wire type: skip it.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Prefer the C-accelerated encoder when the protocol provides one.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('addSplits_args')
        if self.login is not None:
            oprot.writeFieldBegin('login', TType.STRING, 1)
            oprot.writeBinary(self.login)
            oprot.writeFieldEnd()
        if self.tableName is not None:
            oprot.writeFieldBegin('tableName', TType.STRING, 2)
            oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName)
            oprot.writeFieldEnd()
        if self.splits is not None:
            oprot.writeFieldBegin('splits', TType.SET, 3)
            oprot.writeSetBegin(TType.STRING, len(self.splits))
            for split in self.splits:
                oprot.writeBinary(split)
            oprot.writeSetEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        parts = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(parts))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class attachIterator_args(object):
    """Thrift argument struct for the attachIterator RPC.

    Attributes:
     - login: opaque binary login token
     - tableName: name of the target table
     - setting: IteratorSetting struct describing the iterator
     - scopes: set of i32 iterator-scope enum values
    """
    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'login', 'BINARY', None, ),  # 1
        (2, TType.STRING, 'tableName', 'UTF8', None, ),  # 2
        (3, TType.STRUCT, 'setting', (IteratorSetting, IteratorSetting.thrift_spec), None, ),  # 3
        (4, TType.SET, 'scopes', (TType.I32, None, False), None, ),  # 4
    )

    def __init__(self, login=None, tableName=None, setting=None, scopes=None,):
        self.login = login
        self.tableName = tableName
        self.setting = setting
        self.scopes = scopes

    def read(self, iprot):
        # Prefer the C-accelerated decoder when the protocol provides one.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRING:
                self.login = iprot.readBinary()
            elif fid == 2 and ftype == TType.STRING:
                self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
            elif fid == 3 and ftype == TType.STRUCT:
                self.setting = IteratorSetting()
                self.setting.read(iprot)
            elif fid == 4 and ftype == TType.SET:
                self.scopes = set()
                (elem_type, elem_count) = iprot.readSetBegin()
                for _ in range(elem_count):
                    self.scopes.add(iprot.readI32())
                iprot.readSetEnd()
            else:
                # Unknown field id or mismatched wire type: skip it.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Prefer the C-accelerated encoder when the protocol provides one.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('attachIterator_args')
        if self.login is not None:
            oprot.writeFieldBegin('login', TType.STRING, 1)
            oprot.writeBinary(self.login)
            oprot.writeFieldEnd()
        if self.tableName is not None:
            oprot.writeFieldBegin('tableName', TType.STRING, 2)
            oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName)
            oprot.writeFieldEnd()
        if self.setting is not None:
            oprot.writeFieldBegin('setting', TType.STRUCT, 3)
            self.setting.write(oprot)
            oprot.writeFieldEnd()
        if self.scopes is not None:
            oprot.writeFieldBegin('scopes', TType.SET, 4)
            oprot.writeSetBegin(TType.I32, len(self.scopes))
            for scope in self.scopes:
                oprot.writeI32(scope)
            oprot.writeSetEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        parts = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(parts))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class checkIteratorConflicts_args(object): """ Attributes: - login - tableName - setting - scopes """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tableName', 'UTF8', None, ), # 2 (3, TType.STRUCT, 'setting', (IteratorSetting, IteratorSetting.thrift_spec), None, ), # 3 (4, TType.SET, 'scopes', (TType.I32, None, False), None, ), # 4 ) def __init__(self, login=None, tableName=None, setting=None, scopes=None,): self.login = login self.tableName = tableName self.setting = setting self.scopes = scopes def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.setting = IteratorSetting() self.setting.read(iprot) else: iprot.skip(ftype) elif fid == 4: if ftype == TType.SET: self.scopes = set() (_etype170, _size167) = iprot.readSetBegin() for _i171 in range(_size167): 
_elem172 = iprot.readI32() self.scopes.add(_elem172) iprot.readSetEnd() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('checkIteratorConflicts_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tableName is not None: oprot.writeFieldBegin('tableName', TType.STRING, 2) oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName) oprot.writeFieldEnd() if self.setting is not None: oprot.writeFieldBegin('setting', TType.STRUCT, 3) self.setting.write(oprot) oprot.writeFieldEnd() if self.scopes is not None: oprot.writeFieldBegin('scopes', TType.SET, 4) oprot.writeSetBegin(TType.I32, len(self.scopes)) for iter173 in self.scopes: oprot.writeI32(iter173) oprot.writeSetEnd() oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class checkIteratorConflicts_result(object): """ Attributes: - ouch1 - ouch2 - ouch3 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloException, AccumuloException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, ouch1=None, ouch2=None, ouch3=None,): self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): 
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloSecurityException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = TableNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('checkIteratorConflicts_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class clearLocatorCache_args(object): """ Attributes: - login - tableName """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tableName', 'UTF8', None, ), # 2 ) def __init__(self, login=None, tableName=None,): self.login = 
login self.tableName = tableName def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('clearLocatorCache_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tableName is not None: oprot.writeFieldBegin('tableName', TType.STRING, 2) oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class clearLocatorCache_result(object): """ Attributes: - ouch1 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 1 ) def __init__(self, ouch1=None,): self.ouch1 = ouch1 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) 
and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.ouch1 = TableNotFoundException() self.ouch1.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('clearLocatorCache_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class cloneTable_args(object): """ Attributes: - login - tableName - newTableName - flush - propertiesToSet - propertiesToExclude """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tableName', 'UTF8', None, ), # 2 (3, TType.STRING, 'newTableName', 'UTF8', None, ), # 3 (4, TType.BOOL, 'flush', None, None, ), # 4 (5, TType.MAP, 'propertiesToSet', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 5 (6, TType.SET, 'propertiesToExclude', (TType.STRING, 'UTF8', False), None, ), # 6 ) def __init__(self, login=None, tableName=None, newTableName=None, flush=None, propertiesToSet=None, propertiesToExclude=None,): self.login = login self.tableName = tableName self.newTableName = newTableName self.flush = flush self.propertiesToSet = propertiesToSet self.propertiesToExclude = propertiesToExclude def 
read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRING: self.newTableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 4: if ftype == TType.BOOL: self.flush = iprot.readBool() else: iprot.skip(ftype) elif fid == 5: if ftype == TType.MAP: self.propertiesToSet = {} (_ktype175, _vtype176, _size174) = iprot.readMapBegin() for _i178 in range(_size174): _key179 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() _val180 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() self.propertiesToSet[_key179] = _val180 iprot.readMapEnd() else: iprot.skip(ftype) elif fid == 6: if ftype == TType.SET: self.propertiesToExclude = set() (_etype184, _size181) = iprot.readSetBegin() for _i185 in range(_size181): _elem186 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() self.propertiesToExclude.add(_elem186) iprot.readSetEnd() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('cloneTable_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) 
oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tableName is not None: oprot.writeFieldBegin('tableName', TType.STRING, 2) oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName) oprot.writeFieldEnd() if self.newTableName is not None: oprot.writeFieldBegin('newTableName', TType.STRING, 3) oprot.writeString(self.newTableName.encode('utf-8') if sys.version_info[0] == 2 else self.newTableName) oprot.writeFieldEnd() if self.flush is not None: oprot.writeFieldBegin('flush', TType.BOOL, 4) oprot.writeBool(self.flush) oprot.writeFieldEnd() if self.propertiesToSet is not None: oprot.writeFieldBegin('propertiesToSet', TType.MAP, 5) oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.propertiesToSet)) for kiter187, viter188 in self.propertiesToSet.items(): oprot.writeString(kiter187.encode('utf-8') if sys.version_info[0] == 2 else kiter187) oprot.writeString(viter188.encode('utf-8') if sys.version_info[0] == 2 else viter188) oprot.writeMapEnd() oprot.writeFieldEnd() if self.propertiesToExclude is not None: oprot.writeFieldBegin('propertiesToExclude', TType.SET, 6) oprot.writeSetBegin(TType.STRING, len(self.propertiesToExclude)) for iter189 in self.propertiesToExclude: oprot.writeString(iter189.encode('utf-8') if sys.version_info[0] == 2 else iter189) oprot.writeSetEnd() oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class cloneTable_result(object): """ Attributes: - ouch1 - ouch2 - ouch3 - ouch4 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, 
AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 3 (4, TType.STRUCT, 'ouch4', (TableExistsException, TableExistsException.thrift_spec), None, ), # 4 ) def __init__(self, ouch1=None, ouch2=None, ouch3=None, ouch4=None,): self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 self.ouch4 = ouch4 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = TableNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) elif fid == 4: if ftype == TType.STRUCT: self.ouch4 = TableExistsException() self.ouch4.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('cloneTable_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() if self.ouch4 is not None: oprot.writeFieldBegin('ouch4', TType.STRUCT, 4) self.ouch4.write(oprot) 
oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class compactTable_args(object): """ Attributes: - login - tableName - startRow - endRow - iterators - flush - wait - compactionStrategy """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tableName', 'UTF8', None, ), # 2 (3, TType.STRING, 'startRow', 'BINARY', None, ), # 3 (4, TType.STRING, 'endRow', 'BINARY', None, ), # 4 (5, TType.LIST, 'iterators', (TType.STRUCT, (IteratorSetting, IteratorSetting.thrift_spec), False), None, ), # 5 (6, TType.BOOL, 'flush', None, None, ), # 6 (7, TType.BOOL, 'wait', None, None, ), # 7 (8, TType.STRUCT, 'compactionStrategy', (CompactionStrategyConfig, CompactionStrategyConfig.thrift_spec), None, ), # 8 ) def __init__(self, login=None, tableName=None, startRow=None, endRow=None, iterators=None, flush=None, wait=None, compactionStrategy=None,): self.login = login self.tableName = tableName self.startRow = startRow self.endRow = endRow self.iterators = iterators self.flush = flush self.wait = wait self.compactionStrategy = compactionStrategy def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else 
iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRING: self.startRow = iprot.readBinary() else: iprot.skip(ftype) elif fid == 4: if ftype == TType.STRING: self.endRow = iprot.readBinary() else: iprot.skip(ftype) elif fid == 5: if ftype == TType.LIST: self.iterators = [] (_etype193, _size190) = iprot.readListBegin() for _i194 in range(_size190): _elem195 = IteratorSetting() _elem195.read(iprot) self.iterators.append(_elem195) iprot.readListEnd() else: iprot.skip(ftype) elif fid == 6: if ftype == TType.BOOL: self.flush = iprot.readBool() else: iprot.skip(ftype) elif fid == 7: if ftype == TType.BOOL: self.wait = iprot.readBool() else: iprot.skip(ftype) elif fid == 8: if ftype == TType.STRUCT: self.compactionStrategy = CompactionStrategyConfig() self.compactionStrategy.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('compactTable_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tableName is not None: oprot.writeFieldBegin('tableName', TType.STRING, 2) oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName) oprot.writeFieldEnd() if self.startRow is not None: oprot.writeFieldBegin('startRow', TType.STRING, 3) oprot.writeBinary(self.startRow) oprot.writeFieldEnd() if self.endRow is not None: oprot.writeFieldBegin('endRow', TType.STRING, 4) oprot.writeBinary(self.endRow) oprot.writeFieldEnd() if self.iterators is not None: oprot.writeFieldBegin('iterators', TType.LIST, 5) oprot.writeListBegin(TType.STRUCT, len(self.iterators)) for iter196 in self.iterators: iter196.write(oprot) oprot.writeListEnd() oprot.writeFieldEnd() if self.flush is not None: 
oprot.writeFieldBegin('flush', TType.BOOL, 6) oprot.writeBool(self.flush) oprot.writeFieldEnd() if self.wait is not None: oprot.writeFieldBegin('wait', TType.BOOL, 7) oprot.writeBool(self.wait) oprot.writeFieldEnd() if self.compactionStrategy is not None: oprot.writeFieldBegin('compactionStrategy', TType.STRUCT, 8) self.compactionStrategy.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class compactTable_result(object): """ Attributes: - ouch1 - ouch2 - ouch3 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (AccumuloException, AccumuloException.thrift_spec), None, ), # 3 ) def __init__(self, ouch1=None, ouch2=None, ouch3=None,): self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloSecurityException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = TableNotFoundException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = AccumuloException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: 
iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('compactTable_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class cancelCompaction_args(object): """ Attributes: - login - tableName """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tableName', 'UTF8', None, ), # 2 ) def __init__(self, login=None, tableName=None,): self.login = login self.tableName = tableName def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def 
write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('cancelCompaction_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tableName is not None: oprot.writeFieldBegin('tableName', TType.STRING, 2) oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class cancelCompaction_result(object): """ Attributes: - ouch1 - ouch2 - ouch3 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (AccumuloException, AccumuloException.thrift_spec), None, ), # 3 ) def __init__(self, ouch1=None, ouch2=None, ouch3=None,): self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloSecurityException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = TableNotFoundException() 
self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = AccumuloException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('cancelCompaction_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class createTable_args(object): """ Attributes: - login - tableName - versioningIter - type """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tableName', 'UTF8', None, ), # 2 (3, TType.BOOL, 'versioningIter', None, None, ), # 3 (4, TType.I32, 'type', None, None, ), # 4 ) def __init__(self, login=None, tableName=None, versioningIter=None, type=None,): self.login = login self.tableName = tableName self.versioningIter = versioningIter self.type = type def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = 
iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.BOOL: self.versioningIter = iprot.readBool() else: iprot.skip(ftype) elif fid == 4: if ftype == TType.I32: self.type = iprot.readI32() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('createTable_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tableName is not None: oprot.writeFieldBegin('tableName', TType.STRING, 2) oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName) oprot.writeFieldEnd() if self.versioningIter is not None: oprot.writeFieldBegin('versioningIter', TType.BOOL, 3) oprot.writeBool(self.versioningIter) oprot.writeFieldEnd() if self.type is not None: oprot.writeFieldBegin('type', TType.I32, 4) oprot.writeI32(self.type) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class createTable_result(object): """ Attributes: - ouch1 - ouch2 - ouch3 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', 
(AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (TableExistsException, TableExistsException.thrift_spec), None, ), # 3 ) def __init__(self, ouch1=None, ouch2=None, ouch3=None,): self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = TableExistsException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('createTable_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, 
other): return not (self == other) class deleteTable_args(object): """ Attributes: - login - tableName """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tableName', 'UTF8', None, ), # 2 ) def __init__(self, login=None, tableName=None,): self.login = login self.tableName = tableName def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('deleteTable_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tableName is not None: oprot.writeFieldBegin('tableName', TType.STRING, 2) oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class deleteTable_result(object): """ Attributes: - ouch1 - ouch2 - ouch3 
""" thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, ouch1=None, ouch2=None, ouch3=None,): self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = TableNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('deleteTable_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % 
(self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class deleteRows_args(object): """ Attributes: - login - tableName - startRow - endRow """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tableName', 'UTF8', None, ), # 2 (3, TType.STRING, 'startRow', 'BINARY', None, ), # 3 (4, TType.STRING, 'endRow', 'BINARY', None, ), # 4 ) def __init__(self, login=None, tableName=None, startRow=None, endRow=None,): self.login = login self.tableName = tableName self.startRow = startRow self.endRow = endRow def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRING: self.startRow = iprot.readBinary() else: iprot.skip(ftype) elif fid == 4: if ftype == TType.STRING: self.endRow = iprot.readBinary() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('deleteRows_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tableName is not None: oprot.writeFieldBegin('tableName', TType.STRING, 2) 
oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName) oprot.writeFieldEnd() if self.startRow is not None: oprot.writeFieldBegin('startRow', TType.STRING, 3) oprot.writeBinary(self.startRow) oprot.writeFieldEnd() if self.endRow is not None: oprot.writeFieldBegin('endRow', TType.STRING, 4) oprot.writeBinary(self.endRow) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class deleteRows_result(object): """ Attributes: - ouch1 - ouch2 - ouch3 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, ouch1=None, ouch2=None, ouch3=None,): self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = TableNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: 
iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('deleteRows_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class exportTable_args(object): """ Attributes: - login - tableName - exportDir """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tableName', 'UTF8', None, ), # 2 (3, TType.STRING, 'exportDir', 'UTF8', None, ), # 3 ) def __init__(self, login=None, tableName=None, exportDir=None,): self.login = login self.tableName = tableName self.exportDir = exportDir def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else 
iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRING: self.exportDir = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('exportTable_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tableName is not None: oprot.writeFieldBegin('tableName', TType.STRING, 2) oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName) oprot.writeFieldEnd() if self.exportDir is not None: oprot.writeFieldBegin('exportDir', TType.STRING, 3) oprot.writeString(self.exportDir.encode('utf-8') if sys.version_info[0] == 2 else self.exportDir) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class exportTable_result(object): """ Attributes: - ouch1 - ouch2 - ouch3 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, ouch1=None, ouch2=None, ouch3=None,): self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and 
isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = TableNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('exportTable_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class flushTable_args(object): """ Attributes: - login - tableName - startRow - endRow - wait """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tableName', 'UTF8', None, ), # 2 (3, TType.STRING, 'startRow', 'BINARY', None, ), # 3 (4, TType.STRING, 'endRow', 'BINARY', None, 
), # 4 (5, TType.BOOL, 'wait', None, None, ), # 5 ) def __init__(self, login=None, tableName=None, startRow=None, endRow=None, wait=None,): self.login = login self.tableName = tableName self.startRow = startRow self.endRow = endRow self.wait = wait def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRING: self.startRow = iprot.readBinary() else: iprot.skip(ftype) elif fid == 4: if ftype == TType.STRING: self.endRow = iprot.readBinary() else: iprot.skip(ftype) elif fid == 5: if ftype == TType.BOOL: self.wait = iprot.readBool() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('flushTable_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tableName is not None: oprot.writeFieldBegin('tableName', TType.STRING, 2) oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName) oprot.writeFieldEnd() if self.startRow is not None: oprot.writeFieldBegin('startRow', TType.STRING, 3) oprot.writeBinary(self.startRow) oprot.writeFieldEnd() if self.endRow is not None: oprot.writeFieldBegin('endRow', TType.STRING, 4) 
oprot.writeBinary(self.endRow) oprot.writeFieldEnd() if self.wait is not None: oprot.writeFieldBegin('wait', TType.BOOL, 5) oprot.writeBool(self.wait) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class flushTable_result(object): """ Attributes: - ouch1 - ouch2 - ouch3 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, ouch1=None, ouch2=None, ouch3=None,): self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = TableNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, 
self.thrift_spec))) return oprot.writeStructBegin('flushTable_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class getDiskUsage_args(object): """ Attributes: - login - tables """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.SET, 'tables', (TType.STRING, 'UTF8', False), None, ), # 2 ) def __init__(self, login=None, tables=None,): self.login = login self.tables = tables def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.SET: self.tables = set() (_etype200, _size197) = iprot.readSetBegin() for _i201 in range(_size197): _elem202 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() self.tables.add(_elem202) iprot.readSetEnd() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not 
None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('getDiskUsage_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tables is not None: oprot.writeFieldBegin('tables', TType.SET, 2) oprot.writeSetBegin(TType.STRING, len(self.tables)) for iter203 in self.tables: oprot.writeString(iter203.encode('utf-8') if sys.version_info[0] == 2 else iter203) oprot.writeSetEnd() oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class getDiskUsage_result(object): """ Attributes: - success - ouch1 - ouch2 - ouch3 """ thrift_spec = ( (0, TType.LIST, 'success', (TType.STRUCT, (DiskUsage, DiskUsage.thrift_spec), False), None, ), # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, success=None, ouch1=None, ouch2=None, ouch3=None,): self.success = success self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.LIST: self.success = [] (_etype207, _size204) = 
iprot.readListBegin() for _i208 in range(_size204): _elem209 = DiskUsage() _elem209.read(iprot) self.success.append(_elem209) iprot.readListEnd() else: iprot.skip(ftype) elif fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = TableNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('getDiskUsage_result') if self.success is not None: oprot.writeFieldBegin('success', TType.LIST, 0) oprot.writeListBegin(TType.STRUCT, len(self.success)) for iter210 in self.success: iter210.write(oprot) oprot.writeListEnd() oprot.writeFieldEnd() if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class getLocalityGroups_args(object): """ Attributes: - login - tableName """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tableName', 'UTF8', None, 
), # 2 ) def __init__(self, login=None, tableName=None,): self.login = login self.tableName = tableName def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('getLocalityGroups_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tableName is not None: oprot.writeFieldBegin('tableName', TType.STRING, 2) oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class getLocalityGroups_result(object): """ Attributes: - success - ouch1 - ouch2 - ouch3 """ thrift_spec = ( (0, TType.MAP, 'success', (TType.STRING, 'UTF8', TType.SET, (TType.STRING, 'UTF8', False), False), None, ), # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, 
AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, success=None, ouch1=None, ouch2=None, ouch3=None,): self.success = success self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.MAP: self.success = {} (_ktype212, _vtype213, _size211) = iprot.readMapBegin() for _i215 in range(_size211): _key216 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() _val217 = set() (_etype221, _size218) = iprot.readSetBegin() for _i222 in range(_size218): _elem223 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() _val217.add(_elem223) iprot.readSetEnd() self.success[_key216] = _val217 iprot.readMapEnd() else: iprot.skip(ftype) elif fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = TableNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('getLocalityGroups_result') if self.success is not None: 
oprot.writeFieldBegin('success', TType.MAP, 0) oprot.writeMapBegin(TType.STRING, TType.SET, len(self.success)) for kiter224, viter225 in self.success.items(): oprot.writeString(kiter224.encode('utf-8') if sys.version_info[0] == 2 else kiter224) oprot.writeSetBegin(TType.STRING, len(viter225)) for iter226 in viter225: oprot.writeString(iter226.encode('utf-8') if sys.version_info[0] == 2 else iter226) oprot.writeSetEnd() oprot.writeMapEnd() oprot.writeFieldEnd() if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class getIteratorSetting_args(object): """ Attributes: - login - tableName - iteratorName - scope """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tableName', 'UTF8', None, ), # 2 (3, TType.STRING, 'iteratorName', 'UTF8', None, ), # 3 (4, TType.I32, 'scope', None, None, ), # 4 ) def __init__(self, login=None, tableName=None, iteratorName=None, scope=None,): self.login = login self.tableName = tableName self.iteratorName = iteratorName self.scope = scope def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = 
iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRING: self.iteratorName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 4: if ftype == TType.I32: self.scope = iprot.readI32() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('getIteratorSetting_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tableName is not None: oprot.writeFieldBegin('tableName', TType.STRING, 2) oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName) oprot.writeFieldEnd() if self.iteratorName is not None: oprot.writeFieldBegin('iteratorName', TType.STRING, 3) oprot.writeString(self.iteratorName.encode('utf-8') if sys.version_info[0] == 2 else self.iteratorName) oprot.writeFieldEnd() if self.scope is not None: oprot.writeFieldBegin('scope', TType.I32, 4) oprot.writeI32(self.scope) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class getIteratorSetting_result(object): """ Attributes: - success - ouch1 - ouch2 
- ouch3 """ thrift_spec = ( (0, TType.STRUCT, 'success', (IteratorSetting, IteratorSetting.thrift_spec), None, ), # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, success=None, ouch1=None, ouch2=None, ouch3=None,): self.success = success self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.STRUCT: self.success = IteratorSetting() self.success.read(iprot) else: iprot.skip(ftype) elif fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = TableNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('getIteratorSetting_result') if self.success is not None: oprot.writeFieldBegin('success', TType.STRUCT, 0) self.success.write(oprot) oprot.writeFieldEnd() if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: 
oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class getMaxRow_args(object): """ Attributes: - login - tableName - auths - startRow - startInclusive - endRow - endInclusive """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tableName', 'UTF8', None, ), # 2 (3, TType.SET, 'auths', (TType.STRING, 'BINARY', False), None, ), # 3 (4, TType.STRING, 'startRow', 'BINARY', None, ), # 4 (5, TType.BOOL, 'startInclusive', None, None, ), # 5 (6, TType.STRING, 'endRow', 'BINARY', None, ), # 6 (7, TType.BOOL, 'endInclusive', None, None, ), # 7 ) def __init__(self, login=None, tableName=None, auths=None, startRow=None, startInclusive=None, endRow=None, endInclusive=None,): self.login = login self.tableName = tableName self.auths = auths self.startRow = startRow self.startInclusive = startInclusive self.endRow = endRow self.endInclusive = endInclusive def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else 
iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.SET: self.auths = set() (_etype230, _size227) = iprot.readSetBegin() for _i231 in range(_size227): _elem232 = iprot.readBinary() self.auths.add(_elem232) iprot.readSetEnd() else: iprot.skip(ftype) elif fid == 4: if ftype == TType.STRING: self.startRow = iprot.readBinary() else: iprot.skip(ftype) elif fid == 5: if ftype == TType.BOOL: self.startInclusive = iprot.readBool() else: iprot.skip(ftype) elif fid == 6: if ftype == TType.STRING: self.endRow = iprot.readBinary() else: iprot.skip(ftype) elif fid == 7: if ftype == TType.BOOL: self.endInclusive = iprot.readBool() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('getMaxRow_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tableName is not None: oprot.writeFieldBegin('tableName', TType.STRING, 2) oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName) oprot.writeFieldEnd() if self.auths is not None: oprot.writeFieldBegin('auths', TType.SET, 3) oprot.writeSetBegin(TType.STRING, len(self.auths)) for iter233 in self.auths: oprot.writeBinary(iter233) oprot.writeSetEnd() oprot.writeFieldEnd() if self.startRow is not None: oprot.writeFieldBegin('startRow', TType.STRING, 4) oprot.writeBinary(self.startRow) oprot.writeFieldEnd() if self.startInclusive is not None: oprot.writeFieldBegin('startInclusive', TType.BOOL, 5) oprot.writeBool(self.startInclusive) oprot.writeFieldEnd() if self.endRow is not None: oprot.writeFieldBegin('endRow', TType.STRING, 6) oprot.writeBinary(self.endRow) oprot.writeFieldEnd() if self.endInclusive is not None: oprot.writeFieldBegin('endInclusive', 
TType.BOOL, 7) oprot.writeBool(self.endInclusive) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class getMaxRow_result(object): """ Attributes: - success - ouch1 - ouch2 - ouch3 """ thrift_spec = ( (0, TType.STRING, 'success', 'BINARY', None, ), # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, success=None, ouch1=None, ouch2=None, ouch3=None,): self.success = success self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.STRING: self.success = iprot.readBinary() else: iprot.skip(ftype) elif fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = TableNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and 
self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('getMaxRow_result') if self.success is not None: oprot.writeFieldBegin('success', TType.STRING, 0) oprot.writeBinary(self.success) oprot.writeFieldEnd() if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class getTableProperties_args(object): """ Attributes: - login - tableName """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tableName', 'UTF8', None, ), # 2 ) def __init__(self, login=None, tableName=None,): self.login = login self.tableName = tableName def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() 
iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('getTableProperties_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tableName is not None: oprot.writeFieldBegin('tableName', TType.STRING, 2) oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class getTableProperties_result(object): """ Attributes: - success - ouch1 - ouch2 - ouch3 """ thrift_spec = ( (0, TType.MAP, 'success', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, success=None, ouch1=None, ouch2=None, ouch3=None,): self.success = success self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.MAP: self.success = 
{} (_ktype235, _vtype236, _size234) = iprot.readMapBegin() for _i238 in range(_size234): _key239 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() _val240 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() self.success[_key239] = _val240 iprot.readMapEnd() else: iprot.skip(ftype) elif fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = TableNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('getTableProperties_result') if self.success is not None: oprot.writeFieldBegin('success', TType.MAP, 0) oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.success)) for kiter241, viter242 in self.success.items(): oprot.writeString(kiter241.encode('utf-8') if sys.version_info[0] == 2 else kiter241) oprot.writeString(viter242.encode('utf-8') if sys.version_info[0] == 2 else viter242) oprot.writeMapEnd() oprot.writeFieldEnd() if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % 
(self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class importDirectory_args(object): """ Attributes: - login - tableName - importDir - failureDir - setTime """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tableName', 'UTF8', None, ), # 2 (3, TType.STRING, 'importDir', 'UTF8', None, ), # 3 (4, TType.STRING, 'failureDir', 'UTF8', None, ), # 4 (5, TType.BOOL, 'setTime', None, None, ), # 5 ) def __init__(self, login=None, tableName=None, importDir=None, failureDir=None, setTime=None,): self.login = login self.tableName = tableName self.importDir = importDir self.failureDir = failureDir self.setTime = setTime def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRING: self.importDir = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 4: if ftype == TType.STRING: self.failureDir = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 5: if ftype == TType.BOOL: self.setTime = iprot.readBool() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: 
oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('importDirectory_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tableName is not None: oprot.writeFieldBegin('tableName', TType.STRING, 2) oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName) oprot.writeFieldEnd() if self.importDir is not None: oprot.writeFieldBegin('importDir', TType.STRING, 3) oprot.writeString(self.importDir.encode('utf-8') if sys.version_info[0] == 2 else self.importDir) oprot.writeFieldEnd() if self.failureDir is not None: oprot.writeFieldBegin('failureDir', TType.STRING, 4) oprot.writeString(self.failureDir.encode('utf-8') if sys.version_info[0] == 2 else self.failureDir) oprot.writeFieldEnd() if self.setTime is not None: oprot.writeFieldBegin('setTime', TType.BOOL, 5) oprot.writeBool(self.setTime) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class importDirectory_result(object): """ Attributes: - ouch1 - ouch3 - ouch4 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch3', (AccumuloException, AccumuloException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch4', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 3 ) def __init__(self, ouch1=None, ouch3=None, ouch4=None,): self.ouch1 = ouch1 self.ouch3 = ouch3 self.ouch4 = ouch4 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, 
TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.ouch1 = TableNotFoundException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch3 = AccumuloException() self.ouch3.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch4 = AccumuloSecurityException() self.ouch4.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('importDirectory_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 2) self.ouch3.write(oprot) oprot.writeFieldEnd() if self.ouch4 is not None: oprot.writeFieldBegin('ouch4', TType.STRUCT, 3) self.ouch4.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class importTable_args(object): """ Attributes: - login - tableName - importDir """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tableName', 'UTF8', None, ), # 2 (3, TType.STRING, 'importDir', 'UTF8', None, ), # 3 ) def __init__(self, login=None, tableName=None, importDir=None,): self.login 
= login self.tableName = tableName self.importDir = importDir def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRING: self.importDir = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('importTable_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tableName is not None: oprot.writeFieldBegin('tableName', TType.STRING, 2) oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName) oprot.writeFieldEnd() if self.importDir is not None: oprot.writeFieldBegin('importDir', TType.STRING, 3) oprot.writeString(self.importDir.encode('utf-8') if sys.version_info[0] == 2 else self.importDir) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): 
        return not (self == other)


class importTable_result(object):
    """
    Result struct for the importTable RPC.

    Auto-generated Thrift code: regenerate from the service IDL rather
    than editing by hand.  importTable returns no value, so the only
    fields are the declared exceptions.

    Attributes:
     - ouch1
     - ouch2
     - ouch3

    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'ouch1', (TableExistsException, TableExistsException.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'ouch2', (AccumuloException, AccumuloException.thrift_spec), None, ),  # 2
        (3, TType.STRUCT, 'ouch3', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ),  # 3
    )

    def __init__(self, ouch1=None, ouch2=None, ouch3=None,):
        self.ouch1 = ouch1
        self.ouch2 = ouch2
        self.ouch3 = ouch3

    def read(self, iprot):
        # Deserialize from iprot, using the accelerated C decoder when the
        # protocol/transport support it; otherwise fall back to the generic
        # field-by-field loop below.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.ouch1 = TableExistsException()
                    self.ouch1.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.ouch2 = AccumuloException()
                    self.ouch2.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.ouch3 = AccumuloSecurityException()
                    self.ouch3.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; fields left as None are simply omitted.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('importTable_result')
        if self.ouch1 is not None:
            oprot.writeFieldBegin('ouch1', TType.STRUCT, 1)
            self.ouch1.write(oprot)
            oprot.writeFieldEnd()
        if self.ouch2 is not None:
            oprot.writeFieldBegin('ouch2', TType.STRUCT, 2)
            self.ouch2.write(oprot)
            oprot.writeFieldEnd()
        if self.ouch3 is not None:
            oprot.writeFieldBegin('ouch3', TType.STRUCT, 3)
            self.ouch3.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields declared, so nothing to check.
        return

    def __repr__(self):
        # (importTable_result.__repr__) render every attribute as key=value.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        # Structural equality: same class and identical attribute dict.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class listSplits_args(object):
    """
    Argument struct for the listSplits RPC.

    Auto-generated Thrift code: regenerate from the service IDL rather
    than editing by hand.

    Attributes:
     - login
     - tableName
     - maxSplits

    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'login', 'BINARY', None, ),  # 1
        (2, TType.STRING, 'tableName', 'UTF8', None, ),  # 2
        (3, TType.I32, 'maxSplits', None, None, ),  # 3
    )

    def __init__(self, login=None, tableName=None, maxSplits=None,):
        self.login = login
        self.tableName = tableName
        self.maxSplits = maxSplits

    def read(self, iprot):
        # Deserialize from iprot, preferring the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # login is a binary token, no text decoding.
                    self.login = iprot.readBinary()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    # Decode to unicode on Python 2; already str on Python 3.
                    self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.I32:
                    self.maxSplits = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; fields left as None are simply omitted.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('listSplits_args')
        if self.login is not None:
            oprot.writeFieldBegin('login', TType.STRING, 1)
            oprot.writeBinary(self.login)
            oprot.writeFieldEnd()
        if self.tableName is not None:
            oprot.writeFieldBegin('tableName', TType.STRING, 2)
            oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName)
            oprot.writeFieldEnd()
        if self.maxSplits is not None:
            oprot.writeFieldBegin('maxSplits', TType.I32, 3)
            oprot.writeI32(self.maxSplits)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields declared, so nothing to check.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class listSplits_result(object):
    """
    Result struct for the listSplits RPC.

    Auto-generated Thrift code: regenerate from the service IDL rather
    than editing by hand.

    Attributes:
     - success
     - ouch1
     - ouch2
     - ouch3

    """

    thrift_spec = (
        (0, TType.LIST, 'success', (TType.STRING, 'BINARY', False), None, ),  # 0
        (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ),  # 2
        (3, TType.STRUCT, 'ouch3', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ),  # 3
    )

    def __init__(self, success=None, ouch1=None, ouch2=None, ouch3=None,):
        self.success = success
        self.ouch1 = ouch1
        self.ouch2 = ouch2
        self.ouch3 = ouch3

    def read(self, iprot):
        # Deserialize from iprot, preferring the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.LIST:
                    # success is a list of binary elements.
                    self.success = []
                    (_etype246, _size243) = iprot.readListBegin()
                    for _i247 in range(_size243):
                        _elem248 = iprot.readBinary()
                        self.success.append(_elem248)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.ouch1 = AccumuloException()
                    self.ouch1.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.ouch2 = AccumuloSecurityException()
                    self.ouch2.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    # (listSplits_result.read, field 3) deserialize ouch3.
                    self.ouch3 = TableNotFoundException()
                    self.ouch3.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; fields left as None are simply omitted.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('listSplits_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.LIST, 0)
            oprot.writeListBegin(TType.STRING, len(self.success))
            for iter249 in self.success:
                oprot.writeBinary(iter249)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.ouch1 is not None:
            oprot.writeFieldBegin('ouch1', TType.STRUCT, 1)
            self.ouch1.write(oprot)
            oprot.writeFieldEnd()
        if self.ouch2 is not None:
            oprot.writeFieldBegin('ouch2', TType.STRUCT, 2)
            self.ouch2.write(oprot)
            oprot.writeFieldEnd()
        if self.ouch3 is not None:
            oprot.writeFieldBegin('ouch3', TType.STRUCT, 3)
            self.ouch3.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields declared, so nothing to check.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class listTables_args(object):
    """
    Argument struct for the listTables RPC.

    Auto-generated Thrift code: regenerate from the service IDL rather
    than editing by hand.

    Attributes:
     - login

    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'login', 'BINARY', None, ),  # 1
    )

    def __init__(self, login=None,):
        self.login = login

    def read(self, iprot):
        # Deserialize from iprot, preferring the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # login is a binary token, no text decoding.
                    self.login = iprot.readBinary()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # (listTables_args.write) serialize to oprot; None fields omitted.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('listTables_args')
        if self.login is not None:
            oprot.writeFieldBegin('login', TType.STRING, 1)
            oprot.writeBinary(self.login)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields declared, so nothing to check.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class listTables_result(object):
    """
    Result struct for the listTables RPC.

    Auto-generated Thrift code: regenerate from the service IDL rather
    than editing by hand.  Note this result declares no exception fields.

    Attributes:
     - success

    """

    thrift_spec = (
        (0, TType.SET, 'success', (TType.STRING, 'UTF8', False), None, ),  # 0
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        # Deserialize from iprot, preferring the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.SET:
                    # success is a set of UTF-8 strings.
                    self.success = set()
                    (_etype253, _size250) = iprot.readSetBegin()
                    for _i254 in range(_size250):
                        # Decode to unicode on Python 2; already str on Python 3.
                        _elem255 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        self.success.add(_elem255)
                    iprot.readSetEnd()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; a None success field is simply omitted.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('listTables_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.SET, 0)
            # (listTables_result.write) emit each table name in the set.
            oprot.writeSetBegin(TType.STRING, len(self.success))
            for iter256 in self.success:
                oprot.writeString(iter256.encode('utf-8') if sys.version_info[0] == 2 else iter256)
            oprot.writeSetEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields declared, so nothing to check.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class listIterators_args(object):
    """
    Argument struct for the listIterators RPC.

    Auto-generated Thrift code: regenerate from the service IDL rather
    than editing by hand.

    Attributes:
     - login
     - tableName

    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'login', 'BINARY', None, ),  # 1
        (2, TType.STRING, 'tableName', 'UTF8', None, ),  # 2
    )

    def __init__(self, login=None, tableName=None,):
        self.login = login
        self.tableName = tableName

    def read(self, iprot):
        # Deserialize from iprot, preferring the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # login is a binary token, no text decoding.
                    self.login = iprot.readBinary()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    # Decode to unicode on Python 2; already str on Python 3.
                    self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; fields left as None are simply omitted.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('listIterators_args')
        if self.login is not None:
            oprot.writeFieldBegin('login', TType.STRING, 1)
            oprot.writeBinary(self.login)
            oprot.writeFieldEnd()
        if self.tableName is not None:
            oprot.writeFieldBegin('tableName', TType.STRING, 2)
oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class listIterators_result(object): """ Attributes: - success - ouch1 - ouch2 - ouch3 """ thrift_spec = ( (0, TType.MAP, 'success', (TType.STRING, 'UTF8', TType.SET, (TType.I32, None, False), False), None, ), # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, success=None, ouch1=None, ouch2=None, ouch3=None,): self.success = success self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.MAP: self.success = {} (_ktype258, _vtype259, _size257) = iprot.readMapBegin() for _i261 in range(_size257): _key262 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() _val263 = set() (_etype267, _size264) = iprot.readSetBegin() for _i268 in range(_size264): _elem269 = iprot.readI32() _val263.add(_elem269) iprot.readSetEnd() self.success[_key262] = _val263 iprot.readMapEnd() else: iprot.skip(ftype) elif fid == 1: if ftype == 
TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = TableNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('listIterators_result') if self.success is not None: oprot.writeFieldBegin('success', TType.MAP, 0) oprot.writeMapBegin(TType.STRING, TType.SET, len(self.success)) for kiter270, viter271 in self.success.items(): oprot.writeString(kiter270.encode('utf-8') if sys.version_info[0] == 2 else kiter270) oprot.writeSetBegin(TType.I32, len(viter271)) for iter272 in viter271: oprot.writeI32(iter272) oprot.writeSetEnd() oprot.writeMapEnd() oprot.writeFieldEnd() if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class listConstraints_args(object): """ Attributes: - login - tableName """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tableName', 
'UTF8', None, ), # 2 ) def __init__(self, login=None, tableName=None,): self.login = login self.tableName = tableName def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('listConstraints_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tableName is not None: oprot.writeFieldBegin('tableName', TType.STRING, 2) oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class listConstraints_result(object): """ Attributes: - success - ouch1 - ouch2 - ouch3 """ thrift_spec = ( (0, TType.MAP, 'success', (TType.STRING, 'UTF8', TType.I32, None, False), None, ), # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 
1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, success=None, ouch1=None, ouch2=None, ouch3=None,): self.success = success self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.MAP: self.success = {} (_ktype274, _vtype275, _size273) = iprot.readMapBegin() for _i277 in range(_size273): _key278 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() _val279 = iprot.readI32() self.success[_key278] = _val279 iprot.readMapEnd() else: iprot.skip(ftype) elif fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = TableNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('listConstraints_result') if self.success is not None: oprot.writeFieldBegin('success', TType.MAP, 0) oprot.writeMapBegin(TType.STRING, TType.I32, len(self.success)) for kiter280, viter281 in self.success.items(): oprot.writeString(kiter280.encode('utf-8') if sys.version_info[0] == 2 else kiter280) 
oprot.writeI32(viter281) oprot.writeMapEnd() oprot.writeFieldEnd() if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class mergeTablets_args(object): """ Attributes: - login - tableName - startRow - endRow """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tableName', 'UTF8', None, ), # 2 (3, TType.STRING, 'startRow', 'BINARY', None, ), # 3 (4, TType.STRING, 'endRow', 'BINARY', None, ), # 4 ) def __init__(self, login=None, tableName=None, startRow=None, endRow=None,): self.login = login self.tableName = tableName self.startRow = startRow self.endRow = endRow def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRING: self.startRow = iprot.readBinary() else: iprot.skip(ftype) 
elif fid == 4: if ftype == TType.STRING: self.endRow = iprot.readBinary() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('mergeTablets_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tableName is not None: oprot.writeFieldBegin('tableName', TType.STRING, 2) oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName) oprot.writeFieldEnd() if self.startRow is not None: oprot.writeFieldBegin('startRow', TType.STRING, 3) oprot.writeBinary(self.startRow) oprot.writeFieldEnd() if self.endRow is not None: oprot.writeFieldBegin('endRow', TType.STRING, 4) oprot.writeBinary(self.endRow) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class mergeTablets_result(object): """ Attributes: - ouch1 - ouch2 - ouch3 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, ouch1=None, ouch2=None, ouch3=None,): self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) 
and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = TableNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('mergeTablets_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class offlineTable_args(object): """ Attributes: - login - tableName - wait """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tableName', 'UTF8', None, ), # 2 (3, TType.BOOL, 'wait', None, False, ), # 3 ) def __init__(self, login=None, tableName=None, wait=thrift_spec[3][4],): self.login = login self.tableName = tableName 
self.wait = wait def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.BOOL: self.wait = iprot.readBool() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('offlineTable_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tableName is not None: oprot.writeFieldBegin('tableName', TType.STRING, 2) oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName) oprot.writeFieldEnd() if self.wait is not None: oprot.writeFieldBegin('wait', TType.BOOL, 3) oprot.writeBool(self.wait) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class offlineTable_result(object): """ Attributes: - ouch1 - ouch2 - ouch3 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, 
AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, ouch1=None, ouch2=None, ouch3=None,): self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = TableNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('offlineTable_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return 
isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class onlineTable_args(object): """ Attributes: - login - tableName - wait """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tableName', 'UTF8', None, ), # 2 (3, TType.BOOL, 'wait', None, False, ), # 3 ) def __init__(self, login=None, tableName=None, wait=thrift_spec[3][4],): self.login = login self.tableName = tableName self.wait = wait def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.BOOL: self.wait = iprot.readBool() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('onlineTable_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tableName is not None: oprot.writeFieldBegin('tableName', TType.STRING, 2) oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName) oprot.writeFieldEnd() if self.wait is not None: oprot.writeFieldBegin('wait', TType.BOOL, 3) oprot.writeBool(self.wait) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def 
validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class onlineTable_result(object): """ Attributes: - ouch1 - ouch2 - ouch3 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, ouch1=None, ouch2=None, ouch3=None,): self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = TableNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('onlineTable_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: 
oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class removeConstraint_args(object): """ Attributes: - login - tableName - constraint """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tableName', 'UTF8', None, ), # 2 (3, TType.I32, 'constraint', None, None, ), # 3 ) def __init__(self, login=None, tableName=None, constraint=None,): self.login = login self.tableName = tableName self.constraint = constraint def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.I32: self.constraint = iprot.readI32() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('removeConstraint_args') if 
self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tableName is not None: oprot.writeFieldBegin('tableName', TType.STRING, 2) oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName) oprot.writeFieldEnd() if self.constraint is not None: oprot.writeFieldBegin('constraint', TType.I32, 3) oprot.writeI32(self.constraint) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class removeConstraint_result(object): """ Attributes: - ouch1 - ouch2 - ouch3 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, ouch1=None, ouch2=None, ouch3=None,): self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 
= TableNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('removeConstraint_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class removeIterator_args(object): """ Attributes: - login - tableName - iterName - scopes """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tableName', 'UTF8', None, ), # 2 (3, TType.STRING, 'iterName', 'UTF8', None, ), # 3 (4, TType.SET, 'scopes', (TType.I32, None, False), None, ), # 4 ) def __init__(self, login=None, tableName=None, iterName=None, scopes=None,): self.login = login self.tableName = tableName self.iterName = iterName self.scopes = scopes def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: 
self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRING: self.iterName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 4: if ftype == TType.SET: self.scopes = set() (_etype285, _size282) = iprot.readSetBegin() for _i286 in range(_size282): _elem287 = iprot.readI32() self.scopes.add(_elem287) iprot.readSetEnd() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('removeIterator_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tableName is not None: oprot.writeFieldBegin('tableName', TType.STRING, 2) oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName) oprot.writeFieldEnd() if self.iterName is not None: oprot.writeFieldBegin('iterName', TType.STRING, 3) oprot.writeString(self.iterName.encode('utf-8') if sys.version_info[0] == 2 else self.iterName) oprot.writeFieldEnd() if self.scopes is not None: oprot.writeFieldBegin('scopes', TType.SET, 4) oprot.writeSetBegin(TType.I32, len(self.scopes)) for iter288 in self.scopes: oprot.writeI32(iter288) oprot.writeSetEnd() oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, 
other): return not (self == other) class removeIterator_result(object): """ Attributes: - ouch1 - ouch2 - ouch3 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, ouch1=None, ouch2=None, ouch3=None,): self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = TableNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('removeIterator_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return 
def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class removeTableProperty_args(object): """ Attributes: - login - tableName - property """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tableName', 'UTF8', None, ), # 2 (3, TType.STRING, 'property', 'UTF8', None, ), # 3 ) def __init__(self, login=None, tableName=None, property=None,): self.login = login self.tableName = tableName self.property = property def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRING: self.property = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('removeTableProperty_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tableName is not None: oprot.writeFieldBegin('tableName', TType.STRING, 2) 
oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName) oprot.writeFieldEnd() if self.property is not None: oprot.writeFieldBegin('property', TType.STRING, 3) oprot.writeString(self.property.encode('utf-8') if sys.version_info[0] == 2 else self.property) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class removeTableProperty_result(object): """ Attributes: - ouch1 - ouch2 - ouch3 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, ouch1=None, ouch2=None, ouch3=None,): self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = TableNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() 
def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('removeTableProperty_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class renameTable_args(object): """ Attributes: - login - oldTableName - newTableName """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'oldTableName', 'UTF8', None, ), # 2 (3, TType.STRING, 'newTableName', 'UTF8', None, ), # 3 ) def __init__(self, login=None, oldTableName=None, newTableName=None,): self.login = login self.oldTableName = oldTableName self.newTableName = newTableName def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.oldTableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: 
iprot.skip(ftype) elif fid == 3: if ftype == TType.STRING: self.newTableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('renameTable_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.oldTableName is not None: oprot.writeFieldBegin('oldTableName', TType.STRING, 2) oprot.writeString(self.oldTableName.encode('utf-8') if sys.version_info[0] == 2 else self.oldTableName) oprot.writeFieldEnd() if self.newTableName is not None: oprot.writeFieldBegin('newTableName', TType.STRING, 3) oprot.writeString(self.newTableName.encode('utf-8') if sys.version_info[0] == 2 else self.newTableName) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class renameTable_result(object): """ Attributes: - ouch1 - ouch2 - ouch3 - ouch4 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 3 (4, TType.STRUCT, 'ouch4', (TableExistsException, TableExistsException.thrift_spec), None, ), # 4 ) def __init__(self, ouch1=None, ouch2=None, ouch3=None, ouch4=None,): 
self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 self.ouch4 = ouch4 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = TableNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) elif fid == 4: if ftype == TType.STRUCT: self.ouch4 = TableExistsException() self.ouch4.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('renameTable_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() if self.ouch4 is not None: oprot.writeFieldBegin('ouch4', TType.STRUCT, 4) self.ouch4.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == 
other.__dict__ def __ne__(self, other): return not (self == other) class setLocalityGroups_args(object): """ Attributes: - login - tableName - groups """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tableName', 'UTF8', None, ), # 2 (3, TType.MAP, 'groups', (TType.STRING, 'UTF8', TType.SET, (TType.STRING, 'UTF8', False), False), None, ), # 3 ) def __init__(self, login=None, tableName=None, groups=None,): self.login = login self.tableName = tableName self.groups = groups def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.MAP: self.groups = {} (_ktype290, _vtype291, _size289) = iprot.readMapBegin() for _i293 in range(_size289): _key294 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() _val295 = set() (_etype299, _size296) = iprot.readSetBegin() for _i300 in range(_size296): _elem301 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() _val295.add(_elem301) iprot.readSetEnd() self.groups[_key294] = _val295 iprot.readMapEnd() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('setLocalityGroups_args') if self.login is not None: 
oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tableName is not None: oprot.writeFieldBegin('tableName', TType.STRING, 2) oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName) oprot.writeFieldEnd() if self.groups is not None: oprot.writeFieldBegin('groups', TType.MAP, 3) oprot.writeMapBegin(TType.STRING, TType.SET, len(self.groups)) for kiter302, viter303 in self.groups.items(): oprot.writeString(kiter302.encode('utf-8') if sys.version_info[0] == 2 else kiter302) oprot.writeSetBegin(TType.STRING, len(viter303)) for iter304 in viter303: oprot.writeString(iter304.encode('utf-8') if sys.version_info[0] == 2 else iter304) oprot.writeSetEnd() oprot.writeMapEnd() oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class setLocalityGroups_result(object): """ Attributes: - ouch1 - ouch2 - ouch3 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, ouch1=None, ouch2=None, ouch3=None,): self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = 
iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = TableNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('setLocalityGroups_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class setTableProperty_args(object): """ Attributes: - login - tableName - property - value """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tableName', 'UTF8', None, ), # 2 (3, TType.STRING, 'property', 'UTF8', None, ), # 3 (4, TType.STRING, 'value', 'UTF8', None, ), # 4 ) def __init__(self, login=None, tableName=None, property=None, value=None,): self.login = login self.tableName = tableName self.property = property self.value = value def read(self, iprot): if iprot._fast_decode is 
not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRING: self.property = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 4: if ftype == TType.STRING: self.value = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('setTableProperty_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tableName is not None: oprot.writeFieldBegin('tableName', TType.STRING, 2) oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName) oprot.writeFieldEnd() if self.property is not None: oprot.writeFieldBegin('property', TType.STRING, 3) oprot.writeString(self.property.encode('utf-8') if sys.version_info[0] == 2 else self.property) oprot.writeFieldEnd() if self.value is not None: oprot.writeFieldBegin('value', TType.STRING, 4) oprot.writeString(self.value.encode('utf-8') if sys.version_info[0] == 2 else self.value) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = 
['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class setTableProperty_result(object): """ Attributes: - ouch1 - ouch2 - ouch3 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, ouch1=None, ouch2=None, ouch3=None,): self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = TableNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('setTableProperty_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', 
TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class splitRangeByTablets_args(object): """ Attributes: - login - tableName - range - maxSplits """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tableName', 'UTF8', None, ), # 2 (3, TType.STRUCT, 'range', (Range, Range.thrift_spec), None, ), # 3 (4, TType.I32, 'maxSplits', None, None, ), # 4 ) def __init__(self, login=None, tableName=None, range=None, maxSplits=None,): self.login = login self.tableName = tableName self.range = range self.maxSplits = maxSplits def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.range = Range() self.range.read(iprot) else: iprot.skip(ftype) elif fid == 4: if ftype == TType.I32: self.maxSplits = iprot.readI32() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not 
None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('splitRangeByTablets_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tableName is not None: oprot.writeFieldBegin('tableName', TType.STRING, 2) oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName) oprot.writeFieldEnd() if self.range is not None: oprot.writeFieldBegin('range', TType.STRUCT, 3) self.range.write(oprot) oprot.writeFieldEnd() if self.maxSplits is not None: oprot.writeFieldBegin('maxSplits', TType.I32, 4) oprot.writeI32(self.maxSplits) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class splitRangeByTablets_result(object): """ Attributes: - success - ouch1 - ouch2 - ouch3 """ thrift_spec = ( (0, TType.SET, 'success', (TType.STRUCT, (Range, Range.thrift_spec), False), None, ), # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, success=None, ouch1=None, ouch2=None, ouch3=None,): self.success = success self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, 
self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.SET: self.success = set() (_etype308, _size305) = iprot.readSetBegin() for _i309 in range(_size305): _elem310 = Range() _elem310.read(iprot) self.success.add(_elem310) iprot.readSetEnd() else: iprot.skip(ftype) elif fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = TableNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('splitRangeByTablets_result') if self.success is not None: oprot.writeFieldBegin('success', TType.SET, 0) oprot.writeSetBegin(TType.STRUCT, len(self.success)) for iter311 in self.success: iter311.write(oprot) oprot.writeSetEnd() oprot.writeFieldEnd() if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not 
(self == other) class tableExists_args(object): """ Attributes: - login - tableName """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tableName', 'UTF8', None, ), # 2 ) def __init__(self, login=None, tableName=None,): self.login = login self.tableName = tableName def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('tableExists_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tableName is not None: oprot.writeFieldBegin('tableName', TType.STRING, 2) oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class tableExists_result(object): """ Attributes: - success """ thrift_spec = ( (0, 
TType.BOOL, 'success', None, None, ), # 0 ) def __init__(self, success=None,): self.success = success def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.BOOL: self.success = iprot.readBool() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('tableExists_result') if self.success is not None: oprot.writeFieldBegin('success', TType.BOOL, 0) oprot.writeBool(self.success) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class tableIdMap_args(object): """ Attributes: - login """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 ) def __init__(self, login=None,): self.login = login def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() 
iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('tableIdMap_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class tableIdMap_result(object): """ Attributes: - success """ thrift_spec = ( (0, TType.MAP, 'success', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 0 ) def __init__(self, success=None,): self.success = success def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.MAP: self.success = {} (_ktype313, _vtype314, _size312) = iprot.readMapBegin() for _i316 in range(_size312): _key317 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() _val318 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() self.success[_key317] = _val318 iprot.readMapEnd() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return 
oprot.writeStructBegin('tableIdMap_result') if self.success is not None: oprot.writeFieldBegin('success', TType.MAP, 0) oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.success)) for kiter319, viter320 in self.success.items(): oprot.writeString(kiter319.encode('utf-8') if sys.version_info[0] == 2 else kiter319) oprot.writeString(viter320.encode('utf-8') if sys.version_info[0] == 2 else viter320) oprot.writeMapEnd() oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class testTableClassLoad_args(object): """ Attributes: - login - tableName - className - asTypeName """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tableName', 'UTF8', None, ), # 2 (3, TType.STRING, 'className', 'UTF8', None, ), # 3 (4, TType.STRING, 'asTypeName', 'UTF8', None, ), # 4 ) def __init__(self, login=None, tableName=None, className=None, asTypeName=None,): self.login = login self.tableName = tableName self.className = className self.asTypeName = asTypeName def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRING: 
self.className = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 4: if ftype == TType.STRING: self.asTypeName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('testTableClassLoad_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tableName is not None: oprot.writeFieldBegin('tableName', TType.STRING, 2) oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName) oprot.writeFieldEnd() if self.className is not None: oprot.writeFieldBegin('className', TType.STRING, 3) oprot.writeString(self.className.encode('utf-8') if sys.version_info[0] == 2 else self.className) oprot.writeFieldEnd() if self.asTypeName is not None: oprot.writeFieldBegin('asTypeName', TType.STRING, 4) oprot.writeString(self.asTypeName.encode('utf-8') if sys.version_info[0] == 2 else self.asTypeName) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class testTableClassLoad_result(object): """ Attributes: - success - ouch1 - ouch2 - ouch3 """ thrift_spec = ( (0, TType.BOOL, 'success', None, None, ), # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', 
(AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, success=None, ouch1=None, ouch2=None, ouch3=None,): self.success = success self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.BOOL: self.success = iprot.readBool() else: iprot.skip(ftype) elif fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = TableNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('testTableClassLoad_result') if self.success is not None: oprot.writeFieldBegin('success', TType.BOOL, 0) oprot.writeBool(self.success) oprot.writeFieldEnd() if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): 
return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class pingTabletServer_args(object): """ Attributes: - login - tserver """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tserver', 'UTF8', None, ), # 2 ) def __init__(self, login=None, tserver=None,): self.login = login self.tserver = tserver def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tserver = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('pingTabletServer_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tserver is not None: oprot.writeFieldBegin('tserver', TType.STRING, 2) oprot.writeString(self.tserver.encode('utf-8') if sys.version_info[0] == 2 else self.tserver) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % 
(self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class pingTabletServer_result(object): """ Attributes: - ouch1 - ouch2 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 ) def __init__(self, ouch1=None, ouch2=None,): self.ouch1 = ouch1 self.ouch2 = ouch2 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('pingTabletServer_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and 
self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class getActiveScans_args(object): """ Attributes: - login - tserver """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tserver', 'UTF8', None, ), # 2 ) def __init__(self, login=None, tserver=None,): self.login = login self.tserver = tserver def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tserver = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('getActiveScans_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tserver is not None: oprot.writeFieldBegin('tserver', TType.STRING, 2) oprot.writeString(self.tserver.encode('utf-8') if sys.version_info[0] == 2 else self.tserver) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class getActiveScans_result(object): 
""" Attributes: - success - ouch1 - ouch2 """ thrift_spec = ( (0, TType.LIST, 'success', (TType.STRUCT, (ActiveScan, ActiveScan.thrift_spec), False), None, ), # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 ) def __init__(self, success=None, ouch1=None, ouch2=None,): self.success = success self.ouch1 = ouch1 self.ouch2 = ouch2 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.LIST: self.success = [] (_etype324, _size321) = iprot.readListBegin() for _i325 in range(_size321): _elem326 = ActiveScan() _elem326.read(iprot) self.success.append(_elem326) iprot.readListEnd() else: iprot.skip(ftype) elif fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('getActiveScans_result') if self.success is not None: oprot.writeFieldBegin('success', TType.LIST, 0) oprot.writeListBegin(TType.STRUCT, len(self.success)) for iter327 in self.success: iter327.write(oprot) oprot.writeListEnd() oprot.writeFieldEnd() if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: 
oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class getActiveCompactions_args(object): """ Attributes: - login - tserver """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tserver', 'UTF8', None, ), # 2 ) def __init__(self, login=None, tserver=None,): self.login = login self.tserver = tserver def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tserver = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('getActiveCompactions_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tserver is not None: oprot.writeFieldBegin('tserver', TType.STRING, 2) oprot.writeString(self.tserver.encode('utf-8') if sys.version_info[0] == 2 else self.tserver) oprot.writeFieldEnd() 
oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class getActiveCompactions_result(object): """ Attributes: - success - ouch1 - ouch2 """ thrift_spec = ( (0, TType.LIST, 'success', (TType.STRUCT, (ActiveCompaction, ActiveCompaction.thrift_spec), False), None, ), # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 ) def __init__(self, success=None, ouch1=None, ouch2=None,): self.success = success self.ouch1 = ouch1 self.ouch2 = ouch2 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.LIST: self.success = [] (_etype331, _size328) = iprot.readListBegin() for _i332 in range(_size328): _elem333 = ActiveCompaction() _elem333.read(iprot) self.success.append(_elem333) iprot.readListEnd() else: iprot.skip(ftype) elif fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) 
return oprot.writeStructBegin('getActiveCompactions_result') if self.success is not None: oprot.writeFieldBegin('success', TType.LIST, 0) oprot.writeListBegin(TType.STRUCT, len(self.success)) for iter334 in self.success: iter334.write(oprot) oprot.writeListEnd() oprot.writeFieldEnd() if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class getSiteConfiguration_args(object): """ Attributes: - login """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 ) def __init__(self, login=None,): self.login = login def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('getSiteConfiguration_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() oprot.writeFieldStop() 
oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class getSiteConfiguration_result(object): """ Attributes: - success - ouch1 - ouch2 """ thrift_spec = ( (0, TType.MAP, 'success', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 ) def __init__(self, success=None, ouch1=None, ouch2=None,): self.success = success self.ouch1 = ouch1 self.ouch2 = ouch2 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.MAP: self.success = {} (_ktype336, _vtype337, _size335) = iprot.readMapBegin() for _i339 in range(_size335): _key340 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() _val341 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() self.success[_key340] = _val341 iprot.readMapEnd() else: iprot.skip(ftype) elif fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and 
self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('getSiteConfiguration_result') if self.success is not None: oprot.writeFieldBegin('success', TType.MAP, 0) oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.success)) for kiter342, viter343 in self.success.items(): oprot.writeString(kiter342.encode('utf-8') if sys.version_info[0] == 2 else kiter342) oprot.writeString(viter343.encode('utf-8') if sys.version_info[0] == 2 else viter343) oprot.writeMapEnd() oprot.writeFieldEnd() if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class getSystemConfiguration_args(object): """ Attributes: - login """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 ) def __init__(self, login=None,): self.login = login def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: 
            # --- continuation of getSystemConfiguration_args.write(): fast-path
            # branch body (the 'if oprot._fast_encode...' header is in the
            # preceding chunk). ---
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        # Slow path: only set fields (is not None) are written.
        oprot.writeStructBegin('getSystemConfiguration_args')
        if self.login is not None:
            oprot.writeFieldBegin('login', TType.STRING, 1)
            oprot.writeBinary(self.login)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


# NOTE(review): Thrift-compiler-generated code -- regenerate from the IDL
# rather than hand-editing.
class getSystemConfiguration_result(object):
    """
    Attributes:
     - success
     - ouch1
     - ouch2

    """

    # Result: string->string configuration map, or a thrown exception.
    thrift_spec = (
        (0, TType.MAP, 'success', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ),  # 0
        (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ),  # 2
    )

    def __init__(self, success=None, ouch1=None, ouch2=None,):
        self.success = success
        self.ouch1 = ouch1
        self.ouch2 = ouch2

    def read(self, iprot):
        # Fast path: C-accelerated decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.MAP:
                    self.success = {}
                    (_ktype345, _vtype346, _size344) = iprot.readMapBegin()
                    for _i348 in range(_size344):
                        # Python 2 returns bytes and must decode; Python 3 returns str.
                        _key349 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val350 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        self.success[_key349] = _val350
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.ouch1 = AccumuloException()
                    self.ouch1.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.ouch2 = AccumuloSecurityException()
                    self.ouch2.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('getSystemConfiguration_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.MAP, 0)
            oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.success))
            for kiter351, viter352 in self.success.items():
                oprot.writeString(kiter351.encode('utf-8') if sys.version_info[0] == 2 else kiter351)
                oprot.writeString(viter352.encode('utf-8') if sys.version_info[0] == 2 else viter352)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.ouch1 is not None:
            oprot.writeFieldBegin('ouch1', TType.STRUCT, 1)
            self.ouch1.write(oprot)
            oprot.writeFieldEnd()
        if self.ouch2 is not None:
            oprot.writeFieldBegin('ouch2', TType.STRUCT, 2)
            self.ouch2.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class getTabletServers_args(object):
    """
    Attributes:
     - login

    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'login', 'BINARY', None, ),  # 1
    )

    def __init__(self, login=None,):
        self.login = login

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('getTabletServers_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class getTabletServers_result(object): """ Attributes: - success """ thrift_spec = ( (0, TType.LIST, 'success', (TType.STRING, 'UTF8', False), None, ), # 0 ) def __init__(self, success=None,): self.success = success def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.LIST: self.success = [] (_etype356, _size353) = iprot.readListBegin() for _i357 in range(_size353): _elem358 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() self.success.append(_elem358) iprot.readListEnd() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, 
self.thrift_spec))) return oprot.writeStructBegin('getTabletServers_result') if self.success is not None: oprot.writeFieldBegin('success', TType.LIST, 0) oprot.writeListBegin(TType.STRING, len(self.success)) for iter359 in self.success: oprot.writeString(iter359.encode('utf-8') if sys.version_info[0] == 2 else iter359) oprot.writeListEnd() oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class removeProperty_args(object): """ Attributes: - login - property """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'property', 'UTF8', None, ), # 2 ) def __init__(self, login=None, property=None,): self.login = login self.property = property def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.property = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('removeProperty_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) 
            # --- continuation of removeProperty_args.write(): body of the
            # 'if self.login is not None:' block begun in the preceding chunk. ---
            oprot.writeBinary(self.login)
            oprot.writeFieldEnd()
        if self.property is not None:
            oprot.writeFieldBegin('property', TType.STRING, 2)
            oprot.writeString(self.property.encode('utf-8') if sys.version_info[0] == 2 else self.property)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


# NOTE(review): Thrift-compiler-generated code -- regenerate from the IDL
# rather than hand-editing.
class removeProperty_result(object):
    """
    Attributes:
     - ouch1
     - ouch2

    """

    # Void RPC result: only the declared exceptions can come back.
    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ),  # 2
    )

    def __init__(self, ouch1=None, ouch2=None,):
        self.ouch1 = ouch1
        self.ouch2 = ouch2

    def read(self, iprot):
        # Fast path: C-accelerated decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.ouch1 = AccumuloException()
                    self.ouch1.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.ouch2 = AccumuloSecurityException()
                    self.ouch2.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('removeProperty_result')
        if self.ouch1 is not None:
            oprot.writeFieldBegin('ouch1', TType.STRUCT, 1)
            self.ouch1.write(oprot)
            oprot.writeFieldEnd()
        if self.ouch2 is not None:
            oprot.writeFieldBegin('ouch2', TType.STRUCT, 2)
            self.ouch2.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class setProperty_args(object):
    """
    Attributes:
     - login
     - property
     - value

    """

    # 'property' mirrors the IDL field name; it intentionally shadows the
    # builtin inside generated code and must not be renamed by hand.
    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'login', 'BINARY', None, ),  # 1
        (2, TType.STRING, 'property', 'UTF8', None, ),  # 2
        (3, TType.STRING, 'value', 'UTF8', None, ),  # 3
    )

    def __init__(self, login=None, property=None, value=None,):
        self.login = login
        self.property = property
        self.value = value

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.login = iprot.readBinary()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.property = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.value = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('setProperty_args')
        if self.login is not None:
            oprot.writeFieldBegin('login', TType.STRING, 1)
            oprot.writeBinary(self.login)
            oprot.writeFieldEnd()
        if self.property is not None:
            oprot.writeFieldBegin('property', TType.STRING, 2)
            oprot.writeString(self.property.encode('utf-8') if sys.version_info[0] == 2 else self.property)
            oprot.writeFieldEnd()
        if self.value is not None:
            oprot.writeFieldBegin('value', TType.STRING, 3)
            oprot.writeString(self.value.encode('utf-8') if sys.version_info[0] == 2 else self.value)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class setProperty_result(object):
    """
    Attributes:
     - ouch1
     - ouch2

    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ),  # 2
    )

    def __init__(self, ouch1=None, ouch2=None,):
        self.ouch1 = ouch1
        self.ouch2 = ouch2

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.ouch1 = AccumuloException()
                    self.ouch1.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.ouch2 = AccumuloSecurityException()
                    self.ouch2.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('setProperty_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class testClassLoad_args(object): """ Attributes: - login - className - asTypeName """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'className', 'UTF8', None, ), # 2 (3, TType.STRING, 'asTypeName', 'UTF8', None, ), # 3 ) def __init__(self, login=None, className=None, asTypeName=None,): self.login = login self.className = className self.asTypeName = asTypeName def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.className = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRING: self.asTypeName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def 
write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('testClassLoad_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.className is not None: oprot.writeFieldBegin('className', TType.STRING, 2) oprot.writeString(self.className.encode('utf-8') if sys.version_info[0] == 2 else self.className) oprot.writeFieldEnd() if self.asTypeName is not None: oprot.writeFieldBegin('asTypeName', TType.STRING, 3) oprot.writeString(self.asTypeName.encode('utf-8') if sys.version_info[0] == 2 else self.asTypeName) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class testClassLoad_result(object): """ Attributes: - success - ouch1 - ouch2 """ thrift_spec = ( (0, TType.BOOL, 'success', None, None, ), # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 ) def __init__(self, success=None, ouch1=None, ouch2=None,): self.success = success self.ouch1 = ouch1 self.ouch2 = ouch2 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.BOOL: self.success = 
iprot.readBool() else: iprot.skip(ftype) elif fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('testClassLoad_result') if self.success is not None: oprot.writeFieldBegin('success', TType.BOOL, 0) oprot.writeBool(self.success) oprot.writeFieldEnd() if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class authenticateUser_args(object): """ Attributes: - login - user - properties """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'user', 'UTF8', None, ), # 2 (3, TType.MAP, 'properties', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 3 ) def __init__(self, login=None, user=None, properties=None,): self.login = login self.user = user self.properties = properties def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return 
        # Slow path: pure-Python field-by-field decode; fields with an
        # unexpected id or wire type are skipped.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.login = iprot.readBinary()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.user = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.MAP:
                    # properties is a string->string map (see thrift_spec).
                    self.properties = {}
                    (_ktype361, _vtype362, _size360) = iprot.readMapBegin()
                    for _i364 in range(_size360):
                        _key365 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val366 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        self.properties[_key365] = _val366
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoder, when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        # Slow path: None-valued fields are omitted from the wire.
        oprot.writeStructBegin('authenticateUser_args')
        if self.login is not None:
            oprot.writeFieldBegin('login', TType.STRING, 1)
            oprot.writeBinary(self.login)
            oprot.writeFieldEnd()
        if self.user is not None:
            oprot.writeFieldBegin('user', TType.STRING, 2)
            oprot.writeString(self.user.encode('utf-8') if sys.version_info[0] == 2 else self.user)
            oprot.writeFieldEnd()
        if self.properties is not None:
            oprot.writeFieldBegin('properties', TType.MAP, 3)
            oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.properties))
            for kiter367, viter368 in self.properties.items():
                oprot.writeString(kiter367.encode('utf-8') if sys.version_info[0] == 2 else kiter367)
                oprot.writeString(viter368.encode('utf-8') if sys.version_info[0] == 2 else viter368)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated code: this struct declares no validation constraints.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class authenticateUser_result(object):
    """Thrift-generated result struct for the authenticateUser RPC.

    Attributes:
     - success: boolean outcome of the authentication call
     - ouch1: AccumuloException raised by the server, if any
     - ouch2: AccumuloSecurityException raised by the server, if any

    """

    thrift_spec = (
        (0, TType.BOOL, 'success', None, None, ),  # 0
        (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ),  # 2
    )

    def __init__(self, success=None, ouch1=None, ouch2=None,):
        self.success = success
        self.ouch1 = ouch1
        self.ouch2 = ouch2

    def read(self, iprot):
        # Fast path: C-accelerated decode when supported.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.BOOL:
                    self.success = iprot.readBool()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.ouch1 = AccumuloException()
                    self.ouch1.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.ouch2 = AccumuloSecurityException()
                    self.ouch2.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encode when supported.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('authenticateUser_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.BOOL, 0)
            oprot.writeBool(self.success)
            oprot.writeFieldEnd()
        if self.ouch1 is not None:
            oprot.writeFieldBegin('ouch1', TType.STRUCT, 1)
            self.ouch1.write(oprot)
            oprot.writeFieldEnd()
        if self.ouch2 is not None:
            oprot.writeFieldBegin('ouch2', TType.STRUCT, 2)
            self.ouch2.write(oprot)
oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class changeUserAuthorizations_args(object): """ Attributes: - login - user - authorizations """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'user', 'UTF8', None, ), # 2 (3, TType.SET, 'authorizations', (TType.STRING, 'BINARY', False), None, ), # 3 ) def __init__(self, login=None, user=None, authorizations=None,): self.login = login self.user = user self.authorizations = authorizations def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.user = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.SET: self.authorizations = set() (_etype372, _size369) = iprot.readSetBegin() for _i373 in range(_size369): _elem374 = iprot.readBinary() self.authorizations.add(_elem374) iprot.readSetEnd() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('changeUserAuthorizations_args') if self.login is 
not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.user is not None: oprot.writeFieldBegin('user', TType.STRING, 2) oprot.writeString(self.user.encode('utf-8') if sys.version_info[0] == 2 else self.user) oprot.writeFieldEnd() if self.authorizations is not None: oprot.writeFieldBegin('authorizations', TType.SET, 3) oprot.writeSetBegin(TType.STRING, len(self.authorizations)) for iter375 in self.authorizations: oprot.writeBinary(iter375) oprot.writeSetEnd() oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class changeUserAuthorizations_result(object): """ Attributes: - ouch1 - ouch2 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 ) def __init__(self, ouch1=None, ouch2=None,): self.ouch1 = ouch1 self.ouch2 = ouch2 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if 
oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('changeUserAuthorizations_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class changeLocalUserPassword_args(object): """ Attributes: - login - user - password """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'user', 'UTF8', None, ), # 2 (3, TType.STRING, 'password', 'BINARY', None, ), # 3 ) def __init__(self, login=None, user=None, password=None,): self.login = login self.user = user self.password = password def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.user = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRING: self.password = iprot.readBinary() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): 
        # Fast path: C-accelerated encoder, when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        # Slow path: None-valued fields are omitted from the wire.
        oprot.writeStructBegin('changeLocalUserPassword_args')
        if self.login is not None:
            oprot.writeFieldBegin('login', TType.STRING, 1)
            oprot.writeBinary(self.login)
            oprot.writeFieldEnd()
        if self.user is not None:
            oprot.writeFieldBegin('user', TType.STRING, 2)
            oprot.writeString(self.user.encode('utf-8') if sys.version_info[0] == 2 else self.user)
            oprot.writeFieldEnd()
        if self.password is not None:
            # password is declared BINARY in thrift_spec; written raw, not UTF8-encoded.
            oprot.writeFieldBegin('password', TType.STRING, 3)
            oprot.writeBinary(self.password)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated code: this struct declares no validation constraints.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class changeLocalUserPassword_result(object):
    """Thrift-generated result struct for the changeLocalUserPassword RPC.

    The RPC itself is void; only the declared exceptions are carried back.

    Attributes:
     - ouch1: AccumuloException raised by the server, if any
     - ouch2: AccumuloSecurityException raised by the server, if any

    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ),  # 2
    )

    def __init__(self, ouch1=None, ouch2=None,):
        self.ouch1 = ouch1
        self.ouch2 = ouch2

    def read(self, iprot):
        # Fast path: C-accelerated decode when supported.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.ouch1 = AccumuloException()
                    self.ouch1.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.ouch2 = AccumuloSecurityException()
                    self.ouch2.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encode when supported.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('changeLocalUserPassword_result')
        if self.ouch1 is not None:
            oprot.writeFieldBegin('ouch1', TType.STRUCT, 1)
            self.ouch1.write(oprot)
            oprot.writeFieldEnd()
        if self.ouch2 is not None:
            oprot.writeFieldBegin('ouch2', TType.STRUCT, 2)
            self.ouch2.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated code: this struct declares no validation constraints.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class createLocalUser_args(object):
    """Thrift-generated argument struct for the createLocalUser RPC.

    Attributes:
     - login: binary credential blob (thrift_spec type BINARY)
     - user: user name (UTF8 string)
     - password: new user's password (BINARY; raw bytes)

    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'login', 'BINARY', None, ),  # 1
        (2, TType.STRING, 'user', 'UTF8', None, ),  # 2
        (3, TType.STRING, 'password', 'BINARY', None, ),  # 3
    )

    def __init__(self, login=None, user=None, password=None,):
        self.login = login
        self.user = user
        self.password = password

    def read(self, iprot):
        # Fast path: C-accelerated decode when supported.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.login = iprot.readBinary()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.user = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.password = iprot.readBinary()
                else:
iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('createLocalUser_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.user is not None: oprot.writeFieldBegin('user', TType.STRING, 2) oprot.writeString(self.user.encode('utf-8') if sys.version_info[0] == 2 else self.user) oprot.writeFieldEnd() if self.password is not None: oprot.writeFieldBegin('password', TType.STRING, 3) oprot.writeBinary(self.password) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class createLocalUser_result(object): """ Attributes: - ouch1 - ouch2 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 ) def __init__(self, ouch1=None, ouch2=None,): self.ouch1 = ouch1 self.ouch2 = ouch2 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == 
TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('createLocalUser_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class dropLocalUser_args(object): """ Attributes: - login - user """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'user', 'UTF8', None, ), # 2 ) def __init__(self, login=None, user=None,): self.login = login self.user = user def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.user = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is 
not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('dropLocalUser_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.user is not None: oprot.writeFieldBegin('user', TType.STRING, 2) oprot.writeString(self.user.encode('utf-8') if sys.version_info[0] == 2 else self.user) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class dropLocalUser_result(object): """ Attributes: - ouch1 - ouch2 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 ) def __init__(self, ouch1=None, ouch2=None,): self.ouch1 = ouch1 self.ouch2 = ouch2 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: 
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('dropLocalUser_result')
        if self.ouch1 is not None:
            oprot.writeFieldBegin('ouch1', TType.STRUCT, 1)
            self.ouch1.write(oprot)
            oprot.writeFieldEnd()
        if self.ouch2 is not None:
            oprot.writeFieldBegin('ouch2', TType.STRUCT, 2)
            self.ouch2.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated code: this struct declares no validation constraints.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class getUserAuthorizations_args(object):
    """Thrift-generated argument struct for the getUserAuthorizations RPC.

    Attributes:
     - login: binary credential blob (thrift_spec type BINARY)
     - user: user name (UTF8 string)

    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'login', 'BINARY', None, ),  # 1
        (2, TType.STRING, 'user', 'UTF8', None, ),  # 2
    )

    def __init__(self, login=None, user=None,):
        self.login = login
        self.user = user

    def read(self, iprot):
        # Fast path: C-accelerated decode when supported.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        # Slow path: fields with an unexpected id or wire type are skipped.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.login = iprot.readBinary()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.user = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoder, when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        # Slow path: None-valued fields are omitted from the wire.
        oprot.writeStructBegin('getUserAuthorizations_args')
        if self.login is not None:
            oprot.writeFieldBegin('login', TType.STRING, 1)
            oprot.writeBinary(self.login)
            oprot.writeFieldEnd()
        if self.user is not None:
            oprot.writeFieldBegin('user', TType.STRING, 2)
            oprot.writeString(self.user.encode('utf-8') if sys.version_info[0] == 2 else self.user)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated code: this struct declares no validation constraints.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class getUserAuthorizations_result(object):
    """Thrift-generated result struct for the getUserAuthorizations RPC.

    Attributes:
     - success: list of binary authorization tokens (thrift_spec: LIST of BINARY)
     - ouch1: AccumuloException raised by the server, if any
     - ouch2: AccumuloSecurityException raised by the server, if any

    """

    thrift_spec = (
        (0, TType.LIST, 'success', (TType.STRING, 'BINARY', False), None, ),  # 0
        (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ),  # 2
    )

    def __init__(self, success=None, ouch1=None, ouch2=None,):
        self.success = success
        self.ouch1 = ouch1
        self.ouch2 = ouch2

    def read(self, iprot):
        # Fast path: C-accelerated decode when supported.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.LIST:
                    # success is a list of binary elements (see thrift_spec).
                    self.success = []
                    (_etype379, _size376) = iprot.readListBegin()
                    for _i380 in range(_size376):
                        _elem381 = iprot.readBinary()
                        self.success.append(_elem381)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.ouch1 = AccumuloException()
                    self.ouch1.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.ouch2 = AccumuloSecurityException()
                    self.ouch2.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('getUserAuthorizations_result') if self.success is not None: oprot.writeFieldBegin('success', TType.LIST, 0) oprot.writeListBegin(TType.STRING, len(self.success)) for iter382 in self.success: oprot.writeBinary(iter382) oprot.writeListEnd() oprot.writeFieldEnd() if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class grantSystemPermission_args(object): """ Attributes: - login - user - perm """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'user', 'UTF8', None, ), # 2 (3, TType.I32, 'perm', None, None, ), # 3 ) def __init__(self, login=None, user=None, perm=None,): self.login = login self.user = user self.perm = perm def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.user = iprot.readString().decode('utf-8') if 
# NOTE(review): Thrift-autogenerated — do not edit by hand. Tail of
# grantSystemPermission_args (Py2/Py3 string handling, write()), then
# grantSystemPermission_result: a void RPC, so only exception fields
# (ouch1: AccumuloException, ouch2: AccumuloSecurityException) and no `success` slot.
sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.I32: self.perm = iprot.readI32() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('grantSystemPermission_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.user is not None: oprot.writeFieldBegin('user', TType.STRING, 2) oprot.writeString(self.user.encode('utf-8') if sys.version_info[0] == 2 else self.user) oprot.writeFieldEnd() if self.perm is not None: oprot.writeFieldBegin('perm', TType.I32, 3) oprot.writeI32(self.perm) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class grantSystemPermission_result(object): """ Attributes: - ouch1 - ouch2 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 ) def __init__(self, ouch1=None, ouch2=None,): self.ouch1 = ouch1 self.ouch2 = ouch2 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid 
# grantSystemPermission_result read/write, then grantTablePermission_args: adds a `table`
# (UTF-8, field 3) to the login/user/perm triple; perm is an i32 TablePermission enum value.
== 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('grantSystemPermission_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class grantTablePermission_args(object): """ Attributes: - login - user - table - perm """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'user', 'UTF8', None, ), # 2 (3, TType.STRING, 'table', 'UTF8', None, ), # 3 (4, TType.I32, 'perm', None, None, ), # 4 ) def __init__(self, login=None, user=None, table=None, perm=None,): self.login = login self.user = user self.table = table self.perm = perm def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = 
# NOTE(review): Thrift-autogenerated — do not edit by hand. Tail of
# grantTablePermission_args (read of user/table/perm, then write()), followed by
# grantTablePermission_result, which adds ouch3: TableNotFoundException to the usual
# AccumuloException/AccumuloSecurityException pair (void return, no `success`).
iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.user = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRING: self.table = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 4: if ftype == TType.I32: self.perm = iprot.readI32() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('grantTablePermission_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.user is not None: oprot.writeFieldBegin('user', TType.STRING, 2) oprot.writeString(self.user.encode('utf-8') if sys.version_info[0] == 2 else self.user) oprot.writeFieldEnd() if self.table is not None: oprot.writeFieldBegin('table', TType.STRING, 3) oprot.writeString(self.table.encode('utf-8') if sys.version_info[0] == 2 else self.table) oprot.writeFieldEnd() if self.perm is not None: oprot.writeFieldBegin('perm', TType.I32, 4) oprot.writeI32(self.perm) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class grantTablePermission_result(object): """ Attributes: - ouch1 - ouch2 - ouch3 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', 
# grantTablePermission_result body: field-by-field read() and conditional write().
(AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, ouch1=None, ouch2=None, ouch3=None,): self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = TableNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('grantTablePermission_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def 
# hasSystemPermission_args: query form of the grant call — same login/user/perm triple;
# the matching result struct (next block) carries a BOOL `success`.
__ne__(self, other): return not (self == other) class hasSystemPermission_args(object): """ Attributes: - login - user - perm """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'user', 'UTF8', None, ), # 2 (3, TType.I32, 'perm', None, None, ), # 3 ) def __init__(self, login=None, user=None, perm=None,): self.login = login self.user = user self.perm = perm def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.user = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.I32: self.perm = iprot.readI32() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('hasSystemPermission_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.user is not None: oprot.writeFieldBegin('user', TType.STRING, 2) oprot.writeString(self.user.encode('utf-8') if sys.version_info[0] == 2 else self.user) oprot.writeFieldEnd() if self.perm is not None: oprot.writeFieldBegin('perm', TType.I32, 3) oprot.writeI32(self.perm) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % 
# NOTE(review): Thrift-autogenerated — do not edit by hand. hasSystemPermission_result:
# boolean `success` at field 0 plus the two standard exception slots.
(self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class hasSystemPermission_result(object): """ Attributes: - success - ouch1 - ouch2 """ thrift_spec = ( (0, TType.BOOL, 'success', None, None, ), # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 ) def __init__(self, success=None, ouch1=None, ouch2=None,): self.success = success self.ouch1 = ouch1 self.ouch2 = ouch2 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.BOOL: self.success = iprot.readBool() else: iprot.skip(ftype) elif fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('hasSystemPermission_result') if self.success is not None: oprot.writeFieldBegin('success', TType.BOOL, 0) oprot.writeBool(self.success) oprot.writeFieldEnd() if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) 
# hasTablePermission_args: login/user/table/perm, mirroring grantTablePermission_args.
oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class hasTablePermission_args(object): """ Attributes: - login - user - table - perm """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'user', 'UTF8', None, ), # 2 (3, TType.STRING, 'table', 'UTF8', None, ), # 3 (4, TType.I32, 'perm', None, None, ), # 4 ) def __init__(self, login=None, user=None, table=None, perm=None,): self.login = login self.user = user self.table = table self.perm = perm def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.user = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRING: self.table = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 4: if ftype == TType.I32: self.perm = iprot.readI32() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('hasTablePermission_args') if self.login 
# hasTablePermission_args.write tail, then hasTablePermission_result: BOOL success plus
# three exception slots (ouch3: TableNotFoundException added for table-scoped calls).
is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.user is not None: oprot.writeFieldBegin('user', TType.STRING, 2) oprot.writeString(self.user.encode('utf-8') if sys.version_info[0] == 2 else self.user) oprot.writeFieldEnd() if self.table is not None: oprot.writeFieldBegin('table', TType.STRING, 3) oprot.writeString(self.table.encode('utf-8') if sys.version_info[0] == 2 else self.table) oprot.writeFieldEnd() if self.perm is not None: oprot.writeFieldBegin('perm', TType.I32, 4) oprot.writeI32(self.perm) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class hasTablePermission_result(object): """ Attributes: - success - ouch1 - ouch2 - ouch3 """ thrift_spec = ( (0, TType.BOOL, 'success', None, None, ), # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, success=None, ouch1=None, ouch2=None, ouch3=None,): self.success = success self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.BOOL: self.success = iprot.readBool() else: 
# NOTE(review): Thrift-autogenerated — do not edit by hand. hasTablePermission_result
# read/write bodies, then listLocalUsers_args: the only argument is the binary login token.
iprot.skip(ftype) elif fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = TableNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('hasTablePermission_result') if self.success is not None: oprot.writeFieldBegin('success', TType.BOOL, 0) oprot.writeBool(self.success) oprot.writeFieldEnd() if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class listLocalUsers_args(object): """ Attributes: - login """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 ) def __init__(self, login=None,): self.login = login def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() 
# listLocalUsers_args tail, then listLocalUsers_result: `success` is a SET of UTF-8 user
# names (field 0); three exception slots including TableNotFoundException.
while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('listLocalUsers_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class listLocalUsers_result(object): """ Attributes: - success - ouch1 - ouch2 - ouch3 """ thrift_spec = ( (0, TType.SET, 'success', (TType.STRING, 'UTF8', False), None, ), # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, success=None, ouch1=None, ouch2=None, ouch3=None,): self.success = success self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.SET: self.success = set() 
# listLocalUsers_result read (set deserialization with generated _etype/_size/_elem
# temporaries) and write (set serialization via iter389), then revokeSystemPermission_args.
(_etype386, _size383) = iprot.readSetBegin() for _i387 in range(_size383): _elem388 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() self.success.add(_elem388) iprot.readSetEnd() else: iprot.skip(ftype) elif fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = TableNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('listLocalUsers_result') if self.success is not None: oprot.writeFieldBegin('success', TType.SET, 0) oprot.writeSetBegin(TType.STRING, len(self.success)) for iter389 in self.success: oprot.writeString(iter389.encode('utf-8') if sys.version_info[0] == 2 else iter389) oprot.writeSetEnd() oprot.writeFieldEnd() if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class revokeSystemPermission_args(object): """ Attributes: - 
login - user - perm """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'user', 'UTF8', None, ), # 2 (3, TType.I32, 'perm', None, None, ), # 3 ) def __init__(self, login=None, user=None, perm=None,): self.login = login self.user = user self.perm = perm def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.user = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.I32: self.perm = iprot.readI32() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('revokeSystemPermission_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.user is not None: oprot.writeFieldBegin('user', TType.STRING, 2) oprot.writeString(self.user.encode('utf-8') if sys.version_info[0] == 2 else self.user) oprot.writeFieldEnd() if self.perm is not None: oprot.writeFieldBegin('perm', TType.I32, 3) oprot.writeI32(self.perm) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) 
and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class revokeSystemPermission_result(object): """ Attributes: - ouch1 - ouch2 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 ) def __init__(self, ouch1=None, ouch2=None,): self.ouch1 = ouch1 self.ouch2 = ouch2 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('revokeSystemPermission_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class 
revokeTablePermission_args(object): """ Attributes: - login - user - table - perm """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'user', 'UTF8', None, ), # 2 (3, TType.STRING, 'table', 'UTF8', None, ), # 3 (4, TType.I32, 'perm', None, None, ), # 4 ) def __init__(self, login=None, user=None, table=None, perm=None,): self.login = login self.user = user self.table = table self.perm = perm def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.user = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRING: self.table = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 4: if ftype == TType.I32: self.perm = iprot.readI32() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('revokeTablePermission_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.user is not None: oprot.writeFieldBegin('user', TType.STRING, 2) oprot.writeString(self.user.encode('utf-8') if sys.version_info[0] == 2 else self.user) oprot.writeFieldEnd() if self.table is not None: oprot.writeFieldBegin('table', TType.STRING, 3) 
oprot.writeString(self.table.encode('utf-8') if sys.version_info[0] == 2 else self.table) oprot.writeFieldEnd() if self.perm is not None: oprot.writeFieldBegin('perm', TType.I32, 4) oprot.writeI32(self.perm) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class revokeTablePermission_result(object): """ Attributes: - ouch1 - ouch2 - ouch3 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, ouch1=None, ouch2=None, ouch3=None,): self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = TableNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not 
# NOTE(review): Thrift-autogenerated — do not edit by hand. revokeTablePermission_result
# write tail, then grantNamespacePermission_args: login/user/namespaceName/perm (i32
# NamespacePermission enum value) — the namespace-scoped analogue of the table calls.
None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('revokeTablePermission_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class grantNamespacePermission_args(object): """ Attributes: - login - user - namespaceName - perm """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'user', 'UTF8', None, ), # 2 (3, TType.STRING, 'namespaceName', 'UTF8', None, ), # 3 (4, TType.I32, 'perm', None, None, ), # 4 ) def __init__(self, login=None, user=None, namespaceName=None, perm=None,): self.login = login self.user = user self.namespaceName = namespaceName self.perm = perm def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.user = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == 
# grantNamespacePermission_args read/write tail, then grantNamespacePermission_result:
# void return with ouch1/ouch2 exception slots only (no TableNotFoundException here).
TType.STRING: self.namespaceName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 4: if ftype == TType.I32: self.perm = iprot.readI32() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('grantNamespacePermission_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.user is not None: oprot.writeFieldBegin('user', TType.STRING, 2) oprot.writeString(self.user.encode('utf-8') if sys.version_info[0] == 2 else self.user) oprot.writeFieldEnd() if self.namespaceName is not None: oprot.writeFieldBegin('namespaceName', TType.STRING, 3) oprot.writeString(self.namespaceName.encode('utf-8') if sys.version_info[0] == 2 else self.namespaceName) oprot.writeFieldEnd() if self.perm is not None: oprot.writeFieldBegin('perm', TType.I32, 4) oprot.writeI32(self.perm) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class grantNamespacePermission_result(object): """ Attributes: - ouch1 - ouch2 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 ) def __init__(self, ouch1=None, ouch2=None,): self.ouch1 = ouch1 self.ouch2 = ouch2 def read(self, iprot): if iprot._fast_decode 
# grantNamespacePermission_result read/write, then hasNamespacePermission_args begins
# (continues past this view — definition is incomplete here).
is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('grantNamespacePermission_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class hasNamespacePermission_args(object): """ Attributes: - login - user - namespaceName - perm """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'user', 'UTF8', None, ), # 2 (3, TType.STRING, 'namespaceName', 'UTF8', None, ), # 3 (4, TType.I32, 'perm', None, None, ), # 4 ) def __init__(self, login=None, user=None, namespaceName=None, perm=None,): self.login = login self.user = user self.namespaceName = namespaceName self.perm = perm def read(self, iprot): if iprot._fast_decode is not 
None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.user = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRING: self.namespaceName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 4: if ftype == TType.I32: self.perm = iprot.readI32() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('hasNamespacePermission_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.user is not None: oprot.writeFieldBegin('user', TType.STRING, 2) oprot.writeString(self.user.encode('utf-8') if sys.version_info[0] == 2 else self.user) oprot.writeFieldEnd() if self.namespaceName is not None: oprot.writeFieldBegin('namespaceName', TType.STRING, 3) oprot.writeString(self.namespaceName.encode('utf-8') if sys.version_info[0] == 2 else self.namespaceName) oprot.writeFieldEnd() if self.perm is not None: oprot.writeFieldBegin('perm', TType.I32, 4) oprot.writeI32(self.perm) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): 
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class hasNamespacePermission_result(object): """ Attributes: - success - ouch1 - ouch2 """ thrift_spec = ( (0, TType.BOOL, 'success', None, None, ), # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 ) def __init__(self, success=None, ouch1=None, ouch2=None,): self.success = success self.ouch1 = ouch1 self.ouch2 = ouch2 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.BOOL: self.success = iprot.readBool() else: iprot.skip(ftype) elif fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('hasNamespacePermission_result') if self.success is not None: oprot.writeFieldBegin('success', TType.BOOL, 0) oprot.writeBool(self.success) oprot.writeFieldEnd() if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() 
oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class revokeNamespacePermission_args(object): """ Attributes: - login - user - namespaceName - perm """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'user', 'UTF8', None, ), # 2 (3, TType.STRING, 'namespaceName', 'UTF8', None, ), # 3 (4, TType.I32, 'perm', None, None, ), # 4 ) def __init__(self, login=None, user=None, namespaceName=None, perm=None,): self.login = login self.user = user self.namespaceName = namespaceName self.perm = perm def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.user = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRING: self.namespaceName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 4: if ftype == TType.I32: self.perm = iprot.readI32() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return 
oprot.writeStructBegin('revokeNamespacePermission_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.user is not None: oprot.writeFieldBegin('user', TType.STRING, 2) oprot.writeString(self.user.encode('utf-8') if sys.version_info[0] == 2 else self.user) oprot.writeFieldEnd() if self.namespaceName is not None: oprot.writeFieldBegin('namespaceName', TType.STRING, 3) oprot.writeString(self.namespaceName.encode('utf-8') if sys.version_info[0] == 2 else self.namespaceName) oprot.writeFieldEnd() if self.perm is not None: oprot.writeFieldBegin('perm', TType.I32, 4) oprot.writeI32(self.perm) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class revokeNamespacePermission_result(object): """ Attributes: - ouch1 - ouch2 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 ) def __init__(self, ouch1=None, ouch2=None,): self.ouch1 = ouch1 self.ouch2 = ouch2 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = 
AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('revokeNamespacePermission_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class createBatchScanner_args(object): """ Attributes: - login - tableName - options """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tableName', 'UTF8', None, ), # 2 (3, TType.STRUCT, 'options', (BatchScanOptions, BatchScanOptions.thrift_spec), None, ), # 3 ) def __init__(self, login=None, tableName=None, options=None,): self.login = login self.tableName = tableName self.options = options def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else 
iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.options = BatchScanOptions() self.options.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('createBatchScanner_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tableName is not None: oprot.writeFieldBegin('tableName', TType.STRING, 2) oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName) oprot.writeFieldEnd() if self.options is not None: oprot.writeFieldBegin('options', TType.STRUCT, 3) self.options.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class createBatchScanner_result(object): """ Attributes: - success - ouch1 - ouch2 - ouch3 """ thrift_spec = ( (0, TType.STRING, 'success', 'UTF8', None, ), # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, success=None, ouch1=None, ouch2=None, ouch3=None,): self.success = success self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, 
TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.STRING: self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = TableNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('createBatchScanner_result') if self.success is not None: oprot.writeFieldBegin('success', TType.STRING, 0) oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success) oprot.writeFieldEnd() if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): 
return not (self == other) class createScanner_args(object): """ Attributes: - login - tableName - options """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tableName', 'UTF8', None, ), # 2 (3, TType.STRUCT, 'options', (ScanOptions, ScanOptions.thrift_spec), None, ), # 3 ) def __init__(self, login=None, tableName=None, options=None,): self.login = login self.tableName = tableName self.options = options def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.options = ScanOptions() self.options.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('createScanner_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tableName is not None: oprot.writeFieldBegin('tableName', TType.STRING, 2) oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName) oprot.writeFieldEnd() if self.options is not None: oprot.writeFieldBegin('options', TType.STRUCT, 3) self.options.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def 
__repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class createScanner_result(object): """ Attributes: - success - ouch1 - ouch2 - ouch3 """ thrift_spec = ( (0, TType.STRING, 'success', 'UTF8', None, ), # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, success=None, ouch1=None, ouch2=None, ouch3=None,): self.success = success self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.STRING: self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = TableNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, 
(self.__class__, self.thrift_spec))) return oprot.writeStructBegin('createScanner_result') if self.success is not None: oprot.writeFieldBegin('success', TType.STRING, 0) oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success) oprot.writeFieldEnd() if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class hasNext_args(object): """ Attributes: - scanner """ thrift_spec = ( None, # 0 (1, TType.STRING, 'scanner', 'UTF8', None, ), # 1 ) def __init__(self, scanner=None,): self.scanner = scanner def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.scanner = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return 
oprot.writeStructBegin('hasNext_args') if self.scanner is not None: oprot.writeFieldBegin('scanner', TType.STRING, 1) oprot.writeString(self.scanner.encode('utf-8') if sys.version_info[0] == 2 else self.scanner) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class hasNext_result(object): """ Attributes: - success - ouch1 """ thrift_spec = ( (0, TType.BOOL, 'success', None, None, ), # 0 (1, TType.STRUCT, 'ouch1', (UnknownScanner, UnknownScanner.thrift_spec), None, ), # 1 ) def __init__(self, success=None, ouch1=None,): self.success = success self.ouch1 = ouch1 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.BOOL: self.success = iprot.readBool() else: iprot.skip(ftype) elif fid == 1: if ftype == TType.STRUCT: self.ouch1 = UnknownScanner() self.ouch1.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('hasNext_result') if self.success is not None: oprot.writeFieldBegin('success', TType.BOOL, 0) oprot.writeBool(self.success) oprot.writeFieldEnd() if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() 
oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class nextEntry_args(object): """ Attributes: - scanner """ thrift_spec = ( None, # 0 (1, TType.STRING, 'scanner', 'UTF8', None, ), # 1 ) def __init__(self, scanner=None,): self.scanner = scanner def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.scanner = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('nextEntry_args') if self.scanner is not None: oprot.writeFieldBegin('scanner', TType.STRING, 1) oprot.writeString(self.scanner.encode('utf-8') if sys.version_info[0] == 2 else self.scanner) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class nextEntry_result(object): """ Attributes: - success - ouch1 - 
ouch2 - ouch3 """ thrift_spec = ( (0, TType.STRUCT, 'success', (KeyValueAndPeek, KeyValueAndPeek.thrift_spec), None, ), # 0 (1, TType.STRUCT, 'ouch1', (NoMoreEntriesException, NoMoreEntriesException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (UnknownScanner, UnknownScanner.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 3 ) def __init__(self, success=None, ouch1=None, ouch2=None, ouch3=None,): self.success = success self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.STRUCT: self.success = KeyValueAndPeek() self.success.read(iprot) else: iprot.skip(ftype) elif fid == 1: if ftype == TType.STRUCT: self.ouch1 = NoMoreEntriesException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = UnknownScanner() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = AccumuloSecurityException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('nextEntry_result') if self.success is not None: oprot.writeFieldBegin('success', TType.STRUCT, 0) self.success.write(oprot) oprot.writeFieldEnd() if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', 
TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class nextK_args(object): """ Attributes: - scanner - k """ thrift_spec = ( None, # 0 (1, TType.STRING, 'scanner', 'UTF8', None, ), # 1 (2, TType.I32, 'k', None, None, ), # 2 ) def __init__(self, scanner=None, k=None,): self.scanner = scanner self.k = k def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.scanner = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.I32: self.k = iprot.readI32() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('nextK_args') if self.scanner is not None: oprot.writeFieldBegin('scanner', TType.STRING, 1) oprot.writeString(self.scanner.encode('utf-8') if sys.version_info[0] == 2 else self.scanner) oprot.writeFieldEnd() if self.k is not None: oprot.writeFieldBegin('k', TType.I32, 2) oprot.writeI32(self.k) oprot.writeFieldEnd() 
oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class nextK_result(object): """ Attributes: - success - ouch1 - ouch2 - ouch3 """ thrift_spec = ( (0, TType.STRUCT, 'success', (ScanResult, ScanResult.thrift_spec), None, ), # 0 (1, TType.STRUCT, 'ouch1', (NoMoreEntriesException, NoMoreEntriesException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (UnknownScanner, UnknownScanner.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 3 ) def __init__(self, success=None, ouch1=None, ouch2=None, ouch3=None,): self.success = success self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.STRUCT: self.success = ScanResult() self.success.read(iprot) else: iprot.skip(ftype) elif fid == 1: if ftype == TType.STRUCT: self.ouch1 = NoMoreEntriesException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = UnknownScanner() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = AccumuloSecurityException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: 
oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('nextK_result') if self.success is not None: oprot.writeFieldBegin('success', TType.STRUCT, 0) self.success.write(oprot) oprot.writeFieldEnd() if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class closeScanner_args(object): """ Attributes: - scanner """ thrift_spec = ( None, # 0 (1, TType.STRING, 'scanner', 'UTF8', None, ), # 1 ) def __init__(self, scanner=None,): self.scanner = scanner def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.scanner = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('closeScanner_args') if self.scanner is 
not None: oprot.writeFieldBegin('scanner', TType.STRING, 1) oprot.writeString(self.scanner.encode('utf-8') if sys.version_info[0] == 2 else self.scanner) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class closeScanner_result(object): """ Attributes: - ouch1 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (UnknownScanner, UnknownScanner.thrift_spec), None, ), # 1 ) def __init__(self, ouch1=None,): self.ouch1 = ouch1 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.ouch1 = UnknownScanner() self.ouch1.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('closeScanner_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return 
not (self == other) class updateAndFlush_args(object): """ Attributes: - login - tableName - cells """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tableName', 'UTF8', None, ), # 2 (3, TType.MAP, 'cells', (TType.STRING, 'BINARY', TType.LIST, (TType.STRUCT, (ColumnUpdate, ColumnUpdate.thrift_spec), False), False), None, ), # 3 ) def __init__(self, login=None, tableName=None, cells=None,): self.login = login self.tableName = tableName self.cells = cells def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.MAP: self.cells = {} (_ktype391, _vtype392, _size390) = iprot.readMapBegin() for _i394 in range(_size390): _key395 = iprot.readBinary() _val396 = [] (_etype400, _size397) = iprot.readListBegin() for _i401 in range(_size397): _elem402 = ColumnUpdate() _elem402.read(iprot) _val396.append(_elem402) iprot.readListEnd() self.cells[_key395] = _val396 iprot.readMapEnd() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('updateAndFlush_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tableName is not None: 
oprot.writeFieldBegin('tableName', TType.STRING, 2) oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName) oprot.writeFieldEnd() if self.cells is not None: oprot.writeFieldBegin('cells', TType.MAP, 3) oprot.writeMapBegin(TType.STRING, TType.LIST, len(self.cells)) for kiter403, viter404 in self.cells.items(): oprot.writeBinary(kiter403) oprot.writeListBegin(TType.STRUCT, len(viter404)) for iter405 in viter404: iter405.write(oprot) oprot.writeListEnd() oprot.writeMapEnd() oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class updateAndFlush_result(object): """ Attributes: - outch1 - ouch2 - ouch3 - ouch4 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'outch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 3 (4, TType.STRUCT, 'ouch4', (MutationsRejectedException, MutationsRejectedException.thrift_spec), None, ), # 4 ) def __init__(self, outch1=None, ouch2=None, ouch3=None, ouch4=None,): self.outch1 = outch1 self.ouch2 = ouch2 self.ouch3 = ouch3 self.ouch4 = ouch4 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.outch1 = 
AccumuloException() self.outch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = TableNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) elif fid == 4: if ftype == TType.STRUCT: self.ouch4 = MutationsRejectedException() self.ouch4.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('updateAndFlush_result') if self.outch1 is not None: oprot.writeFieldBegin('outch1', TType.STRUCT, 1) self.outch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() if self.ouch4 is not None: oprot.writeFieldBegin('ouch4', TType.STRUCT, 4) self.ouch4.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class createWriter_args(object): """ Attributes: - login - tableName - opts """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tableName', 'UTF8', None, ), # 2 (3, TType.STRUCT, 'opts', (WriterOptions, WriterOptions.thrift_spec), None, ), # 3 ) def __init__(self, login=None, tableName=None, opts=None,): self.login = login self.tableName = 
tableName self.opts = opts def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.opts = WriterOptions() self.opts.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('createWriter_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tableName is not None: oprot.writeFieldBegin('tableName', TType.STRING, 2) oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName) oprot.writeFieldEnd() if self.opts is not None: oprot.writeFieldBegin('opts', TType.STRUCT, 3) self.opts.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class createWriter_result(object): """ Attributes: - success - outch1 - ouch2 - ouch3 """ thrift_spec = ( (0, TType.STRING, 'success', 'UTF8', 
None, ), # 0 (1, TType.STRUCT, 'outch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, success=None, outch1=None, ouch2=None, ouch3=None,): self.success = success self.outch1 = outch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.STRING: self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 1: if ftype == TType.STRUCT: self.outch1 = AccumuloException() self.outch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = TableNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('createWriter_result') if self.success is not None: oprot.writeFieldBegin('success', TType.STRING, 0) oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success) oprot.writeFieldEnd() if self.outch1 is not None: oprot.writeFieldBegin('outch1', TType.STRUCT, 1) self.outch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: 
oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class update_args(object): """ Attributes: - writer - cells """ thrift_spec = ( None, # 0 (1, TType.STRING, 'writer', 'UTF8', None, ), # 1 (2, TType.MAP, 'cells', (TType.STRING, 'BINARY', TType.LIST, (TType.STRUCT, (ColumnUpdate, ColumnUpdate.thrift_spec), False), False), None, ), # 2 ) def __init__(self, writer=None, cells=None,): self.writer = writer self.cells = cells def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.writer = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.MAP: self.cells = {} (_ktype407, _vtype408, _size406) = iprot.readMapBegin() for _i410 in range(_size406): _key411 = iprot.readBinary() _val412 = [] (_etype416, _size413) = iprot.readListBegin() for _i417 in range(_size413): _elem418 = ColumnUpdate() _elem418.read(iprot) _val412.append(_elem418) iprot.readListEnd() self.cells[_key411] = _val412 iprot.readMapEnd() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode 
is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('update_args') if self.writer is not None: oprot.writeFieldBegin('writer', TType.STRING, 1) oprot.writeString(self.writer.encode('utf-8') if sys.version_info[0] == 2 else self.writer) oprot.writeFieldEnd() if self.cells is not None: oprot.writeFieldBegin('cells', TType.MAP, 2) oprot.writeMapBegin(TType.STRING, TType.LIST, len(self.cells)) for kiter419, viter420 in self.cells.items(): oprot.writeBinary(kiter419) oprot.writeListBegin(TType.STRUCT, len(viter420)) for iter421 in viter420: iter421.write(oprot) oprot.writeListEnd() oprot.writeMapEnd() oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class flush_args(object): """ Attributes: - writer """ thrift_spec = ( None, # 0 (1, TType.STRING, 'writer', 'UTF8', None, ), # 1 ) def __init__(self, writer=None,): self.writer = writer def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.writer = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, 
(self.__class__, self.thrift_spec))) return oprot.writeStructBegin('flush_args') if self.writer is not None: oprot.writeFieldBegin('writer', TType.STRING, 1) oprot.writeString(self.writer.encode('utf-8') if sys.version_info[0] == 2 else self.writer) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class flush_result(object): """ Attributes: - ouch1 - ouch2 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (UnknownWriter, UnknownWriter.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (MutationsRejectedException, MutationsRejectedException.thrift_spec), None, ), # 2 ) def __init__(self, ouch1=None, ouch2=None,): self.ouch1 = ouch1 self.ouch2 = ouch2 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.ouch1 = UnknownWriter() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = MutationsRejectedException() self.ouch2.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('flush_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) 
oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class closeWriter_args(object): """ Attributes: - writer """ thrift_spec = ( None, # 0 (1, TType.STRING, 'writer', 'UTF8', None, ), # 1 ) def __init__(self, writer=None,): self.writer = writer def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.writer = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('closeWriter_args') if self.writer is not None: oprot.writeFieldBegin('writer', TType.STRING, 1) oprot.writeString(self.writer.encode('utf-8') if sys.version_info[0] == 2 else self.writer) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == 
other.__dict__ def __ne__(self, other): return not (self == other) class closeWriter_result(object): """ Attributes: - ouch1 - ouch2 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (UnknownWriter, UnknownWriter.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (MutationsRejectedException, MutationsRejectedException.thrift_spec), None, ), # 2 ) def __init__(self, ouch1=None, ouch2=None,): self.ouch1 = ouch1 self.ouch2 = ouch2 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.ouch1 = UnknownWriter() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = MutationsRejectedException() self.ouch2.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('closeWriter_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class updateRowConditionally_args(object): """ Attributes: - login - 
tableName - row - updates """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tableName', 'UTF8', None, ), # 2 (3, TType.STRING, 'row', 'BINARY', None, ), # 3 (4, TType.STRUCT, 'updates', (ConditionalUpdates, ConditionalUpdates.thrift_spec), None, ), # 4 ) def __init__(self, login=None, tableName=None, row=None, updates=None,): self.login = login self.tableName = tableName self.row = row self.updates = updates def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRING: self.row = iprot.readBinary() else: iprot.skip(ftype) elif fid == 4: if ftype == TType.STRUCT: self.updates = ConditionalUpdates() self.updates.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('updateRowConditionally_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tableName is not None: oprot.writeFieldBegin('tableName', TType.STRING, 2) oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName) oprot.writeFieldEnd() if self.row is not None: oprot.writeFieldBegin('row', TType.STRING, 3) 
oprot.writeBinary(self.row) oprot.writeFieldEnd() if self.updates is not None: oprot.writeFieldBegin('updates', TType.STRUCT, 4) self.updates.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class updateRowConditionally_result(object): """ Attributes: - success - ouch1 - ouch2 - ouch3 """ thrift_spec = ( (0, TType.I32, 'success', None, None, ), # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, success=None, ouch1=None, ouch2=None, ouch3=None,): self.success = success self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.I32: self.success = iprot.readI32() else: iprot.skip(ftype) elif fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = TableNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) 
iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('updateRowConditionally_result') if self.success is not None: oprot.writeFieldBegin('success', TType.I32, 0) oprot.writeI32(self.success) oprot.writeFieldEnd() if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class createConditionalWriter_args(object): """ Attributes: - login - tableName - options """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'tableName', 'UTF8', None, ), # 2 (3, TType.STRUCT, 'options', (ConditionalWriterOptions, ConditionalWriterOptions.thrift_spec), None, ), # 3 ) def __init__(self, login=None, tableName=None, options=None,): self.login = login self.tableName = tableName self.options = options def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = 
iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tableName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.options = ConditionalWriterOptions() self.options.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('createConditionalWriter_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.tableName is not None: oprot.writeFieldBegin('tableName', TType.STRING, 2) oprot.writeString(self.tableName.encode('utf-8') if sys.version_info[0] == 2 else self.tableName) oprot.writeFieldEnd() if self.options is not None: oprot.writeFieldBegin('options', TType.STRUCT, 3) self.options.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class createConditionalWriter_result(object): """ Attributes: - success - ouch1 - ouch2 - ouch3 """ thrift_spec = ( (0, TType.STRING, 'success', 'UTF8', None, ), # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, success=None, ouch1=None, 
# ---------------------------------------------------------------------------
# Auto-generated Apache Thrift request/response structs (Accumulo proxy API).
# The recovered source had all newlines/indentation stripped; the code below
# restores the canonical thrift-generator layout without changing any token
# that was visible.  Do not hand-edit behavior here — regenerate from the
# .thrift IDL instead.
# ---------------------------------------------------------------------------


class createConditionalWriter_result(object):
    """
    Attributes:
     - success
     - ouch1
     - ouch2
     - ouch3
    """
    # NOTE(review): the opening of this class (thrift_spec and the start of
    # __init__) fell outside the recovered text; it was reconstructed from the
    # field ids/types used by read()/write() below — confirm against the
    # generator output.

    thrift_spec = (
        (0, TType.STRING, 'success', 'UTF8', None, ),  # 0
        (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ),  # 2
        (3, TType.STRUCT, 'ouch3', (TableNotFoundException, TableNotFoundException.thrift_spec), None, ),  # 3
    )

    def __init__(self, success=None, ouch1=None, ouch2=None, ouch3=None,):
        self.success = success
        self.ouch1 = ouch1
        self.ouch2 = ouch2
        self.ouch3 = ouch3

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRING:
                    self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.ouch1 = AccumuloException()
                    self.ouch1.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.ouch2 = AccumuloSecurityException()
                    self.ouch2.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.ouch3 = TableNotFoundException()
                    self.ouch3.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('createConditionalWriter_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRING, 0)
            oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
            oprot.writeFieldEnd()
        if self.ouch1 is not None:
            oprot.writeFieldBegin('ouch1', TType.STRUCT, 1)
            self.ouch1.write(oprot)
            oprot.writeFieldEnd()
        if self.ouch2 is not None:
            oprot.writeFieldBegin('ouch2', TType.STRUCT, 2)
            self.ouch2.write(oprot)
            oprot.writeFieldEnd()
        if self.ouch3 is not None:
            oprot.writeFieldBegin('ouch3', TType.STRUCT, 3)
            self.ouch3.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class updateRowsConditionally_args(object):
    """
    Attributes:
     - conditionalWriter
     - updates
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'conditionalWriter', 'UTF8', None, ),  # 1
        (2, TType.MAP, 'updates', (TType.STRING, 'BINARY', TType.STRUCT, (ConditionalUpdates, ConditionalUpdates.thrift_spec), False), None, ),  # 2
    )

    def __init__(self, conditionalWriter=None, updates=None,):
        self.conditionalWriter = conditionalWriter
        self.updates = updates

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.conditionalWriter = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.MAP:
                    self.updates = {}
                    (_ktype423, _vtype424, _size422) = iprot.readMapBegin()
                    for _i426 in range(_size422):
                        _key427 = iprot.readBinary()
                        _val428 = ConditionalUpdates()
                        _val428.read(iprot)
                        self.updates[_key427] = _val428
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('updateRowsConditionally_args')
        if self.conditionalWriter is not None:
            oprot.writeFieldBegin('conditionalWriter', TType.STRING, 1)
            oprot.writeString(self.conditionalWriter.encode('utf-8') if sys.version_info[0] == 2 else self.conditionalWriter)
            oprot.writeFieldEnd()
        if self.updates is not None:
            oprot.writeFieldBegin('updates', TType.MAP, 2)
            oprot.writeMapBegin(TType.STRING, TType.STRUCT, len(self.updates))
            for kiter429, viter430 in self.updates.items():
                oprot.writeBinary(kiter429)
                viter430.write(oprot)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class updateRowsConditionally_result(object):
    """
    Attributes:
     - success
     - ouch1
     - ouch2
     - ouch3
    """

    thrift_spec = (
        (0, TType.MAP, 'success', (TType.STRING, 'BINARY', TType.I32, None, False), None, ),  # 0
        (1, TType.STRUCT, 'ouch1', (UnknownWriter, UnknownWriter.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'ouch2', (AccumuloException, AccumuloException.thrift_spec), None, ),  # 2
        (3, TType.STRUCT, 'ouch3', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ),  # 3
    )

    def __init__(self, success=None, ouch1=None, ouch2=None, ouch3=None,):
        self.success = success
        self.ouch1 = ouch1
        self.ouch2 = ouch2
        self.ouch3 = ouch3

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.MAP:
                    self.success = {}
                    (_ktype432, _vtype433, _size431) = iprot.readMapBegin()
                    for _i435 in range(_size431):
                        _key436 = iprot.readBinary()
                        _val437 = iprot.readI32()
                        self.success[_key436] = _val437
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.ouch1 = UnknownWriter()
                    self.ouch1.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.ouch2 = AccumuloException()
                    self.ouch2.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.ouch3 = AccumuloSecurityException()
                    self.ouch3.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('updateRowsConditionally_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.MAP, 0)
            oprot.writeMapBegin(TType.STRING, TType.I32, len(self.success))
            for kiter438, viter439 in self.success.items():
                oprot.writeBinary(kiter438)
                oprot.writeI32(viter439)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.ouch1 is not None:
            oprot.writeFieldBegin('ouch1', TType.STRUCT, 1)
            self.ouch1.write(oprot)
            oprot.writeFieldEnd()
        if self.ouch2 is not None:
            oprot.writeFieldBegin('ouch2', TType.STRUCT, 2)
            self.ouch2.write(oprot)
            oprot.writeFieldEnd()
        if self.ouch3 is not None:
            oprot.writeFieldBegin('ouch3', TType.STRUCT, 3)
            self.ouch3.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class closeConditionalWriter_args(object):
    """
    Attributes:
     - conditionalWriter
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'conditionalWriter', 'UTF8', None, ),  # 1
    )

    def __init__(self, conditionalWriter=None,):
        self.conditionalWriter = conditionalWriter

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.conditionalWriter = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('closeConditionalWriter_args')
        if self.conditionalWriter is not None:
            oprot.writeFieldBegin('conditionalWriter', TType.STRING, 1)
            oprot.writeString(self.conditionalWriter.encode('utf-8') if sys.version_info[0] == 2 else self.conditionalWriter)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class closeConditionalWriter_result(object):
    # Void result: no fields beyond the STOP marker.

    thrift_spec = (
    )

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('closeConditionalWriter_result')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class getRowRange_args(object):
    """
    Attributes:
     - row
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'row', 'BINARY', None, ),  # 1
    )

    def __init__(self, row=None,):
        self.row = row

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.row = iprot.readBinary()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('getRowRange_args')
        if self.row is not None:
            oprot.writeFieldBegin('row', TType.STRING, 1)
            oprot.writeBinary(self.row)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class getRowRange_result(object):
    """
    Attributes:
     - success
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (Range, Range.thrift_spec), None, ),  # 0
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = Range()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('getRowRange_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class getFollowing_args(object):
    """
    Attributes:
     - key
     - part
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'key', (Key, Key.thrift_spec), None, ),  # 1
        (2, TType.I32, 'part', None, None, ),  # 2
    )

    def __init__(self, key=None, part=None,):
        self.key = key
        self.part = part

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.key = Key()
                    self.key.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I32:
                    self.part = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('getFollowing_args')
        if self.key is not None:
            oprot.writeFieldBegin('key', TType.STRUCT, 1)
            self.key.write(oprot)
            oprot.writeFieldEnd()
        if self.part is not None:
            oprot.writeFieldBegin('part', TType.I32, 2)
            oprot.writeI32(self.part)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class getFollowing_result(object):
    """
    Attributes:
     - success
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (Key, Key.thrift_spec), None, ),  # 0
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = Key()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('getFollowing_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class systemNamespace_args(object):
    # No arguments.

    thrift_spec = (
    )

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('systemNamespace_args')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class systemNamespace_result(object):
    """
    Attributes:
     - success
    """

    thrift_spec = (
        (0, TType.STRING, 'success', 'UTF8', None, ),  # 0
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRING:
                    self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('systemNamespace_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRING, 0)
            oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class defaultNamespace_args(object):
    # No arguments.

    thrift_spec = (
    )

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('defaultNamespace_args')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class defaultNamespace_result(object):
    """
    Attributes:
     - success
    """

    thrift_spec = (
        (0, TType.STRING, 'success', 'UTF8', None, ),  # 0
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRING:
                    self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('defaultNamespace_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRING, 0)
            oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class listNamespaces_args(object):
    """
    Attributes:
     - login
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'login', 'BINARY', None, ),  # 1
    )

    def __init__(self, login=None,):
        self.login = login

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.login = iprot.readBinary()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('listNamespaces_args')
        if self.login is not None:
            oprot.writeFieldBegin('login', TType.STRING, 1)
            oprot.writeBinary(self.login)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class listNamespaces_result(object):
    """
    Attributes:
     - success
     - ouch1
     - ouch2
    """

    thrift_spec = (
        (0, TType.LIST, 'success', (TType.STRING, 'UTF8', False), None, ),  # 0
        (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ),  # 2
    )

    def __init__(self, success=None, ouch1=None, ouch2=None,):
        self.success = success
        self.ouch1 = ouch1
        self.ouch2 = ouch2

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.LIST:
                    self.success = []
                    (_etype443, _size440) = iprot.readListBegin()
                    for _i444 in range(_size440):
                        _elem445 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        self.success.append(_elem445)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.ouch1 = AccumuloException()
                    self.ouch1.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.ouch2 = AccumuloSecurityException()
                    self.ouch2.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('listNamespaces_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.LIST, 0)
            oprot.writeListBegin(TType.STRING, len(self.success))
            for iter446 in self.success:
                oprot.writeString(iter446.encode('utf-8') if sys.version_info[0] == 2 else iter446)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.ouch1 is not None:
            oprot.writeFieldBegin('ouch1', TType.STRUCT, 1)
            self.ouch1.write(oprot)
            oprot.writeFieldEnd()
        if self.ouch2 is not None:
            oprot.writeFieldBegin('ouch2', TType.STRUCT, 2)
            self.ouch2.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class namespaceExists_args(object):
    """
    Attributes:
     - login
     - namespaceName
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'login', 'BINARY', None, ),  # 1
        (2, TType.STRING, 'namespaceName', 'UTF8', None, ),  # 2
    )

    def __init__(self, login=None, namespaceName=None,):
        self.login = login
        self.namespaceName = namespaceName

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.login = iprot.readBinary()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.namespaceName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('namespaceExists_args')
        if self.login is not None:
            oprot.writeFieldBegin('login', TType.STRING, 1)
            oprot.writeBinary(self.login)
            oprot.writeFieldEnd()
        if self.namespaceName is not None:
            oprot.writeFieldBegin('namespaceName', TType.STRING, 2)
            oprot.writeString(self.namespaceName.encode('utf-8') if sys.version_info[0] == 2 else self.namespaceName)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class namespaceExists_result(object):
    """
    Attributes:
     - success
     - ouch1
     - ouch2
    """

    thrift_spec = (
        (0, TType.BOOL, 'success', None, None, ),  # 0
        (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ),  # 2
    )

    def __init__(self, success=None, ouch1=None, ouch2=None,):
        self.success = success
        self.ouch1 = ouch1
        self.ouch2 = ouch2

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.BOOL:
                    self.success = iprot.readBool()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.ouch1 = AccumuloException()
                    self.ouch1.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.ouch2 = AccumuloSecurityException()
                    self.ouch2.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('namespaceExists_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.BOOL, 0)
            oprot.writeBool(self.success)
            oprot.writeFieldEnd()
        if self.ouch1 is not None:
            oprot.writeFieldBegin('ouch1', TType.STRUCT, 1)
            self.ouch1.write(oprot)
            oprot.writeFieldEnd()
        if self.ouch2 is not None:
            oprot.writeFieldBegin('ouch2', TType.STRUCT, 2)
            self.ouch2.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class createNamespace_args(object):
    """
    Attributes:
     - login
     - namespaceName
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'login', 'BINARY', None, ),  # 1
        (2, TType.STRING, 'namespaceName', 'UTF8', None, ),  # 2
    )

    def __init__(self, login=None, namespaceName=None,):
        self.login = login
        self.namespaceName = namespaceName

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.login = iprot.readBinary()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.namespaceName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('createNamespace_args')
        if self.login is not None:
            oprot.writeFieldBegin('login', TType.STRING, 1)
            oprot.writeBinary(self.login)
            oprot.writeFieldEnd()
        if self.namespaceName is not None:
            oprot.writeFieldBegin('namespaceName', TType.STRING, 2)
            oprot.writeString(self.namespaceName.encode('utf-8') if sys.version_info[0] == 2 else self.namespaceName)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class createNamespace_result(object):
    """
    Attributes:
     - ouch1
     - ouch2
     - ouch3
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ),  # 2
        (3, TType.STRUCT, 'ouch3', (NamespaceExistsException, NamespaceExistsException.thrift_spec), None, ),  # 3
    )

    def __init__(self, ouch1=None, ouch2=None, ouch3=None,):
        self.ouch1 = ouch1
        self.ouch2 = ouch2
        self.ouch3 = ouch3

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.ouch1 = AccumuloException()
                    self.ouch1.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.ouch2 = AccumuloSecurityException()
                    self.ouch2.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.ouch3 = NamespaceExistsException()
                    self.ouch3.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('createNamespace_result')
        if self.ouch1 is not None:
            oprot.writeFieldBegin('ouch1', TType.STRUCT, 1)
            self.ouch1.write(oprot)
            oprot.writeFieldEnd()
        if self.ouch2 is not None:
            oprot.writeFieldBegin('ouch2', TType.STRUCT, 2)
            self.ouch2.write(oprot)
            oprot.writeFieldEnd()
        if self.ouch3 is not None:
            oprot.writeFieldBegin('ouch3', TType.STRUCT, 3)
            self.ouch3.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class deleteNamespace_args(object):
    """
    Attributes:
     - login
     - namespaceName
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'login', 'BINARY', None, ),  # 1
        (2, TType.STRING, 'namespaceName', 'UTF8', None, ),  # 2
    )

    def __init__(self, login=None, namespaceName=None,):
        self.login = login
        self.namespaceName = namespaceName

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.login = iprot.readBinary()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.namespaceName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('deleteNamespace_args')
        if self.login is not None:
            oprot.writeFieldBegin('login', TType.STRING, 1)
            oprot.writeBinary(self.login)
            oprot.writeFieldEnd()
        if self.namespaceName is not None:
            oprot.writeFieldBegin('namespaceName', TType.STRING, 2)
            oprot.writeString(self.namespaceName.encode('utf-8') if sys.version_info[0] == 2 else self.namespaceName)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class deleteNamespace_result(object):
    """
    Attributes:
     - ouch1
     - ouch2
     - ouch3
     - ouch4
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ),  # 2
        (3, TType.STRUCT, 'ouch3', (NamespaceNotFoundException, NamespaceNotFoundException.thrift_spec), None, ),  # 3
        (4, TType.STRUCT, 'ouch4', (NamespaceNotEmptyException, NamespaceNotEmptyException.thrift_spec), None, ),  # 4
    )

    def __init__(self, ouch1=None, ouch2=None, ouch3=None, ouch4=None,):
        self.ouch1 = ouch1
        self.ouch2 = ouch2
        self.ouch3 = ouch3
        self.ouch4 = ouch4

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.ouch1 = AccumuloException()
                    self.ouch1.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.ouch2 = AccumuloSecurityException()
                    self.ouch2.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.ouch3 = NamespaceNotFoundException()
                    self.ouch3.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRUCT:
                    self.ouch4 = NamespaceNotEmptyException()
                    self.ouch4.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('deleteNamespace_result')
        if self.ouch1 is not None:
            oprot.writeFieldBegin('ouch1', TType.STRUCT, 1)
            self.ouch1.write(oprot)
            oprot.writeFieldEnd()
        if self.ouch2 is not None:
            oprot.writeFieldBegin('ouch2', TType.STRUCT, 2)
            self.ouch2.write(oprot)
            oprot.writeFieldEnd()
        if self.ouch3 is not None:
            oprot.writeFieldBegin('ouch3', TType.STRUCT, 3)
            self.ouch3.write(oprot)
            oprot.writeFieldEnd()
        if self.ouch4 is not None:
            oprot.writeFieldBegin('ouch4', TType.STRUCT, 4)
            self.ouch4.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class renameNamespace_args(object):
    """
    Attributes:
     - login
     - oldNamespaceName
     - newNamespaceName
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'login', 'BINARY', None, ),  # 1
        (2, TType.STRING, 'oldNamespaceName', 'UTF8', None, ),  # 2
        (3, TType.STRING, 'newNamespaceName', 'UTF8', None, ),  # 3
    )

    def __init__(self, login=None, oldNamespaceName=None, newNamespaceName=None,):
        self.login = login
        self.oldNamespaceName = oldNamespaceName
        self.newNamespaceName = newNamespaceName

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.login = iprot.readBinary()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.oldNamespaceName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.newNamespaceName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('renameNamespace_args')
        if self.login is not None:
            oprot.writeFieldBegin('login', TType.STRING, 1)
            oprot.writeBinary(self.login)
            oprot.writeFieldEnd()
        if self.oldNamespaceName is not None:
            oprot.writeFieldBegin('oldNamespaceName', TType.STRING, 2)
            oprot.writeString(self.oldNamespaceName.encode('utf-8') if sys.version_info[0] == 2 else self.oldNamespaceName)
            oprot.writeFieldEnd()
        if self.newNamespaceName is not None:
            oprot.writeFieldBegin('newNamespaceName', TType.STRING, 3)
            oprot.writeString(self.newNamespaceName.encode('utf-8') if sys.version_info[0] == 2 else self.newNamespaceName)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


class renameNamespace_result(object):
    """
    Attributes:
     - ouch1
     - ouch2
     - ouch3
     - ouch4
    """
    # NOTE(review): the recovered text for this class ends inside read(); the
    # remainder of the methods below was reconstructed from the thrift_spec
    # (which was fully visible) following the generator's standard pattern —
    # confirm against the original generated file.

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ),  # 2
        (3, TType.STRUCT, 'ouch3', (NamespaceNotFoundException, NamespaceNotFoundException.thrift_spec), None, ),  # 3
        (4, TType.STRUCT, 'ouch4', (NamespaceExistsException, NamespaceExistsException.thrift_spec), None, ),  # 4
    )

    def __init__(self, ouch1=None, ouch2=None, ouch3=None, ouch4=None,):
        self.ouch1 = ouch1
        self.ouch2 = ouch2
        self.ouch3 = ouch3
        self.ouch4 = ouch4

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.ouch1 = AccumuloException()
                    self.ouch1.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.ouch2 = AccumuloSecurityException()
                    self.ouch2.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.ouch3 = NamespaceNotFoundException()
                    self.ouch3.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRUCT:
                    self.ouch4 = NamespaceExistsException()
                    self.ouch4.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('renameNamespace_result')
        if self.ouch1 is not None:
            oprot.writeFieldBegin('ouch1', TType.STRUCT, 1)
            self.ouch1.write(oprot)
            oprot.writeFieldEnd()
        if self.ouch2 is not None:
            oprot.writeFieldBegin('ouch2', TType.STRUCT, 2)
            self.ouch2.write(oprot)
            oprot.writeFieldEnd()
        if self.ouch3 is not None:
            oprot.writeFieldBegin('ouch3', TType.STRUCT, 3)
            self.ouch3.write(oprot)
            oprot.writeFieldEnd()
        if self.ouch4 is not None:
            oprot.writeFieldBegin('ouch4', TType.STRUCT, 4)
            self.ouch4.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = NamespaceNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) elif fid == 4: if ftype == TType.STRUCT: self.ouch4 = NamespaceExistsException() self.ouch4.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('renameNamespace_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() if self.ouch4 is not None: oprot.writeFieldBegin('ouch4', TType.STRUCT, 4) self.ouch4.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class setNamespaceProperty_args(object): """ Attributes: - login - namespaceName - 
property - value """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'namespaceName', 'UTF8', None, ), # 2 (3, TType.STRING, 'property', 'UTF8', None, ), # 3 (4, TType.STRING, 'value', 'UTF8', None, ), # 4 ) def __init__(self, login=None, namespaceName=None, property=None, value=None,): self.login = login self.namespaceName = namespaceName self.property = property self.value = value def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.namespaceName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRING: self.property = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 4: if ftype == TType.STRING: self.value = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('setNamespaceProperty_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.namespaceName is not None: oprot.writeFieldBegin('namespaceName', TType.STRING, 2) oprot.writeString(self.namespaceName.encode('utf-8') if sys.version_info[0] == 2 else self.namespaceName) 
oprot.writeFieldEnd() if self.property is not None: oprot.writeFieldBegin('property', TType.STRING, 3) oprot.writeString(self.property.encode('utf-8') if sys.version_info[0] == 2 else self.property) oprot.writeFieldEnd() if self.value is not None: oprot.writeFieldBegin('value', TType.STRING, 4) oprot.writeString(self.value.encode('utf-8') if sys.version_info[0] == 2 else self.value) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class setNamespaceProperty_result(object): """ Attributes: - ouch1 - ouch2 - ouch3 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (NamespaceNotFoundException, NamespaceNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, ouch1=None, ouch2=None, ouch3=None,): self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = NamespaceNotFoundException() 
self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('setNamespaceProperty_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class removeNamespaceProperty_args(object): """ Attributes: - login - namespaceName - property """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'namespaceName', 'UTF8', None, ), # 2 (3, TType.STRING, 'property', 'UTF8', None, ), # 3 ) def __init__(self, login=None, namespaceName=None, property=None,): self.login = login self.namespaceName = namespaceName self.property = property def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: 
self.namespaceName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRING: self.property = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('removeNamespaceProperty_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.namespaceName is not None: oprot.writeFieldBegin('namespaceName', TType.STRING, 2) oprot.writeString(self.namespaceName.encode('utf-8') if sys.version_info[0] == 2 else self.namespaceName) oprot.writeFieldEnd() if self.property is not None: oprot.writeFieldBegin('property', TType.STRING, 3) oprot.writeString(self.property.encode('utf-8') if sys.version_info[0] == 2 else self.property) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class removeNamespaceProperty_result(object): """ Attributes: - ouch1 - ouch2 - ouch3 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (NamespaceNotFoundException, NamespaceNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, ouch1=None, ouch2=None, 
ouch3=None,): self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = NamespaceNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('removeNamespaceProperty_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class getNamespaceProperties_args(object): """ Attributes: - login - namespaceName """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, 
TType.STRING, 'namespaceName', 'UTF8', None, ), # 2 ) def __init__(self, login=None, namespaceName=None,): self.login = login self.namespaceName = namespaceName def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.namespaceName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('getNamespaceProperties_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.namespaceName is not None: oprot.writeFieldBegin('namespaceName', TType.STRING, 2) oprot.writeString(self.namespaceName.encode('utf-8') if sys.version_info[0] == 2 else self.namespaceName) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class getNamespaceProperties_result(object): """ Attributes: - success - ouch1 - ouch2 - ouch3 """ thrift_spec = ( (0, TType.MAP, 'success', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 0 (1, 
TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (NamespaceNotFoundException, NamespaceNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, success=None, ouch1=None, ouch2=None, ouch3=None,): self.success = success self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.MAP: self.success = {} (_ktype448, _vtype449, _size447) = iprot.readMapBegin() for _i451 in range(_size447): _key452 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() _val453 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() self.success[_key452] = _val453 iprot.readMapEnd() else: iprot.skip(ftype) elif fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = NamespaceNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('getNamespaceProperties_result') if self.success is not None: oprot.writeFieldBegin('success', TType.MAP, 0) oprot.writeMapBegin(TType.STRING, 
TType.STRING, len(self.success)) for kiter454, viter455 in self.success.items(): oprot.writeString(kiter454.encode('utf-8') if sys.version_info[0] == 2 else kiter454) oprot.writeString(viter455.encode('utf-8') if sys.version_info[0] == 2 else viter455) oprot.writeMapEnd() oprot.writeFieldEnd() if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class namespaceIdMap_args(object): """ Attributes: - login """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 ) def __init__(self, login=None,): self.login = login def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('namespaceIdMap_args') if self.login is not None: 
oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class namespaceIdMap_result(object): """ Attributes: - success - ouch1 - ouch2 """ thrift_spec = ( (0, TType.MAP, 'success', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 ) def __init__(self, success=None, ouch1=None, ouch2=None,): self.success = success self.ouch1 = ouch1 self.ouch2 = ouch2 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.MAP: self.success = {} (_ktype457, _vtype458, _size456) = iprot.readMapBegin() for _i460 in range(_size456): _key461 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() _val462 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() self.success[_key461] = _val462 iprot.readMapEnd() else: iprot.skip(ftype) elif fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) else: 
iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('namespaceIdMap_result') if self.success is not None: oprot.writeFieldBegin('success', TType.MAP, 0) oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.success)) for kiter463, viter464 in self.success.items(): oprot.writeString(kiter463.encode('utf-8') if sys.version_info[0] == 2 else kiter463) oprot.writeString(viter464.encode('utf-8') if sys.version_info[0] == 2 else viter464) oprot.writeMapEnd() oprot.writeFieldEnd() if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class attachNamespaceIterator_args(object): """ Attributes: - login - namespaceName - setting - scopes """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'namespaceName', 'UTF8', None, ), # 2 (3, TType.STRUCT, 'setting', (IteratorSetting, IteratorSetting.thrift_spec), None, ), # 3 (4, TType.SET, 'scopes', (TType.I32, None, False), None, ), # 4 ) def __init__(self, login=None, namespaceName=None, setting=None, scopes=None,): self.login = login self.namespaceName = namespaceName self.setting = setting self.scopes = scopes def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) 
and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.namespaceName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.setting = IteratorSetting() self.setting.read(iprot) else: iprot.skip(ftype) elif fid == 4: if ftype == TType.SET: self.scopes = set() (_etype468, _size465) = iprot.readSetBegin() for _i469 in range(_size465): _elem470 = iprot.readI32() self.scopes.add(_elem470) iprot.readSetEnd() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('attachNamespaceIterator_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.namespaceName is not None: oprot.writeFieldBegin('namespaceName', TType.STRING, 2) oprot.writeString(self.namespaceName.encode('utf-8') if sys.version_info[0] == 2 else self.namespaceName) oprot.writeFieldEnd() if self.setting is not None: oprot.writeFieldBegin('setting', TType.STRUCT, 3) self.setting.write(oprot) oprot.writeFieldEnd() if self.scopes is not None: oprot.writeFieldBegin('scopes', TType.SET, 4) oprot.writeSetBegin(TType.I32, len(self.scopes)) for iter471 in self.scopes: oprot.writeI32(iter471) oprot.writeSetEnd() oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return 
'%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class attachNamespaceIterator_result(object): """ Attributes: - ouch1 - ouch2 - ouch3 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (NamespaceNotFoundException, NamespaceNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, ouch1=None, ouch2=None, ouch3=None,): self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = NamespaceNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('attachNamespaceIterator_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) 
oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class removeNamespaceIterator_args(object): """ Attributes: - login - namespaceName - name - scopes """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'namespaceName', 'UTF8', None, ), # 2 (3, TType.STRING, 'name', 'UTF8', None, ), # 3 (4, TType.SET, 'scopes', (TType.I32, None, False), None, ), # 4 ) def __init__(self, login=None, namespaceName=None, name=None, scopes=None,): self.login = login self.namespaceName = namespaceName self.name = name self.scopes = scopes def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.namespaceName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRING: self.name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 4: if ftype == TType.SET: self.scopes = set() (_etype475, _size472) = iprot.readSetBegin() for _i476 in range(_size472): _elem477 = iprot.readI32() self.scopes.add(_elem477) 
iprot.readSetEnd() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('removeNamespaceIterator_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.namespaceName is not None: oprot.writeFieldBegin('namespaceName', TType.STRING, 2) oprot.writeString(self.namespaceName.encode('utf-8') if sys.version_info[0] == 2 else self.namespaceName) oprot.writeFieldEnd() if self.name is not None: oprot.writeFieldBegin('name', TType.STRING, 3) oprot.writeString(self.name.encode('utf-8') if sys.version_info[0] == 2 else self.name) oprot.writeFieldEnd() if self.scopes is not None: oprot.writeFieldBegin('scopes', TType.SET, 4) oprot.writeSetBegin(TType.I32, len(self.scopes)) for iter478 in self.scopes: oprot.writeI32(iter478) oprot.writeSetEnd() oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class removeNamespaceIterator_result(object): """ Attributes: - ouch1 - ouch2 - ouch3 """ thrift_spec = ( None, # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (NamespaceNotFoundException, NamespaceNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, ouch1=None, ouch2=None, ouch3=None,): self.ouch1 = ouch1 self.ouch2 = ouch2 self.ouch3 = 
class getNamespaceIteratorSetting_args(object):
    """Argument struct for the getNamespaceIteratorSetting RPC.

    Attributes:
     - login
     - namespaceName
     - name
     - scope
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'login', 'BINARY', None, ),  # 1
        (2, TType.STRING, 'namespaceName', 'UTF8', None, ),  # 2
        (3, TType.STRING, 'name', 'UTF8', None, ),  # 3
        (4, TType.I32, 'scope', None, None, ),  # 4
    )

    def __init__(self, login=None, namespaceName=None, name=None, scope=None,):
        self.login = login
        self.namespaceName = namespaceName
        self.name = name
        self.scope = scope

    def read(self, iprot):
        """Populate this struct from the protocol *iprot*."""
        # Accelerated C decode path when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (_, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            # Accept a field only when both id and wire type match; anything
            # else is skipped for forward compatibility.
            if fid == 1 and ftype == TType.STRING:
                self.login = iprot.readBinary()
            elif fid == 2 and ftype == TType.STRING:
                self.namespaceName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
            elif fid == 3 and ftype == TType.STRING:
                self.name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
            elif fid == 4 and ftype == TType.I32:
                self.scope = iprot.readI32()
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the protocol *oprot* in field-id order."""
        # Accelerated C encode path when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('getNamespaceIteratorSetting_args')
        if self.login is not None:
            oprot.writeFieldBegin('login', TType.STRING, 1)
            oprot.writeBinary(self.login)
            oprot.writeFieldEnd()
        # The two UTF8 text fields share the same encode logic.
        for fid, fname, value in ((2, 'namespaceName', self.namespaceName),
                                  (3, 'name', self.name)):
            if value is not None:
                oprot.writeFieldBegin(fname, TType.STRING, fid)
                oprot.writeString(value.encode('utf-8') if sys.version_info[0] == 2 else value)
                oprot.writeFieldEnd()
        if self.scope is not None:
            oprot.writeFieldBegin('scope', TType.I32, 4)
            oprot.writeI32(self.scope)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        pairs = ('%s=%r' % item for item in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
# Thrift-generated result wrapper for the getNamespaceIteratorSetting RPC.
# Field 0 ('success') carries the returned IteratorSetting; fields 1-3 carry
# the declared service exceptions. At most one is normally set.
class getNamespaceIteratorSetting_result(object):
    """
    Attributes:
     - success
     - ouch1
     - ouch2
     - ouch3
    """

    # (field id, wire type, name, nested type info, default) per field;
    # index 0 is the RPC return value by Thrift convention.
    thrift_spec = (
        (0, TType.STRUCT, 'success', (IteratorSetting, IteratorSetting.thrift_spec), None, ),  # 0
        (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ),  # 2
        (3, TType.STRUCT, 'ouch3', (NamespaceNotFoundException, NamespaceNotFoundException.thrift_spec), None, ),  # 3
    )

    def __init__(self, success=None, ouch1=None, ouch2=None, ouch3=None,):
        self.success = success
        self.ouch1 = ouch1
        self.ouch2 = ouch2
        self.ouch3 = ouch3

    def read(self, iprot):
        # Prefer the accelerated C decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = IteratorSetting()
                    self.success.read(iprot)
                else:
                    # Wrong wire type: skip for forward compatibility.
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.ouch1 = AccumuloException()
                    self.ouch1.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.ouch2 = AccumuloSecurityException()
                    self.ouch2.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.ouch3 = NamespaceNotFoundException()
                    self.ouch3.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Prefer the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('getNamespaceIteratorSetting_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.ouch1 is not None:
            oprot.writeFieldBegin('ouch1', TType.STRUCT, 1)
            self.ouch1.write(oprot)
            oprot.writeFieldEnd()
        if self.ouch2 is not None:
            oprot.writeFieldBegin('ouch2', TType.STRUCT, 2)
            self.ouch2.write(oprot)
            oprot.writeFieldEnd()
        if self.ouch3 is not None:
            oprot.writeFieldBegin('ouch3', TType.STRUCT, 3)
            self.ouch3.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class listNamespaceIterators_args(object):
    """Argument struct for the listNamespaceIterators RPC.

    Attributes:
     - login
     - namespaceName
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'login', 'BINARY', None, ),  # 1
        (2, TType.STRING, 'namespaceName', 'UTF8', None, ),  # 2
    )

    def __init__(self, login=None, namespaceName=None,):
        self.login = login
        self.namespaceName = namespaceName

    def read(self, iprot):
        """Populate this struct from the protocol *iprot*."""
        # Accelerated C decode path when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (_, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            # Accept a field only when both id and wire type match; anything
            # else is skipped for forward compatibility.
            if fid == 1 and ftype == TType.STRING:
                self.login = iprot.readBinary()
            elif fid == 2 and ftype == TType.STRING:
                self.namespaceName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the protocol *oprot* in field-id order."""
        # Accelerated C encode path when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('listNamespaceIterators_args')
        if self.login is not None:
            oprot.writeFieldBegin('login', TType.STRING, 1)
            oprot.writeBinary(self.login)
            oprot.writeFieldEnd()
        if self.namespaceName is not None:
            oprot.writeFieldBegin('namespaceName', TType.STRING, 2)
            oprot.writeString(self.namespaceName.encode('utf-8') if sys.version_info[0] == 2 else self.namespaceName)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        pairs = ('%s=%r' % item for item in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
# Thrift-generated result wrapper for the listNamespaceIterators RPC.
# Field 0 ('success') is a map of string -> set of i32 values (per the
# thrift_spec; presumably iterator name -> scope enum values — confirm
# against the proxy IDL); fields 1-3 carry the declared exceptions.
class listNamespaceIterators_result(object):
    """
    Attributes:
     - success
     - ouch1
     - ouch2
     - ouch3
    """

    # (field id, wire type, name, nested type info, default) per field;
    # index 0 is the RPC return value by Thrift convention.
    thrift_spec = (
        (0, TType.MAP, 'success', (TType.STRING, 'UTF8', TType.SET, (TType.I32, None, False), False), None, ),  # 0
        (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ),  # 2
        (3, TType.STRUCT, 'ouch3', (NamespaceNotFoundException, NamespaceNotFoundException.thrift_spec), None, ),  # 3
    )

    def __init__(self, success=None, ouch1=None, ouch2=None, ouch3=None,):
        self.success = success
        self.ouch1 = ouch1
        self.ouch2 = ouch2
        self.ouch3 = ouch3

    def read(self, iprot):
        # Prefer the accelerated C decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.MAP:
                    # Rebuild the map<string, set<i32>> element by element.
                    self.success = {}
                    (_ktype480, _vtype481, _size479) = iprot.readMapBegin()
                    for _i483 in range(_size479):
                        _key484 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val485 = set()
                        (_etype489, _size486) = iprot.readSetBegin()
                        for _i490 in range(_size486):
                            _elem491 = iprot.readI32()
                            _val485.add(_elem491)
                        iprot.readSetEnd()
                        self.success[_key484] = _val485
                    iprot.readMapEnd()
                else:
                    # Wrong wire type: skip for forward compatibility.
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.ouch1 = AccumuloException()
                    self.ouch1.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.ouch2 = AccumuloSecurityException()
                    self.ouch2.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.ouch3 = NamespaceNotFoundException()
                    self.ouch3.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Prefer the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('listNamespaceIterators_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.MAP, 0)
            oprot.writeMapBegin(TType.STRING, TType.SET, len(self.success))
            for kiter492, viter493 in self.success.items():
                oprot.writeString(kiter492.encode('utf-8') if sys.version_info[0] == 2 else kiter492)
                oprot.writeSetBegin(TType.I32, len(viter493))
                for iter494 in viter493:
                    oprot.writeI32(iter494)
                oprot.writeSetEnd()
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.ouch1 is not None:
            oprot.writeFieldBegin('ouch1', TType.STRUCT, 1)
            self.ouch1.write(oprot)
            oprot.writeFieldEnd()
        if self.ouch2 is not None:
            oprot.writeFieldBegin('ouch2', TType.STRUCT, 2)
            self.ouch2.write(oprot)
            oprot.writeFieldEnd()
        if self.ouch3 is not None:
            oprot.writeFieldBegin('ouch3', TType.STRUCT, 3)
            self.ouch3.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
# Thrift-generated argument struct for the checkNamespaceIteratorConflicts
# RPC: login token, target namespace, the candidate IteratorSetting, and a
# set of i32 scope values to check.
class checkNamespaceIteratorConflicts_args(object):
    """
    Attributes:
     - login
     - namespaceName
     - setting
     - scopes
    """

    # (field id, wire type, name, nested type info, default) per field.
    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'login', 'BINARY', None, ),  # 1
        (2, TType.STRING, 'namespaceName', 'UTF8', None, ),  # 2
        (3, TType.STRUCT, 'setting', (IteratorSetting, IteratorSetting.thrift_spec), None, ),  # 3
        (4, TType.SET, 'scopes', (TType.I32, None, False), None, ),  # 4
    )

    def __init__(self, login=None, namespaceName=None, setting=None, scopes=None,):
        self.login = login
        self.namespaceName = namespaceName
        self.setting = setting
        self.scopes = scopes

    def read(self, iprot):
        # Prefer the accelerated C decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.login = iprot.readBinary()
                else:
                    # Wrong wire type: skip for forward compatibility.
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.namespaceName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.setting = IteratorSetting()
                    self.setting.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.SET:
                    # Rebuild the set<i32> of scopes element by element.
                    self.scopes = set()
                    (_etype498, _size495) = iprot.readSetBegin()
                    for _i499 in range(_size495):
                        _elem500 = iprot.readI32()
                        self.scopes.add(_elem500)
                    iprot.readSetEnd()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Prefer the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('checkNamespaceIteratorConflicts_args')
        if self.login is not None:
            oprot.writeFieldBegin('login', TType.STRING, 1)
            oprot.writeBinary(self.login)
            oprot.writeFieldEnd()
        if self.namespaceName is not None:
            oprot.writeFieldBegin('namespaceName', TType.STRING, 2)
            oprot.writeString(self.namespaceName.encode('utf-8') if sys.version_info[0] == 2 else self.namespaceName)
            oprot.writeFieldEnd()
        if self.setting is not None:
            oprot.writeFieldBegin('setting', TType.STRUCT, 3)
            self.setting.write(oprot)
            oprot.writeFieldEnd()
        if self.scopes is not None:
            oprot.writeFieldBegin('scopes', TType.SET, 4)
            oprot.writeSetBegin(TType.I32, len(self.scopes))
            for iter501 in self.scopes:
                oprot.writeI32(iter501)
            oprot.writeSetEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class checkNamespaceIteratorConflicts_result(object):
    """Result struct for the checkNamespaceIteratorConflicts RPC.

    Carries no return value field; only the declared service exceptions
    are transported (at most one is normally set).

    Attributes:
     - ouch1
     - ouch2
     - ouch3
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ),  # 2
        (3, TType.STRUCT, 'ouch3', (NamespaceNotFoundException, NamespaceNotFoundException.thrift_spec), None, ),  # 3
    )

    def __init__(self, ouch1=None, ouch2=None, ouch3=None,):
        self.ouch1 = ouch1
        self.ouch2 = ouch2
        self.ouch3 = ouch3

    def read(self, iprot):
        """Populate this struct from the protocol *iprot*."""
        # Accelerated C decode path when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (_, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            # Accept a field only when both id and wire type match; anything
            # else is skipped for forward compatibility.
            if fid == 1 and ftype == TType.STRUCT:
                self.ouch1 = AccumuloException()
                self.ouch1.read(iprot)
            elif fid == 2 and ftype == TType.STRUCT:
                self.ouch2 = AccumuloSecurityException()
                self.ouch2.read(iprot)
            elif fid == 3 and ftype == TType.STRUCT:
                self.ouch3 = NamespaceNotFoundException()
                self.ouch3.read(iprot)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the protocol *oprot* in field-id order."""
        # Accelerated C encode path when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('checkNamespaceIteratorConflicts_result')
        for fid, fname, value in ((1, 'ouch1', self.ouch1),
                                  (2, 'ouch2', self.ouch2),
                                  (3, 'ouch3', self.ouch3)):
            if value is not None:
                oprot.writeFieldBegin(fname, TType.STRUCT, fid)
                value.write(oprot)
                oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        pairs = ('%s=%r' % item for item in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class addNamespaceConstraint_args(object):
    """Argument struct for the addNamespaceConstraint RPC.

    Attributes:
     - login
     - namespaceName
     - constraintClassName
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'login', 'BINARY', None, ),  # 1
        (2, TType.STRING, 'namespaceName', 'UTF8', None, ),  # 2
        (3, TType.STRING, 'constraintClassName', 'UTF8', None, ),  # 3
    )

    def __init__(self, login=None, namespaceName=None, constraintClassName=None,):
        self.login = login
        self.namespaceName = namespaceName
        self.constraintClassName = constraintClassName

    def read(self, iprot):
        """Populate this struct from the protocol *iprot*."""
        # Accelerated C decode path when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (_, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            # Accept a field only when both id and wire type match; anything
            # else is skipped for forward compatibility.
            if fid == 1 and ftype == TType.STRING:
                self.login = iprot.readBinary()
            elif fid == 2 and ftype == TType.STRING:
                self.namespaceName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
            elif fid == 3 and ftype == TType.STRING:
                self.constraintClassName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the protocol *oprot* in field-id order."""
        # Accelerated C encode path when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('addNamespaceConstraint_args')
        if self.login is not None:
            oprot.writeFieldBegin('login', TType.STRING, 1)
            oprot.writeBinary(self.login)
            oprot.writeFieldEnd()
        # The two UTF8 text fields share the same encode logic.
        for fid, fname, value in ((2, 'namespaceName', self.namespaceName),
                                  (3, 'constraintClassName', self.constraintClassName)):
            if value is not None:
                oprot.writeFieldBegin(fname, TType.STRING, fid)
                oprot.writeString(value.encode('utf-8') if sys.version_info[0] == 2 else value)
                oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        pairs = ('%s=%r' % item for item in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
# Thrift-generated result wrapper for the addNamespaceConstraint RPC.
# Field 0 ('success') is the RPC's i32 return value (presumably the id of
# the newly added constraint — confirm against the proxy IDL); fields 1-3
# carry the declared service exceptions.
class addNamespaceConstraint_result(object):
    """
    Attributes:
     - success
     - ouch1
     - ouch2
     - ouch3
    """

    # (field id, wire type, name, nested type info, default) per field;
    # index 0 is the RPC return value by Thrift convention.
    thrift_spec = (
        (0, TType.I32, 'success', None, None, ),  # 0
        (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ),  # 2
        (3, TType.STRUCT, 'ouch3', (NamespaceNotFoundException, NamespaceNotFoundException.thrift_spec), None, ),  # 3
    )

    def __init__(self, success=None, ouch1=None, ouch2=None, ouch3=None,):
        self.success = success
        self.ouch1 = ouch1
        self.ouch2 = ouch2
        self.ouch3 = ouch3

    def read(self, iprot):
        # Prefer the accelerated C decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.I32:
                    self.success = iprot.readI32()
                else:
                    # Wrong wire type: skip for forward compatibility.
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.ouch1 = AccumuloException()
                    self.ouch1.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.ouch2 = AccumuloSecurityException()
                    self.ouch2.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.ouch3 = NamespaceNotFoundException()
                    self.ouch3.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Prefer the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('addNamespaceConstraint_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.I32, 0)
            oprot.writeI32(self.success)
            oprot.writeFieldEnd()
        if self.ouch1 is not None:
            oprot.writeFieldBegin('ouch1', TType.STRUCT, 1)
            self.ouch1.write(oprot)
            oprot.writeFieldEnd()
        if self.ouch2 is not None:
            oprot.writeFieldBegin('ouch2', TType.STRUCT, 2)
            self.ouch2.write(oprot)
            oprot.writeFieldEnd()
        if self.ouch3 is not None:
            oprot.writeFieldBegin('ouch3', TType.STRUCT, 3)
            self.ouch3.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class removeNamespaceConstraint_args(object):
    """Argument struct for the removeNamespaceConstraint RPC.

    Attributes:
     - login
     - namespaceName
     - id
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'login', 'BINARY', None, ),  # 1
        (2, TType.STRING, 'namespaceName', 'UTF8', None, ),  # 2
        (3, TType.I32, 'id', None, None, ),  # 3
    )

    # NOTE(review): the attribute name 'id' shadows the builtin, but it comes
    # from the Thrift IDL and is part of the generated interface; keep as-is.
    def __init__(self, login=None, namespaceName=None, id=None,):
        self.login = login
        self.namespaceName = namespaceName
        self.id = id

    def read(self, iprot):
        """Populate this struct from the protocol *iprot*."""
        # Accelerated C decode path when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (_, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            # Accept a field only when both id and wire type match; anything
            # else is skipped for forward compatibility.
            if fid == 1 and ftype == TType.STRING:
                self.login = iprot.readBinary()
            elif fid == 2 and ftype == TType.STRING:
                self.namespaceName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
            elif fid == 3 and ftype == TType.I32:
                self.id = iprot.readI32()
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the protocol *oprot* in field-id order."""
        # Accelerated C encode path when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('removeNamespaceConstraint_args')
        if self.login is not None:
            oprot.writeFieldBegin('login', TType.STRING, 1)
            oprot.writeBinary(self.login)
            oprot.writeFieldEnd()
        if self.namespaceName is not None:
            oprot.writeFieldBegin('namespaceName', TType.STRING, 2)
            oprot.writeString(self.namespaceName.encode('utf-8') if sys.version_info[0] == 2 else self.namespaceName)
            oprot.writeFieldEnd()
        if self.id is not None:
            oprot.writeFieldBegin('id', TType.I32, 3)
            oprot.writeI32(self.id)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        pairs = ('%s=%r' % item for item in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = NamespaceNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('removeNamespaceConstraint_result') if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class listNamespaceConstraints_args(object): """ Attributes: - login - namespaceName """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'namespaceName', 'UTF8', None, ), # 2 ) def __init__(self, login=None, namespaceName=None,): self.login = login self.namespaceName = namespaceName def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = 
iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.namespaceName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('listNamespaceConstraints_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.namespaceName is not None: oprot.writeFieldBegin('namespaceName', TType.STRING, 2) oprot.writeString(self.namespaceName.encode('utf-8') if sys.version_info[0] == 2 else self.namespaceName) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class listNamespaceConstraints_result(object): """ Attributes: - success - ouch1 - ouch2 - ouch3 """ thrift_spec = ( (0, TType.MAP, 'success', (TType.STRING, 'UTF8', TType.I32, None, False), None, ), # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (NamespaceNotFoundException, NamespaceNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, success=None, ouch1=None, ouch2=None, ouch3=None,): self.success = success self.ouch1 = ouch1 self.ouch2 = ouch2 
self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.MAP: self.success = {} (_ktype503, _vtype504, _size502) = iprot.readMapBegin() for _i506 in range(_size502): _key507 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() _val508 = iprot.readI32() self.success[_key507] = _val508 iprot.readMapEnd() else: iprot.skip(ftype) elif fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = NamespaceNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('listNamespaceConstraints_result') if self.success is not None: oprot.writeFieldBegin('success', TType.MAP, 0) oprot.writeMapBegin(TType.STRING, TType.I32, len(self.success)) for kiter509, viter510 in self.success.items(): oprot.writeString(kiter509.encode('utf-8') if sys.version_info[0] == 2 else kiter509) oprot.writeI32(viter510) oprot.writeMapEnd() oprot.writeFieldEnd() if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: 
oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class testNamespaceClassLoad_args(object): """ Attributes: - login - namespaceName - className - asTypeName """ thrift_spec = ( None, # 0 (1, TType.STRING, 'login', 'BINARY', None, ), # 1 (2, TType.STRING, 'namespaceName', 'UTF8', None, ), # 2 (3, TType.STRING, 'className', 'UTF8', None, ), # 3 (4, TType.STRING, 'asTypeName', 'UTF8', None, ), # 4 ) def __init__(self, login=None, namespaceName=None, className=None, asTypeName=None,): self.login = login self.namespaceName = namespaceName self.className = className self.asTypeName = asTypeName def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.login = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.namespaceName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRING: self.className = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 4: if ftype == TType.STRING: self.asTypeName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) else: iprot.skip(ftype) 
iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('testNamespaceClassLoad_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() if self.namespaceName is not None: oprot.writeFieldBegin('namespaceName', TType.STRING, 2) oprot.writeString(self.namespaceName.encode('utf-8') if sys.version_info[0] == 2 else self.namespaceName) oprot.writeFieldEnd() if self.className is not None: oprot.writeFieldBegin('className', TType.STRING, 3) oprot.writeString(self.className.encode('utf-8') if sys.version_info[0] == 2 else self.className) oprot.writeFieldEnd() if self.asTypeName is not None: oprot.writeFieldBegin('asTypeName', TType.STRING, 4) oprot.writeString(self.asTypeName.encode('utf-8') if sys.version_info[0] == 2 else self.asTypeName) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class testNamespaceClassLoad_result(object): """ Attributes: - success - ouch1 - ouch2 - ouch3 """ thrift_spec = ( (0, TType.BOOL, 'success', None, None, ), # 0 (1, TType.STRUCT, 'ouch1', (AccumuloException, AccumuloException.thrift_spec), None, ), # 1 (2, TType.STRUCT, 'ouch2', (AccumuloSecurityException, AccumuloSecurityException.thrift_spec), None, ), # 2 (3, TType.STRUCT, 'ouch3', (NamespaceNotFoundException, NamespaceNotFoundException.thrift_spec), None, ), # 3 ) def __init__(self, success=None, ouch1=None, ouch2=None, ouch3=None,): self.success = success self.ouch1 = ouch1 
self.ouch2 = ouch2 self.ouch3 = ouch3 def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.BOOL: self.success = iprot.readBool() else: iprot.skip(ftype) elif fid == 1: if ftype == TType.STRUCT: self.ouch1 = AccumuloException() self.ouch1.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ouch2 = AccumuloSecurityException() self.ouch2.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.ouch3 = NamespaceNotFoundException() self.ouch3.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('testNamespaceClassLoad_result') if self.success is not None: oprot.writeFieldBegin('success', TType.BOOL, 0) oprot.writeBool(self.success) oprot.writeFieldEnd() if self.ouch1 is not None: oprot.writeFieldBegin('ouch1', TType.STRUCT, 1) self.ouch1.write(oprot) oprot.writeFieldEnd() if self.ouch2 is not None: oprot.writeFieldBegin('ouch2', TType.STRUCT, 2) self.ouch2.write(oprot) oprot.writeFieldEnd() if self.ouch3 is not None: oprot.writeFieldBegin('ouch3', TType.STRUCT, 3) self.ouch3.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not 
(self == other)
36.113803
160
0.574619
4a14cd73b9acee1f97938e9040758e7205b83db8
4,398
py
Python
quetz/jobs/rest_models.py
maresb/quetz
55313ca9c2ae04577d23a1dddb38c045b4a056f4
[ "BSD-3-Clause" ]
108
2020-09-16T16:15:01.000Z
2022-03-29T02:49:31.000Z
quetz/jobs/rest_models.py
maresb/quetz
55313ca9c2ae04577d23a1dddb38c045b4a056f4
[ "BSD-3-Clause" ]
317
2020-09-07T18:37:33.000Z
2022-03-25T13:10:41.000Z
quetz/jobs/rest_models.py
maresb/quetz
55313ca9c2ae04577d23a1dddb38c045b4a056f4
[ "BSD-3-Clause" ]
36
2020-09-07T22:01:27.000Z
2022-03-26T17:06:07.000Z
import logging import pickle import uuid from datetime import datetime from typing import Optional from importlib_metadata import entry_points as get_entry_points from pydantic import BaseModel, Field, validator from . import handlers from .models import JobStatus, TaskStatus logger = logging.getLogger("quetz") def parse_job_manifest(function_name): """validate and parse job function name from a string Examples: parse_job_manifest("some_function") returns one of the built-in functions registered in quetz.jobs.handlers modules parse_job_manifest("plugin:function_name") returns a function from a moduled registered with plugin's quetz.jobs entrypoint parse_job_manifest("non_existent_function") raises ValueError for unknown functions """ paths = function_name.split(":") if len(paths) == 2: plugin_name, job_name = paths entry_points = tuple( get_entry_points().select(group='quetz.jobs', name=plugin_name) ) if not entry_points: raise ValueError( f"invalid function {function_name}: " f"plugin {plugin_name} not installed" ) job_module = entry_points[0].load() try: return getattr(job_module, job_name) except AttributeError: raise ValueError( f"invalid function '{job_name}' name in plugin '{plugin_name}'" ) elif len(paths) == 1: try: return handlers.JOB_HANDLERS[function_name] except KeyError: raise ValueError( f"invalid function {function_name}: no such built-in function," " please provide plugin name" ) else: raise ValueError(f"invalid function {function_name} - could not parse") def parse_job_name(v): try: return v.decode("ascii") except UnicodeDecodeError: pass # try unpickling try: func = pickle.loads(v) return f"{func.__module__}:{func.__name__}" except pickle.UnpicklingError: raise ValueError("could not parse manifest") except ModuleNotFoundError as e: logger.error(f"job function not found: could not import module {e.name}") return f"{e.name}:undefined" class JobBase(BaseModel): """New job spec""" items_spec: str = Field(..., title='Item selector spec') manifest: str 
= Field(None, title='Name of the function') start_at: Optional[datetime] = Field( None, title="date and time the job should start, if None it starts immediately" ) repeat_every_seconds: Optional[int] = Field( None, title=( "interval in seconds at which the job should be repeated, " "if None it is a one-off job" ), ) @validator("manifest", pre=True) def validate_job_name(cls, function_name): if isinstance(function_name, bytes): return parse_job_name(function_name) parse_job_manifest(function_name) return function_name.encode('ascii') class JobUpdateModel(BaseModel): """Modify job spec items (status and items_spec)""" items_spec: str = Field(None, title='Item selector spec') status: JobStatus = Field(None, title='Change status') force: bool = Field(False, title="force re-running job on all matching packages") class Job(JobBase): id: int = Field(None, title='Unique id for job') owner_id: uuid.UUID = Field(None, title='User id of the owner') created: datetime = Field(None, title='Created at') status: JobStatus = Field(None, title='Status of the job (running, paused, ...)') items_spec: str = Field(None, title='Item selector spec') class Config: orm_mode = True class Task(BaseModel): id: int = Field(None, title='Unique id for task') job_id: int = Field(None, title='ID of the parent job') package_version: dict = Field(None, title='Package version') created: datetime = Field(None, title='Created at') status: TaskStatus = Field(None, title='Status of the task (running, paused, ...)') @validator("package_version", pre=True) def convert_package_version(cls, v): if v: return {'filename': v.filename, 'id': uuid.UUID(bytes=v.id).hex} else: return {} class Config: orm_mode = True
29.32
87
0.645975
4a14cd7868b0061c5183291d7d5c6d6e9955ef57
1,568
py
Python
lookup_extensions/backends/postgresql/base.py
uncovertruth/django-lookup-extensions
3a8a57130c9092fc6b2458041084746488720b57
[ "MIT" ]
4
2018-05-23T08:01:55.000Z
2019-01-18T00:51:11.000Z
lookup_extensions/backends/postgresql/base.py
uncovertruth/django-lookup-extensions
3a8a57130c9092fc6b2458041084746488720b57
[ "MIT" ]
506
2018-02-22T07:52:29.000Z
2019-11-04T14:26:27.000Z
lookup_extensions/backends/postgresql/base.py
uncovertruth/django-lookup-extensions
3a8a57130c9092fc6b2458041084746488720b57
[ "MIT" ]
null
null
null
from django.db.backends.postgresql.base import \ DatabaseWrapper as DjangoDatabaseWrapper from lookup_extensions.utils import merge_dicts from .operations import DatabaseOperations class ExtendedDatabaseWrapperMixin(object): ops_class = DatabaseOperations operators = merge_dicts( DjangoDatabaseWrapper.operators, { # For negates 'neexact': '<> %s', 'neiexact': '<> UPPER(%s)', 'necontains': 'NOT LIKE %s', 'neicontains': 'NOT LIKE UPPER(%s)', 'neregex': '!~ %s', 'neiregex': '!~* %s', 'nestartswith': 'NOT LIKE %s', 'neendswith': 'NOT LIKE %s', 'neistartswith': 'NOT LIKE UPPER(%s)', 'neiendswith': 'NOT LIKE UPPER(%s)', # For exregex 'exregex': '~ %s', 'exiregex': '~* %s', 'neexregex': '!~ %s', 'neexiregex': '!~* %s', } ) pattern_ops = merge_dicts( DjangoDatabaseWrapper.pattern_ops, { 'necontains': r"NOT LIKE '%%' || {} || '%%'", 'neicontains': r"NOT LIKE '%%' || UPPER({}) || '%%'", 'nestartswith': r"NOT LIKE {} || '%%'", 'neistartswith': r"NOT LIKE UPPER({}) || '%%'", 'neendswith': r"NOT LIKE '%%' || {}", 'neiendswith': r"NOT LIKE '%%' || UPPER({})", } ) regex_synonyms = { '\\<': '[[:<:]]', '\\>': '[[:>:]]', } class DatabaseWrapper(ExtendedDatabaseWrapperMixin, DjangoDatabaseWrapper): pass
30.745098
75
0.492985
4a14cdb0e6bc28be851d1c545571d2ff32f2505d
4,069
py
Python
mbed_tools_ci_scripts/generate_news.py
urutva/mbed-tools-ci-scripts
eef162a47f56f95cbb7ecaeac1e629ca8abd7a94
[ "Apache-2.0" ]
2
2020-04-30T13:44:37.000Z
2020-06-11T09:39:10.000Z
mbed_tools_ci_scripts/generate_news.py
urutva/mbed-tools-ci-scripts
eef162a47f56f95cbb7ecaeac1e629ca8abd7a94
[ "Apache-2.0" ]
43
2020-02-04T17:25:24.000Z
2021-02-17T19:40:09.000Z
mbed_tools_ci_scripts/generate_news.py
urutva/mbed-tools-ci-scripts
eef162a47f56f95cbb7ecaeac1e629ca8abd7a94
[ "Apache-2.0" ]
4
2020-04-19T16:29:29.000Z
2020-11-13T12:08:31.000Z
# # Copyright (C) 2020 Arm Mbed. All rights reserved. # SPDX-License-Identifier: Apache-2.0 # """Handles usage of towncrier for automated changelog generation and pyautoversion for versioning.""" import sys import argparse import logging import os import subprocess from auto_version import auto_version_tool from mbed_tools_ci_scripts.utils.definitions import CommitType from mbed_tools_ci_scripts.utils.configuration import configuration, ConfigurationVariable from mbed_tools_ci_scripts.utils.logging import log_exception, set_log_level from mbed_tools_ci_scripts.utils.filesystem_helpers import cd from typing import Optional, Tuple logger = logging.getLogger(__name__) def version_project(commit_type: CommitType) -> Tuple[bool, Optional[str]]: """Versions the project. Args: commit_type: states what is the type of the commit Returns: (is new version, the new version) """ use_news_files = commit_type in [CommitType.BETA, CommitType.RELEASE] is_new_version, new_version = _calculate_version(commit_type, use_news_files) _generate_changelog(new_version, use_news_files) return is_new_version, new_version def _calculate_version(commit_type: CommitType, use_news_files: bool) -> Tuple[bool, Optional[str]]: """Calculates the version for the release. eg. 
"0.1.2" Args: commit_type: use_news_files: Should the version be dependant on changes recorded in news files Returns: Tuple containing a flag stating whether it is a new version or not A semver-style version for the latest release """ BUMP_TYPES = {CommitType.DEVELOPMENT: "build", CommitType.BETA: "prerelease"} is_release = commit_type == CommitType.RELEASE enable_file_triggers = True if use_news_files else None bump = BUMP_TYPES.get(commit_type) project_config_path = configuration.get_value(ConfigurationVariable.PROJECT_CONFIG) new_version: Optional[str] = None is_new_version: bool = False with cd(os.path.dirname(project_config_path)): old, _, updates = auto_version_tool.main( release=is_release, enable_file_triggers=enable_file_triggers, commit_count_as=bump, config_path=project_config_path, ) # Autoversion second returned value is not actually the new version # There seem to be a bug in autoversion. # This is why the following needs to be done to determine the version new_version = updates["__version__"] is_new_version = old != new_version logger.info(":: Determining the new version") logger.info(f"Version: {new_version}") return is_new_version, new_version def _generate_changelog(version: Optional[str], use_news_files: bool) -> None: """Creates a towncrier log of the release. Will only create a log entry if we are using news files. 
Args: version: the semver version of the release use_news_files: are we generating the release from news files """ if use_news_files: logger.info(":: Generating a new changelog") project_config_path = configuration.get_value(ConfigurationVariable.PROJECT_CONFIG) with cd(os.path.dirname(project_config_path)): subprocess.check_call(["towncrier", "--yes", '--name=""', f'--version="{version}"']) def main() -> None: """Handle command line arguments to generate a version and changelog file.""" parser = argparse.ArgumentParser(description="Versions the project.") parser.add_argument( "-t", "--release-type", help="type of release to perform", required=True, type=str, choices=CommitType.choices() ) parser.add_argument("-v", "--verbose", action="count", default=0, help="Verbosity, by default errors are reported.") args = parser.parse_args() set_log_level(args.verbose) try: version_project(CommitType.parse(args.release_type)) except Exception as e: log_exception(logger, e) sys.exit(1) if __name__ == "__main__": main()
36.657658
120
0.715409
4a14cdde461f13f4c8db47b5a229d68b0d53d472
1,809
py
Python
lf1.py
aba450/lambdapipelien
448eab7c1392204f8d76788bce58bf41b12feda5
[ "MIT" ]
null
null
null
lf1.py
aba450/lambdapipelien
448eab7c1392204f8d76788bce58bf41b12feda5
[ "MIT" ]
null
null
null
lf1.py
aba450/lambdapipelien
448eab7c1392204f8d76788bce58bf41b12feda5
[ "MIT" ]
null
null
null
import json import logging import boto3 import os from datetime import datetime import time import urllib.parse # from requests_aws4auth import AWS4Auth from elasticsearch import Elasticsearch, RequestsHttpConnection logger = logging.getLogger() logger.setLevel(logging.DEBUG) rekognition = boto3.client('rekognition') vpcEndPoint = 'vpc-photos-4kiie3w6likagxncic65ld3v7y.us-east-1.es.amazonaws.com' # region = 'us-east-1' # service = 'es' # credentials = boto3.Session().get_credentials() # awsauth = AWS4Auth(credentials.access_key, credentials.secret_key, region, service) es = Elasticsearch( hosts = [{'host': vpcEndPoint, 'port': 443}], use_ssl = True, verify_certs = True, connection_class = RequestsHttpConnection ) # s3 = boto3.client('s3') def lambda_handler(event, context): os.environ['TZ'] = 'America/New_York' time.tzset() records = event['Records'] for record in records: s3Object = record['s3'] bucket = s3Object['bucket']['name'] objectKey = urllib.parse.unquote_plus(s3Object['object']['key']) image = { 'S3Object': { 'Bucket': bucket, 'Name': objectKey } } response = rekognition.detect_labels(Image=image) labels = list(map(lambda x:x['Name'],response['Labels'])) timestamp = datetime.now().strftime('%Y-%d-%mT%H:%M:%S') esObject = json.dumps({ 'objectKey' : objectKey, 'bucket' : bucket, 'createdTimestamp' : timestamp, 'labels' : labels }) es.index(index="photos", doc_type="Photo", id=objectKey, body=esObject, refresh=True) # TODO implement return { 'statusCode': 200, 'body': json.dumps('Hello from Lambda!') }
25.125
93
0.630735
4a14cfecfa7521112478599165839ed0c69cf45b
5,606
py
Python
indigorestwrapper/models.py
EdwardMoyse/django-indigorestwrapper
498de65fbf549519b2a70f5de01f06380da0fcbe
[ "MIT" ]
null
null
null
indigorestwrapper/models.py
EdwardMoyse/django-indigorestwrapper
498de65fbf549519b2a70f5de01f06380da0fcbe
[ "MIT" ]
null
null
null
indigorestwrapper/models.py
EdwardMoyse/django-indigorestwrapper
498de65fbf549519b2a70f5de01f06380da0fcbe
[ "MIT" ]
null
null
null
# This is an auto-generated Django model module. # You'll have to do the following manually to clean this up: # * Rearrange models' order # * Make sure each model has one field with primary_key=True # * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table # Feel free to rename the models, but don't rename db_table values or field names. # # Also note: You'll have to insert the output of 'django-admin sqlcustom [app_label]' # into your database. from __future__ import unicode_literals from django.db import models class Device(models.Model): modelpk = models.AutoField(primary_key=True) lastChangedTimeStr = models.CharField(max_length=32) # lastChangedRFC822 = models.DateTimeField() typeSupportsHVAC = models.BooleanField() hasStateToDisplay = models.BooleanField() typeSupportsEnergyMeter = models.BooleanField() typeSupportsIO = models.BooleanField() id = models.IntegerField() # id in indigo JSON typeFlags = models.IntegerField() typeSupportsOnOff = models.BooleanField() addressStr = models.CharField(max_length=32) typeSupportsSensorValue = models.BooleanField() type = models.CharField(max_length=128) classID = models.IntegerField() displayRawState = models.CharField(max_length=32) typeSupportsSpeedControl = models.BooleanField() displayInUI = models.BooleanField() displayLongState = models.CharField(max_length=32) restParent = models.CharField(max_length=32) address = models.IntegerField() versByte = models.IntegerField() name = models.CharField(max_length=128) lastChanged = models.IntegerField() typeSupportsDim = models.BooleanField() lastChangedDateStr = models.DateField() lastChangedRFC3339 = models.DateTimeField() devProtocol = models.IntegerField() folderID = models.IntegerField() typeSupportsSprinkler = models.BooleanField() def __unicode__(self): return '%i %s' % (self.id, self.name) #Type 1: class Device1History(models.Model): id = models.IntegerField(primary_key=True) # AutoField? 
ts = models.DateTimeField(blank=True, null=True) # This field type is a guess. sensitivity = models.IntegerField(blank=True, null=True) state = models.IntegerField(blank=True, null=True) state_active = models.NullBooleanField() state_disconnected = models.NullBooleanField() state_passive = models.NullBooleanField() state_preparing = models.NullBooleanField() state_unavailable = models.NullBooleanField() type = models.TextField(blank=True, null=True) def __unicode__(self): # return '%i %s %i %i %i %i %i %i %i %i %s' % (self.id, self.ts, self.sensitivity, self.state, self.state_active, self.state_disconnected, self.state_passive, self.state_preparing, self.state_unavailable, self.type ) print self.id, self.ts, self.sensitivity, self.state, self.state_active, self.state_disconnected, print self.state_unavailable, "A", self.type, "B" print self.state_preparing print "TEST",self.state_passive,'TEST' return "" # def __init__(self, db_table): # print 'init with db_table',db_table # self._meta.db_table = db_table class Meta: managed = False db_table = '' #Type 2: class Device2History(models.Model): id = models.IntegerField(primary_key=True) # AutoField? ts = models.DateTimeField(blank=True, null=True) # This field type is a guess. onoffstate = models.NullBooleanField() class Meta: managed = False db_table = '' #Type 3: class Device3History(models.Model): id = models.IntegerField(primary_key=True) # AutoField? ts = models.DateTimeField(blank=True, null=True) # This field type is a guess. onoffstate = models.NullBooleanField() sensorvalue = models.IntegerField(blank=True, null=True) sensorvalue_ui = models.TextField(blank=True, null=True) class Meta: managed = False db_table = '' #Type 4: class Device4History(models.Model): id = models.IntegerField(primary_key=True) # AutoField? ts = models.DateTimeField(blank=True, null=True) # This field type is a guess. 
onoffstate = models.NullBooleanField() batterylevel = models.IntegerField(blank=True, null=True) batterylevel_ui = models.TextField(blank=True, null=True) class Meta: managed = False db_table = '' #Type 5: class Device5History(models.Model): id = models.IntegerField(primary_key=True) # AutoField? ts = models.DateTimeField(blank=True, null=True) # This field type is a guess. sensorvalue = models.IntegerField(blank=True, null=True) sensorvalue_ui = models.TextField(blank=True, null=True) class Meta: managed = False db_table = '' #Type 6: class Device6History(models.Model): id = models.IntegerField(primary_key=True) # AutoField? ts = models.DateTimeField(blank=True, null=True) # This field type is a guess. accumenergytimedelta = models.IntegerField(blank=True, null=True) accumenergytimedelta_ui = models.TextField(blank=True, null=True) accumenergytotal = models.IntegerField(blank=True, null=True) accumenergytotal_ui = models.TextField(blank=True, null=True) brightnesslevel = models.IntegerField(blank=True, null=True) curenergylevel = models.IntegerField(blank=True, null=True) curenergylevel_ui = models.TextField(blank=True, null=True) onoffstate = models.NullBooleanField() class Meta: managed = False db_table = ''
41.525926
224
0.711381
4a14d0914dabd1b8e06744d59e9a782fa41c3070
1,066
py
Python
src/backend/aspen/app/views/usher.py
chanzuckerberg/aspen
9853778a7ef68b0446751657af5a835f98dde3dc
[ "MIT" ]
5
2021-02-04T20:18:46.000Z
2021-09-09T13:42:42.000Z
src/backend/aspen/app/views/usher.py
chanzuckerberg/aspen
9853778a7ef68b0446751657af5a835f98dde3dc
[ "MIT" ]
422
2021-01-30T04:16:00.000Z
2022-01-31T23:18:44.000Z
src/backend/aspen/app/views/usher.py
chanzuckerberg/covidr
afe05d703d30ec18ac83944bfb551c313cb216c4
[ "MIT" ]
1
2021-05-20T14:54:39.000Z
2021-05-20T14:54:39.000Z
"""Views for handling anything related to UShER""" from typing import Iterable from flask import g, jsonify from aspen.app.app import application, requires_auth from aspen.app.serializers import UsherOptionResponseSchema from aspen.database.models.usher import UsherOption @application.route("/api/usher/tree_options", methods=["GET"]) @requires_auth def get_usher_tree_options(): """Gets all options user can pick from when creating tree via UShER. Transparent view of all info on each option in database. Returned in order of priority. First in options list is highest priority, which is what should be the default selection and first offered option. """ usher_options: Iterable[UsherOption] = ( g.db_session.query(UsherOption) .order_by( # Lowest priority is most important, lessens as priority ascends UsherOption.priority.asc() ) .all() ) options_schema = UsherOptionResponseSchema(many=True) return jsonify({"usher_options": options_schema.dump(usher_options)})
34.387097
77
0.731707
4a14d0c1d056f37f584e565dbe7c4f1ecdee1a39
3,212
py
Python
mgs/v1.2/rocksat_rx.py
vt-rocksat-2017/dashboard
e99a71edc74dd8b7f3eec023c381524561a7b6e4
[ "MIT" ]
1
2017-08-09T19:57:38.000Z
2017-08-09T19:57:38.000Z
mgs/v1.2/rocksat_rx.py
vt-rocksat-2017/dashboard
e99a71edc74dd8b7f3eec023c381524561a7b6e4
[ "MIT" ]
null
null
null
mgs/v1.2/rocksat_rx.py
vt-rocksat-2017/dashboard
e99a71edc74dd8b7f3eec023c381524561a7b6e4
[ "MIT" ]
null
null
null
#!/usr/bin/env python ######################################### # Title: Rocksat Telemetry Dashboard # # Project: Rocksat-X Competition # # Version: 1.1 # # Date: Jul 06, 2016 # # Author: Zach Leffke, KJ4QLP # # Comment: This is the initial version # ######################################### import math import string import time import sys import os import socket import threading import datetime as dt from optparse import OptionParser from data_server import * from adsb_thread import * from ais_thread import * from hw_thread import * def main(): start_ts = dt.datetime.utcnow().strftime("%Y%m%d_%H%M%S.%f") #--------START Command Line option parser------------------------------------------------------ usage = "usage: %prog " parser = OptionParser(usage = usage) #Main Parameters h_ts = "Startup Timestamp: [default=%default]" h_ip = "Set Rocksat Receiver Modem IP [default=%default]" h_port = "Set Rocksat Receiver Modem Port [default=%default]" h_loc = "Set Rocksat Receiver ID [default=%default]" parser.add_option("-t", "--ts" , dest="ts" , type="string", default=start_ts , help=h_ts) parser.add_option("-a", "--ip" , dest="ip" , type="string", default="0.0.0.0" , help=h_ip) parser.add_option("-p", "--port", dest="port" , type="int" , default="52003" , help=h_port) parser.add_option("-i", "--id" , dest="id" , type="string", default="VTGS" , help=h_loc) #ADSB Parameters h_adsb_ip = "Set Virtual Radar Server IP [default=%default]" h_adsb_port = "Set Virtual Radar Server Port [default=%default]" parser.add_option("-b", dest = "adsb_ip" , action = "store", type = "string", default='127.0.0.1', help = h_adsb_ip) parser.add_option("-c", dest = "adsb_port" , action = "store", type = "int" , default='30003' , help = h_adsb_port) #AIS Parameters h_ais_ip = "Set OpenCPN IP [default=%default]" h_ais_port = "Set OpenCPN Port [default=%default]" parser.add_option("-d", dest = "ais_ip" , action = "store", type = "string", default='127.0.0.1', help = h_adsb_ip) parser.add_option("-e", 
dest = "ais_port" , action = "store", type = "int" , default='2948' , help = h_adsb_port) (options, args) = parser.parse_args() #--------END Command Line option parser------------------------------------------------------ os.system('reset') lock = threading.Lock() adsb_thread = ADSB_Thread(options) adsb_thread.daemon = True adsb_thread.start() #non blocking ais_thread = AIS_Thread(options) ais_thread.daemon = True ais_thread.start() #non blocking hw_thread = HW_Thread(options) hw_thread.daemon = True hw_thread.start() #blocking #server_thread = Data_Server(options, lock) server_thread = Data_Server(options) server_thread.daemon = True server_thread.set_adsb_callback(adsb_thread) server_thread.set_ais_callback(ais_thread) server_thread.set_hw_callback(hw_thread) server_thread.run() #blocking #server_thread.start() #Non-blocking sys.exit() if __name__ == '__main__': main()
36.5
124
0.606476
4a14d10225d2b72a35fde74f0cfa12e2bab6fe53
4,562
py
Python
Pytorch/NLP/models/seq2seq/Encoder.py
Kuga23/Deep-Learning
86980338208c702b6bfcbcfffdb18498e389a56b
[ "MIT" ]
null
null
null
Pytorch/NLP/models/seq2seq/Encoder.py
Kuga23/Deep-Learning
86980338208c702b6bfcbcfffdb18498e389a56b
[ "MIT" ]
null
null
null
Pytorch/NLP/models/seq2seq/Encoder.py
Kuga23/Deep-Learning
86980338208c702b6bfcbcfffdb18498e389a56b
[ "MIT" ]
null
null
null
import random import torch import torch.nn as nn import torch.optim as optim class Encoder(nn.Module): """ The Encoder module of the Seq2Seq model You will need to complete the init function and the forward function. """ def __init__(self, input_size, emb_size, encoder_hidden_size, decoder_hidden_size, dropout = 0.2, model_type = "RNN"): super(Encoder, self).__init__() self.input_size = input_size self.emb_size = emb_size self.encoder_hidden_size = encoder_hidden_size self.decoder_hidden_size = decoder_hidden_size self.model_type = model_type ############################################################################# # TODO: # # Initialize the following layers of the encoder in this order!: # # 1) An embedding layer # # 2) A recurrent layer, this part is controlled by the "model_type" # # argument. You need to support the following type(in string): # # "RNN" and "LSTM". # # 3) Linear layers with ReLU activation in between to get the # # hidden weights of the Encoder(namely, Linear - ReLU - Linear). # # The size of the output of the first linear layer is the same as # # its input size. # # HINT: the size of the output of the second linear layer must # # satisfy certain constraint relevant to the decoder. 
# # 4) A dropout layer # ############################################################################# self.embedding= torch.nn.Embedding(self.input_size,self.emb_size) self.rec_layer= torch.nn.RNN(self.emb_size,self.encoder_hidden_size,1,batch_first=True) if self.model_type=="LSTM": self.rec_layer= torch.nn.LSTM(self.emb_size,self.encoder_hidden_size,1,batch_first=True) self.linear1=torch.nn.Linear(self.encoder_hidden_size,self.encoder_hidden_size) self.relu_lay = torch.nn.ReLU() self.linear2=torch.nn.Linear(self.encoder_hidden_size,self.decoder_hidden_size) self.dropout_lay = torch.nn.Dropout(p=dropout); ############################################################################# # END OF YOUR CODE # ############################################################################# def forward(self, input): """ The forward pass of the encoder Args: input (tensor): the encoded sequences of shape (batch_size, seq_len, input_size) Returns: output (tensor): the output of the Encoder; later fed into the Decoder. hidden (tensor): the weights coming out of the last hidden unit """ ############################################################################# # TODO: Implement the forward pass of the encoder. 
# # Apply the dropout to the embedding layer before you apply the # # recurrent layer # # Apply tanh activation to the hidden tensor before returning it # ############################################################################# #b,s= input.shape #inp = input.reshape(s,b) output, hidden = None, None emb= self.dropout_lay(self.embedding(input)) output,hidden= self.rec_layer(emb) if self.model_type=="RNN": hidden=self.linear2(self.relu_lay(self.linear1(hidden))) hidden= torch.tanh(hidden) else: newh=self.linear2(self.relu_lay(self.linear1(hidden[0]))) #newc=self.linear2(self.relu_lay(self.linear1(hidden[1]))) newh= torch.tanh(newh) #newc= torch.tanh(newc) hidden=(newh,hidden[1]) ############################################################################# # END OF YOUR CODE # ############################################################################# return output, hidden
54.309524
122
0.45594
4a14d135aadcbd777e20dfcc09f9fed7166f4d3d
3,156
py
Python
example/mnist_cnn_one_iteration.py
johaahn/keras2cpp
697481bb5b27a9428e79be613ec6ab41a3eda3c4
[ "MIT" ]
null
null
null
example/mnist_cnn_one_iteration.py
johaahn/keras2cpp
697481bb5b27a9428e79be613ec6ab41a3eda3c4
[ "MIT" ]
null
null
null
example/mnist_cnn_one_iteration.py
johaahn/keras2cpp
697481bb5b27a9428e79be613ec6ab41a3eda3c4
[ "MIT" ]
null
null
null
''' Save CNN network and one sample of train data. Run one iteration of training of convnet on the MNIST dataset. ''' from __future__ import print_function import numpy as np np.random.seed(1337) # for reproducibility from keras.datasets import mnist from keras.models import Sequential from keras.layers.core import Dense, Dropout, Activation, Flatten from keras.layers.convolutional import Convolution2D, MaxPooling2D from keras.utils import np_utils from keras import backend as K batch_size = 128 nb_classes = 10 nb_epoch = 1 # input image dimensions img_rows, img_cols = 28, 28 # number of convolutional filters to use nb_filters = 4 # size of pooling area for max pooling nb_pool = 2 # convolution kernel size nb_conv = 3 # the data, shuffled and split between train and test sets (X_train, y_train), (X_test, y_test) = mnist.load_data() X_train = X_train.reshape(X_train.shape[0], img_rows, img_cols, 1) X_test = X_test.reshape(X_test.shape[0], img_rows, img_cols, 1) X_train = X_train.astype('float32') X_test = X_test.astype('float32') X_train /= 255 X_test /= 255 print('X_train shape:', X_train.shape) print(X_train.shape[0], 'train samples') print(X_test.shape[0], 'test samples') # convert class vectors to binary class matrices Y_train = np_utils.to_categorical(y_train, nb_classes) Y_test = np_utils.to_categorical(y_test, nb_classes) model = Sequential() a = Convolution2D(nb_filters, nb_conv, nb_conv, border_mode='same', input_shape=(img_rows, img_cols, 1)) model.add(a) model.add(Activation('relu')) model.add(Convolution2D(nb_filters, nb_conv, nb_conv, border_mode='same')) model.add(Activation('relu')) model.add(MaxPooling2D(pool_size=(nb_pool, nb_pool))) model.add(Dropout(0.25)) model.add(Flatten()) model.add(Dense(6)) model.add(Activation('relu')) model.add(Dropout(0.5)) model.add(Dense(nb_classes)) model.add(Activation('softmax')) model.compile(loss='categorical_crossentropy', optimizer='adadelta') model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, 
verbose=1, validation_data=(X_test, Y_test)) # store model with open('./my_nn_arch.json', 'w') as fout: fout.write(model.to_json()) model.save_weights('./my_nn_weights.h5', overwrite=True) # store one sample in text file with open("./sample_mnist.dat", "w") as fin: fin.write("1 28 28\n") print(X_train.shape) a = X_train[500,:,:,0] for b in a: fin.write(str(b)+'\n') # get prediction on saved sample # c++ output should be the same ;) print('Prediction on saved sample:') print(X_train.shape) print(X_train[None,500,:,:,:].shape) print(str(model.predict(X_train[None,500,:,:,:]))) inp = model.input outputs = [layer.output for layer in model.layers] # all layer outputs functor = K.function([inp]+ [K.learning_phase()], outputs ) # evaluation function # Testing test = X_train[None,500,:,:,:] layer_outs = functor([test]) print(layer_outs) print(str(model.predict(test))) print(str(model.predict(test))) # on my pc I got: #[[ 0.03729606 0.00783805 0.06588034 0.21728528 0.01093729 0.34730983 # 0.01350389 0.02174525 0.26624694 0.01195715]]
29.495327
81
0.72782
4a14d148207b86f7b5242b7409338831e45e3579
192
py
Python
ask_name.py
AHannebelle/snips-givename
f84455d37c8789ef7531d949b5f556dac4030327
[ "MIT" ]
null
null
null
ask_name.py
AHannebelle/snips-givename
f84455d37c8789ef7531d949b5f556dac4030327
[ "MIT" ]
null
null
null
ask_name.py
AHannebelle/snips-givename
f84455d37c8789ef7531d949b5f556dac4030327
[ "MIT" ]
null
null
null
import requests URL = "http://pinoc.serveo.net/name" def ask_name_action(name): requests.get(url="{}/{}".format(URL, name)) response = "Bonjour {}".format(name) return response
19.2
47
0.661458
4a14d2ad11765744fe6cd660dab6187841dc5cf8
20,249
py
Python
tests/test_http.py
lsst/resources
f1bf1f77a07a3072a589e6e10d0456020a77fc45
[ "BSD-3-Clause" ]
null
null
null
tests/test_http.py
lsst/resources
f1bf1f77a07a3072a589e6e10d0456020a77fc45
[ "BSD-3-Clause" ]
11
2021-12-07T17:19:19.000Z
2022-03-31T05:26:36.000Z
tests/test_http.py
lsst/resources
f1bf1f77a07a3072a589e6e10d0456020a77fc45
[ "BSD-3-Clause" ]
null
null
null
# This file is part of lsst-resources. # # Developed for the LSST Data Management System. # This product includes software developed by the LSST Project # (https://www.lsst.org). # See the COPYRIGHT file at the top-level directory of this distribution # for details of code ownership. # # Use of this source code is governed by a 3-clause BSD-style # license that can be found in the LICENSE file. import importlib import os.path import stat import tempfile import unittest import lsst.resources import requests import responses from lsst.resources import ResourcePath from lsst.resources.http import BearerTokenAuth, SessionStore, _is_protected, _is_webdav_endpoint from lsst.resources.tests import GenericTestCase from lsst.resources.utils import makeTestTempDir, removeTestTempDir TESTDIR = os.path.abspath(os.path.dirname(__file__)) class GenericHttpTestCase(GenericTestCase, unittest.TestCase): scheme = "http" netloc = "server.example" class HttpReadWriteTestCase(unittest.TestCase): """Specialist test cases for WebDAV server. The responses class requires that every possible request be explicitly mocked out. This currently makes it extremely inconvenient to subclass the generic read/write tests shared by other URI schemes. For now use explicit standalone tests. 
""" def setUp(self): # Local test directory self.tmpdir = ResourcePath(makeTestTempDir(TESTDIR)) serverRoot = "www.not-exists.orgx" existingFolderName = "existingFolder" existingFileName = "existingFile" notExistingFileName = "notExistingFile" self.baseURL = ResourcePath(f"https://{serverRoot}", forceDirectory=True) self.existingFileResourcePath = ResourcePath( f"https://{serverRoot}/{existingFolderName}/{existingFileName}" ) self.notExistingFileResourcePath = ResourcePath( f"https://{serverRoot}/{existingFolderName}/{notExistingFileName}" ) self.existingFolderResourcePath = ResourcePath( f"https://{serverRoot}/{existingFolderName}", forceDirectory=True ) self.notExistingFolderResourcePath = ResourcePath( f"https://{serverRoot}/{notExistingFileName}", forceDirectory=True ) # Need to declare the options responses.add(responses.OPTIONS, self.baseURL.geturl(), status=200, headers={"DAV": "1,2,3"}) # Used by HttpResourcePath.exists() responses.add( responses.HEAD, self.existingFileResourcePath.geturl(), status=200, headers={"Content-Length": "1024"}, ) responses.add(responses.HEAD, self.notExistingFileResourcePath.geturl(), status=404) # Used by HttpResourcePath.read() responses.add( responses.GET, self.existingFileResourcePath.geturl(), status=200, body=str.encode("It works!") ) responses.add(responses.GET, self.notExistingFileResourcePath.geturl(), status=404) # Used by HttpResourcePath.write() responses.add(responses.PUT, self.existingFileResourcePath.geturl(), status=201) # Used by HttpResourcePath.transfer_from() responses.add( responses.Response( url=self.existingFileResourcePath.geturl(), method="COPY", headers={"Destination": self.existingFileResourcePath.geturl()}, status=201, ) ) responses.add( responses.Response( url=self.existingFileResourcePath.geturl(), method="COPY", headers={"Destination": self.notExistingFileResourcePath.geturl()}, status=201, ) ) responses.add( responses.Response( url=self.existingFileResourcePath.geturl(), method="MOVE", 
headers={"Destination": self.notExistingFileResourcePath.geturl()}, status=201, ) ) # Used by HttpResourcePath.remove() responses.add(responses.DELETE, self.existingFileResourcePath.geturl(), status=200) responses.add(responses.DELETE, self.notExistingFileResourcePath.geturl(), status=404) # Used by HttpResourcePath.mkdir() responses.add( responses.HEAD, self.existingFolderResourcePath.geturl(), status=200, headers={"Content-Length": "1024"}, ) responses.add(responses.HEAD, self.baseURL.geturl(), status=200, headers={"Content-Length": "1024"}) responses.add(responses.HEAD, self.notExistingFolderResourcePath.geturl(), status=404) responses.add( responses.Response(url=self.notExistingFolderResourcePath.geturl(), method="MKCOL", status=201) ) responses.add( responses.Response(url=self.existingFolderResourcePath.geturl(), method="MKCOL", status=403) ) # Used by HttpResourcePath._do_put() self.redirectPathNoExpect = ResourcePath(f"https://{serverRoot}/redirect-no-expect/file") self.redirectPathExpect = ResourcePath(f"https://{serverRoot}/redirect-expect/file") redirected_url = f"https://{serverRoot}/redirect/location" responses.add( responses.PUT, self.redirectPathNoExpect.geturl(), headers={"Location": redirected_url}, status=307, ) responses.add( responses.PUT, self.redirectPathExpect.geturl(), headers={"Location": redirected_url}, status=307, match=[responses.matchers.header_matcher({"Content-Length": "0", "Expect": "100-continue"})], ) responses.add(responses.PUT, redirected_url, status=202) def tearDown(self): if self.tmpdir: if self.tmpdir.isLocal: removeTestTempDir(self.tmpdir.ospath) @responses.activate def test_exists(self): self.assertTrue(self.existingFileResourcePath.exists()) self.assertFalse(self.notExistingFileResourcePath.exists()) self.assertEqual(self.existingFileResourcePath.size(), 1024) with self.assertRaises(FileNotFoundError): self.notExistingFileResourcePath.size() @responses.activate def test_remove(self): 
self.assertIsNone(self.existingFileResourcePath.remove()) with self.assertRaises(FileNotFoundError): self.notExistingFileResourcePath.remove() url = "https://example.org/delete" responses.add(responses.DELETE, url, status=404) with self.assertRaises(FileNotFoundError): ResourcePath(url).remove() @responses.activate def test_mkdir(self): # The mock means that we can't check this now exists self.notExistingFolderResourcePath.mkdir() # This should do nothing self.existingFolderResourcePath.mkdir() with self.assertRaises(ValueError): self.notExistingFileResourcePath.mkdir() @responses.activate def test_read(self): self.assertEqual(self.existingFileResourcePath.read().decode(), "It works!") self.assertNotEqual(self.existingFileResourcePath.read().decode(), "Nope.") with self.assertRaises(FileNotFoundError): self.notExistingFileResourcePath.read() # Run this twice to ensure use of cache in code coverage. for _ in (1, 2): with self.existingFileResourcePath.as_local() as local_uri: self.assertTrue(local_uri.isLocal) content = local_uri.read().decode() self.assertEqual(content, "It works!") # Check that the environment variable is being read. 
lsst.resources.http._TMPDIR = None with unittest.mock.patch.dict(os.environ, {"LSST_RESOURCES_TMPDIR": self.tmpdir.ospath}): with self.existingFileResourcePath.as_local() as local_uri: self.assertTrue(local_uri.isLocal) content = local_uri.read().decode() self.assertEqual(content, "It works!") self.assertIsNotNone(local_uri.relative_to(self.tmpdir)) @responses.activate def test_write(self): self.assertIsNone(self.existingFileResourcePath.write(data=str.encode("Some content."))) with self.assertRaises(FileExistsError): self.existingFileResourcePath.write(data=str.encode("Some content."), overwrite=False) url = "https://example.org/put" responses.add(responses.PUT, url, status=404) with self.assertRaises(ValueError): ResourcePath(url).write(data=str.encode("Some content.")) @responses.activate def test_do_put_with_redirection(self): # Without LSST_HTTP_PUT_SEND_EXPECT_HEADER. os.environ.pop("LSST_HTTP_PUT_SEND_EXPECT_HEADER", None) importlib.reload(lsst.resources.http) body = str.encode("any contents") self.assertIsNone(self.redirectPathNoExpect._do_put(data=body)) # With LSST_HTTP_PUT_SEND_EXPECT_HEADER. with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_PUT_SEND_EXPECT_HEADER": "True"}, clear=True): importlib.reload(lsst.resources.http) self.assertIsNone(self.redirectPathExpect._do_put(data=body)) @responses.activate def test_transfer(self): # Transferring to self should be no-op. self.existingFileResourcePath.transfer_from(src=self.existingFileResourcePath) self.assertIsNone(self.notExistingFileResourcePath.transfer_from(src=self.existingFileResourcePath)) # Should test for existence. # self.assertTrue(self.notExistingFileResourcePath.exists()) # Should delete and try again with move. # self.notExistingFileResourcePath.remove() self.assertIsNone( self.notExistingFileResourcePath.transfer_from(src=self.existingFileResourcePath, transfer="move") ) # Should then check that it was moved. 
# self.assertFalse(self.existingFileResourcePath.exists()) # Existing file resource should have been removed so this should # trigger FileNotFoundError. # with self.assertRaises(FileNotFoundError): # self.notExistingFileResourcePath.transfer_from(src=self.existingFileResourcePath) with self.assertRaises(ValueError): self.notExistingFileResourcePath.transfer_from( src=self.existingFileResourcePath, transfer="unsupported" ) def test_parent(self): self.assertEqual( self.existingFolderResourcePath.geturl(), self.notExistingFileResourcePath.parent().geturl() ) self.assertEqual(self.baseURL.geturl(), self.baseURL.parent().geturl()) self.assertEqual( self.existingFileResourcePath.parent().geturl(), self.existingFileResourcePath.dirname().geturl() ) def test_send_expect_header(self): # Ensure _SEND_EXPECT_HEADER_ON_PUT is correctly initialized from # the environment. os.environ.pop("LSST_HTTP_PUT_SEND_EXPECT_HEADER", None) importlib.reload(lsst.resources.http) self.assertFalse(lsst.resources.http._SEND_EXPECT_HEADER_ON_PUT) with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_PUT_SEND_EXPECT_HEADER": "true"}, clear=True): importlib.reload(lsst.resources.http) self.assertTrue(lsst.resources.http._SEND_EXPECT_HEADER_ON_PUT) def test_timeout(self): connect_timeout = 100 read_timeout = 200 with unittest.mock.patch.dict( os.environ, {"LSST_HTTP_TIMEOUT_CONNECT": str(connect_timeout), "LSST_HTTP_TIMEOUT_READ": str(read_timeout)}, clear=True, ): # Force module reload to initialize TIMEOUT. 
importlib.reload(lsst.resources.http) self.assertEqual(lsst.resources.http.TIMEOUT, (connect_timeout, read_timeout)) def test_is_protected(self): self.assertFalse(_is_protected("/this-file-does-not-exist")) with tempfile.NamedTemporaryFile(mode="wt", dir=self.tmpdir.ospath, delete=False) as f: f.write("XXXX") file_path = f.name os.chmod(file_path, stat.S_IRUSR) self.assertTrue(_is_protected(file_path)) for mode in (stat.S_IRGRP, stat.S_IWGRP, stat.S_IXGRP, stat.S_IROTH, stat.S_IWOTH, stat.S_IXOTH): os.chmod(file_path, stat.S_IRUSR | mode) self.assertFalse(_is_protected(file_path)) class WebdavUtilsTestCase(unittest.TestCase): """Test for the Webdav related utilities.""" serverRoot = "www.lsstwithwebdav.orgx" wrongRoot = "www.lsstwithoutwebdav.org" def setUp(self): responses.add(responses.OPTIONS, f"https://{self.serverRoot}", status=200, headers={"DAV": "1,2,3"}) responses.add(responses.OPTIONS, f"https://{self.wrongRoot}", status=200) @responses.activate def test_is_webdav_endpoint(self): self.assertTrue(_is_webdav_endpoint(f"https://{self.serverRoot}")) self.assertFalse(_is_webdav_endpoint(f"https://{self.wrongRoot}")) class BearerTokenAuthTestCase(unittest.TestCase): """Test for the BearerTokenAuth class.""" def setUp(self): self.tmpdir = ResourcePath(makeTestTempDir(TESTDIR)) self.token = "ABCDE1234" def tearDown(self): if self.tmpdir and self.tmpdir.isLocal: removeTestTempDir(self.tmpdir.ospath) def test_empty_token(self): """Ensure that when no token is provided the request is not modified. """ auth = BearerTokenAuth(None) auth._refresh() self.assertIsNone(auth._token) self.assertIsNone(auth._path) req = requests.Request("GET", "https://example.org") self.assertEqual(auth(req), req) def test_token_value(self): """Ensure that when a token value is provided, the 'Authorization' header is added to the requests. 
""" auth = BearerTokenAuth(self.token) req = auth(requests.Request("GET", "https://example.org").prepare()) self.assertEqual(req.headers.get("Authorization"), f"Bearer {self.token}") def test_token_file(self): """Ensure when the provided token is a file path, its contents is correctly used in the the 'Authorization' header of the requests. """ with tempfile.NamedTemporaryFile(mode="wt", dir=self.tmpdir.ospath, delete=False) as f: f.write(self.token) token_file_path = f.name # Ensure the request's "Authorization" header is set with the right # token value os.chmod(token_file_path, stat.S_IRUSR) auth = BearerTokenAuth(token_file_path) req = auth(requests.Request("GET", "https://example.org").prepare()) self.assertEqual(req.headers.get("Authorization"), f"Bearer {self.token}") # Ensure an exception is raised if either group or other can read the # token file for mode in (stat.S_IRGRP, stat.S_IWGRP, stat.S_IXGRP, stat.S_IROTH, stat.S_IWOTH, stat.S_IXOTH): os.chmod(token_file_path, stat.S_IRUSR | mode) with self.assertRaises(PermissionError): BearerTokenAuth(token_file_path) class SessionStoreTestCase(unittest.TestCase): """Test for the SessionStore class.""" def setUp(self): self.tmpdir = ResourcePath(makeTestTempDir(TESTDIR)) self.rpath = ResourcePath("https://example.org") def tearDown(self): if self.tmpdir and self.tmpdir.isLocal: removeTestTempDir(self.tmpdir.ospath) def test_ca_cert_bundle(self): """Ensure a certificate authorities bundle is used to authentify the remote server. """ with tempfile.NamedTemporaryFile(mode="wt", dir=self.tmpdir.ospath, delete=False) as f: f.write("CERT BUNDLE") cert_bundle = f.name with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_CACERT_BUNDLE": cert_bundle}, clear=True): session = SessionStore().get(self.rpath) self.assertEqual(session.verify, cert_bundle) def test_user_cert(self): """Ensure if user certificate and private key are provided, they are used for authenticating the client. 
""" # Create mock certificate and private key files. with tempfile.NamedTemporaryFile(mode="wt", dir=self.tmpdir.ospath, delete=False) as f: f.write("CERT") client_cert = f.name with tempfile.NamedTemporaryFile(mode="wt", dir=self.tmpdir.ospath, delete=False) as f: f.write("KEY") client_key = f.name # Check both LSST_HTTP_AUTH_CLIENT_CERT and LSST_HTTP_AUTH_CLIENT_KEY # must be initialized. with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_AUTH_CLIENT_CERT": client_cert}, clear=True): with self.assertRaises(ValueError): SessionStore().get(self.rpath) with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_AUTH_CLIENT_KEY": client_key}, clear=True): with self.assertRaises(ValueError): SessionStore().get(self.rpath) # Check private key file must be accessible only by its owner. with unittest.mock.patch.dict( os.environ, {"LSST_HTTP_AUTH_CLIENT_CERT": client_cert, "LSST_HTTP_AUTH_CLIENT_KEY": client_key}, clear=True, ): # Ensure the session client certificate is initialized when # only the owner can read the private key file. os.chmod(client_key, stat.S_IRUSR) session = SessionStore().get(self.rpath) self.assertEqual(session.cert[0], client_cert) self.assertEqual(session.cert[1], client_key) # Ensure an exception is raised if either group or other can access # the private key file. for mode in (stat.S_IRGRP, stat.S_IWGRP, stat.S_IXGRP, stat.S_IROTH, stat.S_IWOTH, stat.S_IXOTH): os.chmod(client_key, stat.S_IRUSR | mode) with self.assertRaises(PermissionError): SessionStore().get(self.rpath) def test_token_env(self): """Ensure when the token is provided via an environment variable the sessions are equipped with a BearerTokenAuth. 
""" token = "ABCDE" with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_AUTH_BEARER_TOKEN": token}, clear=True): session = SessionStore().get(self.rpath) self.assertEqual(type(session.auth), lsst.resources.http.BearerTokenAuth) self.assertEqual(session.auth._token, token) self.assertIsNone(session.auth._path) def test_sessions(self): """Ensure the session caching mechanism works.""" # Ensure the store provides a session for a given URL root_url = "https://example.org" store = SessionStore() session = store.get(ResourcePath(root_url)) self.assertIsNotNone(session) # Ensure the sessions retrieved from a single store with the same # root URIs are equal for u in (f"{root_url}", f"{root_url}/path/to/file"): self.assertEqual(session, store.get(ResourcePath(u))) # Ensure sessions retrieved for different root URIs are different another_url = "https://another.example.org" self.assertNotEqual(session, store.get(ResourcePath(another_url))) # Ensure the sessions retrieved from a single store for URLs with # different port numbers are different root_url_with_port = f"{another_url}:12345" session = store.get(ResourcePath(root_url_with_port)) self.assertNotEqual(session, store.get(ResourcePath(another_url))) # Ensure the sessions retrieved from a single store with the same # root URIs (including port numbers) are equal for u in (f"{root_url_with_port}", f"{root_url_with_port}/path/to/file"): self.assertEqual(session, store.get(ResourcePath(u))) if __name__ == "__main__": unittest.main()
40.579158
110
0.661366
4a14d2f5ecc513921c8547cc836d6e09ea28a370
7,004
py
Python
Experiments.py
anonymus369/Formalizing-Attribute-and-Membership-Inference
27671abb94126f018578f530b6b64f43cf77411f
[ "MIT" ]
null
null
null
Experiments.py
anonymus369/Formalizing-Attribute-and-Membership-Inference
27671abb94126f018578f530b6b64f43cf77411f
[ "MIT" ]
null
null
null
Experiments.py
anonymus369/Formalizing-Attribute-and-Membership-Inference
27671abb94126f018578f530b6b64f43cf77411f
[ "MIT" ]
null
null
null
#!/usr/bin/env python # coding: utf-8 # In[ ]: import numpy as np from numpy import random as rd import math import torch import torch.nn as nn from torch.utils.data import DataLoader, Dataset from models import ConvNNCifar10, ConvNNMNIST cuda = True if torch.cuda.is_available() else False Tensor = torch.cuda.FloatTensor if cuda else torch.FloatTensor def get_n_params(model): """Get the number of parameters in 'model' """ pp=0 for p in list(model.parameters()): nn=1 for s in list(p.size()): nn = nn*s pp += nn return pp def To1hot(in_tensor,num_class): """Output a 1 hot encoded version of 'in_tensor' """ if len(in_tensor.shape) == 1: in_tensor = torch.unsqueeze(in_tensor,1) onehot = Tensor(in_tensor.shape[0], num_class) onehot.zero_() onehot.scatter_(1, in_tensor, 1) return onehot def TrainingSet(n,X,Y,seed): """ Take n samples from (X,Y) uniformly and then split into training and validation set. """ lenX = X.shape[0] if n>lenX: raise ValueError # Randomly pick samples from the universe. rd.seed(seed) index = rd.choice(lenX,n*2//3,replace=False) trainX = X[index] trainY = Y[index] # The part of the universe not used for training is used as validation set. valIndex = np.setdiff1d(range(lenX),index) valIndex = rd.choice(valIndex,n//3,replace=False) valX = X[valIndex] valY = Y[valIndex] return trainX, trainY, valX, valY class TrainingDataMSE(Dataset): """Preprocess training data """ def __init__(self,X,Y): self.X = Tensor(X) indexY = Tensor(Y).to(dtype=torch.long) self.Y = To1hot(indexY,10) def __len__(self): return self.X.shape[0] def __getitem__(self,index): return (self.X[index,:,:,:],self.Y[index]) def TrainNN_MSE(n, trainX, trainY, valX, valY, mode, seed=5, batch_size=50, n_epochs=2500, epsilon=1e-3): """ Instantiates and trains a Neural network for classification. 'mode' indicates whether the model is designed for Cifar10 or the MNIST/FashionMNIST sets. n: total number of samples used for training (traning + validation). (trainX,trainY): training set. 
(ValX,ValY): Validation set. """ lr = 0.005 #Learning rate for the optimizer trainingData = TrainingDataMSE(trainX,trainY) valX, valY = Tensor(valX), Tensor(valY).to(dtype=torch.long) valY = To1hot(valY,10) dataloader = DataLoader(trainingData,batch_size=batch_size,shuffle=True) Len = trainX.shape[0] valLen = valX.shape[0] # Instantiating model, loss function and softmax if mode == 'Cifar10': NN = ConvNNCifar10(10) elif mode == 'MNIST': NN = ConvNNMNIST(10) Loss = nn.MSELoss() Soft = nn.Softmax(dim=1) if cuda: NN.cuda() Loss.cuda() Soft.cuda() optimizer = torch.optim.Adam(NN.parameters(),lr=lr) currLoss = math.inf for k in range(n_epochs): # Loop through epochs. lostList = [] Acc = 0 for i,batch in enumerate(dataloader): # Loop through batches. optimizer.zero_grad() example = batch[0] target = batch[1] loss = Loss(Soft(NN(example)),target) # Compute the loss. loss.backward() # Compute the gradient. optimizer.step() # Update the weights. aux = Tensor.cpu(sum(torch.eq(torch.argmax(NN(example),1),torch.argmax(target,1)))) Acc = Acc + aux.data.numpy() lostList.append(loss.item()) # Compute Accuracy over training set. Acc = Acc/Len # Compute Accuracy over validation set. aux = Tensor.cpu(torch.eq(torch.argmax(NN(valX),1),torch.argmax(valY,1))) valAcc = sum(aux.data.numpy())/valLen prevLoss = currLoss currLoss = np.mean(lostList) if (abs(prevLoss-currLoss) < epsilon): # Early stop criteria. break print('Loss : %f, Accuracy: %f, Validation Accuracy: %f Iteration: %d' % (currLoss, Acc, valAcc, k+1)) return NN def Experiment2(n,X,Y,testX,testY,seed,mode=None,precision=10000): """Draw a training set of size 'n' randomly, train a model and perform the likelihood attack 'precision' number of times on the trained model. The generalization error is computed empirically using training set (X,Y) and test set (testX,testY). mode: Indicates the model architecture to be initialized and trained. 
""" if mode is None: raise ValueError("mode must be 'Cifar10' or 'MNIST'") trainX, trainY, valX, valY = TrainingSet(n,X,Y,seed) NN = TrainNN_MSE(n,trainX,trainY,valX,valY,mode,seed=seed,n_epochs=150) trainX = np.concatenate((trainX,valX)) trainY = np.concatenate((trainY,valY)) with torch.no_grad(): trainXtensor, testXtensor = Tensor(trainX), Tensor(testX) trainYtensor, testYtensor = Tensor(trainY).to(torch.long), Tensor(testY).to(torch.long) Loss = nn.MSELoss(reduction='none') Soft = nn.Softmax(dim=1) if cuda: Loss.cuda() Soft.cuda() # Likelihood Attack likelihoodTrain = torch.max(Soft(NN(trainXtensor)),1)[0] likelihoodTrain = Tensor.cpu(likelihoodTrain) likelihoodTrain = likelihoodTrain.data.numpy() likelihoodTest = torch.max(Soft(NN(testXtensor)),1)[0] likelihoodTest = Tensor.cpu(likelihoodTest) likelihoodTest = likelihoodTest.data.numpy() threshold = .8 Suc = 0 for _ in range(precision): # Repeating the attack described in Algorithm 2, "precision" times. T = rd.randint(2) if T: j = rd.randint(len(likelihoodTrain)) S = likelihoodTrain[j] else: j = rd.randint(len(likelihoodTest)) S = likelihoodTest[j] Suc = Suc + int(int(S>threshold)==T) Suc = Suc/precision # Compute Generalization gap. trainY_onehot = To1hot(trainYtensor,10) trainErr = Tensor.cpu(torch.sum(Loss(Soft(NN(trainXtensor)),trainY_onehot),1)) trainErr = trainErr.data.numpy() testY_onehot = To1hot(testYtensor,10) testErr = Tensor.cpu(torch.sum(Loss(Soft(NN(testXtensor)),testY_onehot),1)) testErr = testErr.data.numpy() genErr = abs(np.mean(trainErr)-np.mean(testErr)) # Compute Accuracy on the Test Set Acc = Tensor.cpu(sum(torch.eq(torch.argmax(NN(testXtensor),1),torch.argmax(testY_onehot,1)))) Acc = Acc.data.numpy() Acc = Acc/testXtensor.shape[0] return genErr, Suc, Acc
33.194313
106
0.602656
4a14d33a2beae8de763559265f2c6779443f6ad1
31,380
py
Python
BioSTEAM 2.x.x/biorefineries/wheatstraw/_system.py
blsymens/Bioindustrial-Park
c1173646185d52f4b8d595ad088ade8e5216614d
[ "MIT" ]
18
2020-05-12T21:46:14.000Z
2022-03-14T00:35:35.000Z
BioSTEAM 2.x.x/biorefineries/wheatstraw/_system.py
yalinli2/Bioindustrial-Park
196e2d60ec9bf0466ef804d036c995b89bc72f72
[ "MIT" ]
24
2020-03-05T14:39:15.000Z
2022-03-25T22:24:50.000Z
BioSTEAM 2.x.x/biorefineries/wheatstraw/_system.py
yalinli2/Bioindustrial-Park
196e2d60ec9bf0466ef804d036c995b89bc72f72
[ "MIT" ]
9
2020-05-14T13:02:32.000Z
2022-03-27T19:41:07.000Z
# -*- coding: utf-8 -*- """ Created on Thu Jun 27 23:12:28 2019 @author: Joaquin """ from biosteam import System import biosteam as bst import thermosteam as tmo from thermosteam import Stream from biorefineries.wheatstraw._process_settings import price from biorefineries.wheatstraw._chemicals import chemical_groups from biorefineries.wheatstraw import units import thermosteam.reaction as rxn import numpy as np from biosteam.process_tools import BoundedNumericalSpecification from biorefineries import BST222 __all__ = ('create_system',) def find_split(IDs, flow0, flow1): flow0 = np.asarray(flow0) splits = flow0/(flow0 + np.asarray(flow1)) thermo = tmo.settings.get_thermo() chemicals = thermo.chemicals array = np.zeros(chemicals.size) for ID, split in zip(IDs, splits): if ID in chemical_groups: array[chemicals.get_index(chemical_groups[ID])] = split else: array[chemicals.index(ID)] = split return array def find_split_solids(stream,IDs): array = np.zeros(stream.chemicals.size) for ID in IDs: array[stream.chemicals.index(ID)] = 1 return array def find_WIS(stream, IDs): split=find_split_solids(stream,IDs) if stream.F_mass == 0: WIS = 0 else: WIS = sum(split*stream.mass)/stream.F_mass return WIS def find_TS(stream): TS = (stream.F_mass-stream.imass['Water'])/stream.F_mass return TS def create_system(ID='wheatstraw_sys'): if BST222: System.default_maxiter = 400 System.default_converge_method = 'Aitken' System.default_molar_tolerance = 0.01 else: System.maxiter = 400 System.converge_method = 'Aitken' System.molar_tolerance = 0.01 ### Streams chemicals = bst.settings.get_chemicals() non_soluble = ['Xylan','Glucan','Arabinan','Lignin','Extract','Ash','Mannan','Galactan','Acetate'] # feed flow drycomposition = chemicals.kwarray( dict(Glucan=0.3342, Xylan=0.2330, Arabinan=0.0420, Lignin=0.2260, Extract=0.1330, Ash=0.0180, Acetate=0.0130) ) TS=0.95 moisture_content = chemicals.kwarray( dict(Water=1-TS) ) dryflow = 83333.0 netflow = dryflow/TS feedflow = 
netflow*(drycomposition*TS + moisture_content) process_water_over_dryflow = 19.96 sulfuric_acid_over_dryflow = 0.04 wheatstraw = Stream('wheatstraw', feedflow, units='kg/hr', price=price['Feedstock']*TS) ### Pretreatment system process_water1 = Stream('process_water1', T=25+273.15, P=1*101325, Water=process_water_over_dryflow*dryflow,#only an initialization units='kg/hr') sulfuric_acid = Stream('sulfuric_acid', P=1*101325, T=25+273.15, Water=0.05*sulfuric_acid_over_dryflow*dryflow, SulfuricAcid=0.95*sulfuric_acid_over_dryflow*dryflow, units='kg/hr', price=price['Sulfuric acid']*0.95) steam = Stream('steam', phase='g', T=212+273.15, P=20*101325, Water=dryflow*0.5,#This is just a guess units='kg/hr') U101 = units.FeedStockHandling('U101', ins=wheatstraw) U101.cost_items['System'].cost = 0 T201 = units.SulfuricAcidTank('T201', ins=sulfuric_acid) M201 = bst.Mixer('M201', ins=(process_water1, T201-0,Stream())) M202 = units.WashingTank('M202', ins=(M201-0, U101-0)) S200 = units.SieveFilter('S200', ins=(M202-0), outs=(Stream('feed_20TS'),Stream('recycled_water1')),moisture_content=1-0.20,split=find_split_solids(M202-0,non_soluble)) S201 = units.PressureFilter('S201', ins=(S200-0), outs=(Stream('feed_50TS'),Stream('recycled_water2')),moisture_content=0.5,split=find_split_solids(S200-0,non_soluble)) M200 = bst.Mixer('M200', ins=(S200-1, S201-1),outs='recycled_water') M200-0-2-M201 recycled_water = M200-0 def update_process_water1(): process_water1.imass['Water'] = process_water_over_dryflow*dryflow - recycled_water.imass['Water'] sulfuric_acid.imass['SulfuricAcid']= 0.95*sulfuric_acid_over_dryflow*dryflow - recycled_water.imass['SulfuricAcid'] sulfuric_acid.imass['Water']= 0.05/0.95*sulfuric_acid.imass['SulfuricAcid'] water_recycle_sys = System('water_recycle_sys', path=(U101, T201, M201, M202, S200, S201, M200, update_process_water1), recycle=M201-0) M205 = bst.Mixer('M205', ins=(S201-0, None)) M203 = units.SteamMixer('M203', ins=(M205-0, 
steam),P=steam.chemicals.Water.Psat(190.0+273.15)) R201 = units.PretreatmentReactorSystem('R201', ins=M203-0,outs=(Stream('pretreatment_steam'),Stream('pretreatment_effluent'))) P201 = units.BlowdownDischargePump('P201', ins=R201-1) T202 = units.OligomerConversionTank('T202', ins=P201-0) F201 = units.PretreatmentFlash('F201', ins=T202-0,outs=(Stream('flash_steam'),Stream('flash_effluent')), P=101325, Q=0) M204 = bst.Mixer('M204', ins=(R201-0, F201-0)) S202 = units.PressureFilter('S202', ins=(F201-1), outs=(Stream('pretreated_stream'),Stream('pretreated_liquid')),moisture_content=0.5,split=find_split_solids(F201-1,non_soluble)) S203 = bst.Splitter('S203', ins=M204-0, outs=(Stream('steam_back'),Stream('residual_steam')), split=0.25) H201 = units.WasteVaporCondenser('H201', ins=S203-1, outs=Stream('condensed_steam'), T=99+273.15, V=0) S203-0-1-M205 steam_out1 = S203-1 steam_inS203 = 0-S203 steam_out0 = S203-0 def update_split(): steam_out1.mol[:] = steam_inS203.mol[:] - steam_out0.mol[:] pretreatment_sys = System('pretreatment_sys', path=(water_recycle_sys, M205, M203, R201, P201, T202, F201, M204, S202, S203, update_split, H201), # TODO: H201 moved to the end, no need to resimulate system recycle=M204-0) T90 = 90+273.15 def f_DSpret(split): S203.split[:] = split for i in range(3): pretreatment_sys.simulate() sobj=M205-0 return sobj.T-T90 pretreatment_sys.specification = BoundedNumericalSpecification(f_DSpret, 0.10, 0.70) ### Fermentation system cellulase_conc = 0.05 cellulase = Stream('cellulase', units='kg/hr', price=price['Enzyme']) ammonia = Stream('ammonia', Ammonia=1051/1000*dryflow,#This is just a initialization units='kg/hr', phase='l', price=price['Ammonia']) process_water2 = Stream('process_water2', T=10+273.15, P=1*101325, Water=1664.8/1000*dryflow,#This is just a guess units='kg/hr') ammonia1 = Stream('ammonia1', Ammonia=26/1000*dryflow,#This is just a initialization units='kg/hr', price=price['Ammonia']) ammonia2 = Stream('ammonia2', 
Ammonia=116/1000*dryflow,#This is just a initialization units='kg/hr', price=price['Ammonia']) ammonia_fresh = Stream('ammonia_fresh', units='kg/hr', price=price['Ammonia']) ammonia_storage = units.DAPTank('Ammonia_storage', ins=ammonia_fresh, outs='Ammonia_fermentation') S301 = bst.ReversedSplitter('S301', ins=ammonia_storage-0, outs=(ammonia,ammonia1, ammonia2)) air1 = Stream('air_lagoon1', O2=51061, N2=168162, phase='g', units='kg/hr') air2 = Stream('air_lagoon2', O2=51061, N2=168162, phase='g', units='kg/hr') J1 = bst.Junction('J1', upstream=S202-0, downstream=Stream()) sacch_split = 0.05#This is just a initialization ammonia_zmass = 0.0052 M301 = bst.Mixer('M301', ins=(ammonia, process_water2)) M302 = bst.Mixer('M302', ins=(J1-0, M301-0)) S303 = units.PressureFilter('S303', ins=(M302-0),outs=(Stream('cooled_hydrolysate'),Stream('residual_water')), moisture_content=0.4,split=find_split_solids(M302-0,non_soluble)) WIS_prehyd = 0.20 cooled_hydrolyzate_pre = M302.outs[0] S303_out0 = S303.outs[0] S303_out1 = S303.outs[1] def update_moisture_content(): F_non_sol_S303in = find_WIS(cooled_hydrolyzate_pre,non_soluble)*cooled_hydrolyzate_pre.F_mass F_sol_S303in = cooled_hydrolyzate_pre.F_mass - F_non_sol_S303in F_sol_S303out = F_non_sol_S303in/WIS_prehyd - cellulase.F_mass - F_non_sol_S303in split_soluble = F_sol_S303out/F_sol_S303in new_split=find_split_solids(cooled_hydrolyzate_pre,non_soluble) new_split[new_split==0] = split_soluble S303_out0.mass[:] = cooled_hydrolyzate_pre.mass[:]*new_split S303_out1.mass[:] = cooled_hydrolyzate_pre.mass[:]*(1-new_split) T203 = units.AmmoniaAdditionTank('T203', ins=S303-0) M303 = units.EnzymeHydrolysateMixer('M303', ins=(T203-0, cellulase)) cellulase_over_WIS = 0.05*cellulase_conc water_over_WIS = 0.05*(1-cellulase_conc) def update_cellulase_and_nutrient_loading(): WIS_premixer = cooled_hydrolyzate_pre.F_mass*find_WIS(cooled_hydrolyzate_pre, non_soluble) cellulase_mass = cellulase_over_WIS*WIS_premixer water_mass = 
water_over_WIS*WIS_premixer cellulase.imass['Cellulase']=cellulase_mass*1.1 cellulase.imass['Water']=water_mass*1.1 # Note: An additional 10% is produced for the media glucose/sophorose mixture # Humbird (2011) p[g. 37 def update_ammonia_loading(): water_cooled_hydrolyzate = cooled_hydrolyzate_pre.imass['Water'] ammonia.F_mass = water_cooled_hydrolyzate*ammonia_zmass M304 = bst.Mixer('M304', ins=(M303-0, None)) R301 = units.SaccharificationAndCoFermentation('R301', ins=(M304-0, ammonia1, air1), outs=(Stream('CO2_1'),Stream('fermentation_slurry'),Stream('saccharified_to_seed')), saccharified_slurry_split = sacch_split) M305 = bst.Mixer('M305', ins=(R301-2, ammonia2, air2)) R302 = units.SeedTrain('R302', ins=M305-0, outs=(Stream('CO2_2'),Stream('effluent'))) T301 = units.SeedHoldTank('T301', ins=R302-1) T301-0-1-M304 air2_over_glucose = (R302.reactions.X[1]*2.17 + R302.reactions.X[3]*1.5/2 - R302.reactions.X[2])*1.1 ammonia2_over_glucose = R302.reactions.X[1]*0.62*1.1 preseed = M305-0 def update_nutrient_loading2(): glucose_preseed = preseed.imol['Glucose'] air2.imol['O2'] = air2_over_glucose * glucose_preseed air2.imol['N2'] = (air2_over_glucose * glucose_preseed)/0.21*0.79 ammonia2_mol = ammonia2_over_glucose * glucose_preseed - preseed.imol['Ammonia'] if ammonia2_mol < 0: ammonia2.imol['NH3'] = 0 else: ammonia2.imol['NH3'] = ammonia2_mol air1_over_glucose = (R301.cofermentation.X[1]*2.17 + R301.cofermentation.X[3]*1.5/2 - R301.cofermentation.X[2])*1.2 ammonia1_over_glucose = R301.cofermentation.X[1]*0.62*1.1 preferm = M304-0 glucose_over_glucan = R301.saccharification.X[0] + R301.saccharification.X[1]*0.5 + R301.saccharification.X[2] def update_nutrient_loading1(): glucose_preferm = preferm.imol['Glucan']*glucose_over_glucan*(1-R301.saccharified_slurry_split) air1.imol['O2'] = air1_over_glucose * glucose_preferm air1.imol['N2'] = (air1_over_glucose * glucose_preferm)/0.21*0.79 ammonia1_mol = ammonia1_over_glucose * glucose_preferm - 
preferm.imol['Ammonia']*(1-R301.saccharified_slurry_split) if ammonia1_mol < 0: ammonia1.imol['NH3'] = 0 else: ammonia1.imol['NH3'] = ammonia1_mol # TODO: Bug in update nutrient loading (not enough O2 to run R301 and R302 seed train) # TODO: so just ignore negative flow in the meanwhile # def ignore_negative_O2_flow(): # for i in (R301.outs + R302.outs): i.imol['O2'] = 0 seed_recycle_sys = System('seed_recycle_sys', path=(M304,update_nutrient_loading1,R301, M305, update_nutrient_loading2, R302, T301), recycle=M304-0) conc_yeast = 3.0 def f_DSferm1(x): sacch_split = x R301.saccharified_slurry_split = sacch_split for i in range(3): seed_recycle_sys.simulate() s_obj2=R301-1 light_ind = s_obj2.chemicals._light_indices l = [a for a in s_obj2.vol[light_ind] if not a==0] v_0 = s_obj2.F_vol - sum(l) conc_yeast_obtained = s_obj2.imass['S_cerevisiae']/v_0 return ((conc_yeast_obtained - conc_yeast)/conc_yeast) seed_recycle_sys.specification=BoundedNumericalSpecification(f_DSferm1, 0.01, 0.35) fermentation_sys = System('fermentation_sys', path=(J1,M301,M302,S303,update_ammonia_loading,T203,update_cellulase_and_nutrient_loading,update_moisture_content,M303,seed_recycle_sys))#update_moisture_content, T_solid_cool = 50.0+273.15 def f_DSferm2(x): mass_water=x process_water2.F_mass = mass_water for i in range(3): fermentation_sys.simulate() s_obj1=M302-0 return ((s_obj1.T-T_solid_cool)/T_solid_cool) fermentation_sys.specification=BoundedNumericalSpecification(f_DSferm2, process_water2.F_mass/2, process_water2.F_mass*2) ### Ethanol purification stripping_water = Stream('stripping_water', Water=26836,#This is just a initialization units='kg/hr') M306 = bst.Mixer('M306', ins=(R302-0, R301-0)) T302 = units.BeerTank('T302',outs=Stream('cool_feed')) # tmo.Stream.default_ID_number = 400 M401 = bst.Mixer('M401', ins=(R301-1, None)) M401-0-T302 D401 = bst.VentScrubber('D401', ins=(stripping_water, M306-0), outs=(Stream('CO2_purified'), Stream('bottom_liquid')), gas=('CO2', 'NH3', 
'O2','N2')) D401-1-1-M401 # Heat up before beer column # Exchange heat with stillage mid_eth_massfrac = 0.50 high_eth_massfrac = 0.915 bott_eth_massfrac = 0.00001 dist_high_pres = 2*101325 high_dist_stream = Stream('high_eth_stream', Ethanol=high_eth_massfrac, Water=1-high_eth_massfrac, units='kg/hr') mid_dist_stream = Stream('mid_eth_stream', Ethanol=mid_eth_massfrac, Water=1-mid_eth_massfrac, units='kg/hr') bottom_stream = Stream('bottom_stream', Ethanol=bott_eth_massfrac,#only an initialization. Later it gets updated with the real composition Water=1-bott_eth_massfrac, units='kg/hr') dist_high_dp = high_dist_stream.dew_point_at_P(dist_high_pres) bott_mid_dp = bottom_stream.dew_point_at_T(dist_high_dp.T - 5) dist_mid_dp = mid_dist_stream.dew_point_at_P(bott_mid_dp.P) bott_low_dp = bottom_stream.dew_point_at_T(dist_mid_dp.T - 5) dist_low_dp = mid_dist_stream.dew_point_at_P(bott_low_dp.P) S401 = bst.Splitter('S401', ins=(T302-0),outs=(Stream('feed_low_pressure',P=bott_low_dp.P),Stream('feed_mid_pressure', P=bott_mid_dp.P)), split=0.5) H402 = bst.HXprocess('H402', ins=(S401-0, None), outs=(Stream('warmed_feed_lp'), Stream('cooled_bottom_water_lp')), U=1.28) H403 = bst.HXprocess('H403', ins=(S401-1, None), outs=(Stream('warmed_feed_mp'), Stream('cooled_bottom_water_mp')), U=1.28) # Beer column Ethanol_MW = chemicals.Ethanol.MW Water_MW = chemicals.Water.MW def Ethanol_molfrac(e): """Return ethanol mol fraction in a ethanol water mixture""" return e/Ethanol_MW / (e/Ethanol_MW + (1-e)/Water_MW) xbot = Ethanol_molfrac(bott_eth_massfrac) ytop = Ethanol_molfrac(mid_eth_massfrac) D402 = units.DistillationColumn('D402', ins=H402-0, P=bott_low_dp.P, y_top=ytop, x_bot=xbot, k=1.5, LHK=('Ethanol', 'Water'),energy_integration=True) D402.tray_material = 'Stainless steel 304' D402.vessel_material = 'Stainless steel 304' D402.BM = 2.4 D402.boiler.U = 1.85 # Condense distillate H402_dist = bst.HXutility('H402_dist', ins=D402-0, V=0,T=dist_low_dp.T-1) P402_2 = bst.Pump('P402_2', 
ins=H402_dist-0, P=bott_mid_dp.P) P402_2.BM = 3.1 D402-1-1-H402 LP_dist_sys = System('LP_dist_sys', path=(H402,D402,H402_dist), recycle=H402-0) D403 = units.DistillationColumn('D403', ins=H403-0, P=bott_mid_dp.P, y_top=ytop, x_bot=xbot, k=1.5, LHK=('Ethanol', 'Water'),energy_integration=True) D403.tray_material = 'Stainless steel 304' D403.vessel_material = 'Stainless steel 304' D403.BM = 2.4 D403.boiler.U = 1.85 # Condense distillate H403_dist = bst.HXutility('H403_dist', ins=D403-0, V=0,T=dist_mid_dp.T-1) D403-1-1-H403 MP_dist_sys = System('MP_dist_sys', path=(H403,D403,H403_dist), recycle=H403-0) M402 = bst.Mixer('M402', ins=(P402_2-0, H403_dist-0),outs=Stream(P=bott_mid_dp.P)) P404 = bst.Pump('P404', ins=M402-0, P=dist_high_pres) M403 = bst.Mixer('M403', ins=(H402-1, H403-1),outs=Stream('bottom_water')) S402 = units.PressureFilter('S402', ins=(M403-0),outs=(Stream('Lignin'),Stream('Thin_spillage')), flux=1220.6*0.8, moisture_content=0.35,split=find_split_solids(M403-0,non_soluble)) # Mix ethanol Recycle (Set-up) M404 = bst.Mixer('M404', ins=(P404-0, None),outs=Stream(P=dist_high_pres)) ytop = Ethanol_molfrac(high_eth_massfrac) D404 = units.DistillationColumn('D404', ins=M404-0, P=dist_high_pres, y_top=ytop, x_bot=xbot, k=1.5, LHK=('Ethanol', 'Water'),energy_integration=True) D404.tray_material = 'Stainless steel 304' D404.vessel_material = 'Stainless steel 304' D404.BM = 2.4 D404.boiler.U = 1.85 P405 = bst.Pump('P405', ins=D404-1,outs=Stream('bottom_water')) # Superheat vapor for mol sieve H404 = bst.HXutility('H404', ins=D404-0, T=dist_high_dp.T+37.0, V=1) # Molecular sieve U401 = bst.MolecularSieve('U401', ins=H404-0, split=(2165.14/13356.04, 1280.06/1383.85), order=('Ethanol', 'Water')) U401-0-1-M404 ethanol_recycle_sys = System('ethanol_recycle_sys', path=(M404, D404, H404, U401), recycle=M404-0) # Condense ethanol product H405 = bst.HXutility('H405', ins=U401-1, V=0,T=dist_high_dp.T-1) T701 = bst.StorageTank('T701', ins=H405-0, tau=7*24, 
vessel_type='Floating roof', vessel_material='Carbon steel') ethanol = Stream('ethanol', price=price['Ethanol']) P701 = bst.Pump('P701', ins=T701-0, outs=ethanol) P701.BM = 3.1 T701.BM = 1.7 vent_stream = M306-0 stripping_water_over_vent = stripping_water.mol / 21202.490455845436 def update_stripping_water(): stripping_water.mol[:] = stripping_water_over_vent * vent_stream.F_mass purification_sys = System('purification_sys', path=(M306, update_stripping_water, D401, M401, T302, S401, MP_dist_sys, LP_dist_sys, P402_2, M402,P404,M403,S402, ethanol_recycle_sys,P405, H405,T701,P701)) def f_DSpur(split): S401.split[:]=split for i in range(3): purification_sys.simulate() heat_cond = D403.condenser.Q + H403_dist.Q heat_boil = D402.boiler.Q return heat_boil + heat_cond #heat_boil and heat_cond have different signs purification_sys.specification=BoundedNumericalSpecification(f_DSpur, 0.10, 0.70) ### Biogas production organic_groups = ['OtherSugars', 'SugarOligomers', 'OrganicSolubleSolids', 'Furfurals', 'OtherOrganics', 'Protein', 'CellMass'] organics = list(sum([chemical_groups[i] for i in organic_groups], ('Ethanol', 'AceticAcid', 'Xylose', 'Glucose','ExtractVol','ExtractNonVol'))) organics.remove('WWTsludge') P_sludge = 0.05/0.91/chemicals.WWTsludge.MW MW = np.array([chemicals.CH4.MW, chemicals.CO2.MW]) CH4_molcomp = 0.60 mass = np.array([CH4_molcomp, 1-CH4_molcomp])*MW mass /= mass.sum() mass *= 0.381/(0.91) P_ch4, P_co2 = mass/MW def anaerobic_rxn(reactant): MW = getattr(chemicals, reactant).MW return rxn.Reaction(f"{1/MW}{reactant} -> {P_ch4}CH4 + {P_co2}CO2 + {P_sludge}WWTsludge", reactant, 0.91) anaerobic_digestion = rxn.ParallelReaction([anaerobic_rxn(i) for i in organics] + [rxn.Reaction(f"H2SO4 -> H2S + 2O2", 'H2SO4', 1.)]) well_water1 = Stream('well_water1', Water=1, T=15+273.15) J5_1 = bst.Junction('J5_1', upstream=S303-1, downstream=Stream()) J5_2 = bst.Junction('J5_2', upstream=S402-1, downstream=Stream()) J5_3 = bst.Junction('J5_3', upstream=S202-1, 
downstream=Stream()) J5_4 = bst.Junction('J5_4', upstream=H201-0, downstream=Stream()) J5_5 = bst.Junction('J5_5', upstream=P405-0, downstream=Stream()) M501 = bst.Mixer('M501', ins=(J5_1-0,J5_2-0, J5_3-0,J5_4-0,J5_5-0)) splits = [('Ethanol', 1, 15), ('Water', 27158, 356069), ('Glucose', 3, 42), ('Xylose', 7, 85), ('OtherSugars', 13, 175), ('SugarOligomers', 10, 130), ('OrganicSolubleSolids', 182, 2387), ('InorganicSolubleSolids', 8, 110), ('Ammonia', 48, 633), ('AceticAcid', 0, 5), ('Furfurals', 5, 70), ('OtherOrganics', 9, 113), ('Cellulose', 19, 6), ('Xylan', 6, 2), ('OtherStructuralCarbohydrates', 1, 0), ('Lignin', 186, 64), ('Protein', 51, 18), ('CellMass', 813, 280), ('OtherInsolubleSolids', 68, 23)] raw_biogas = Stream('raw_biogas', price=price['Pure biogas']*0.33) Tin_digestor = 37 + 273.15 R501 = units.AnaerobicDigestion('R501', ins=(M501-0,well_water1), outs=(raw_biogas, 'waste_effluent','sludge_effluent',''), reactions=anaerobic_digestion, sludge_split=find_split(*zip(*splits)), T=Tin_digestor) digestor_sys = System('digestor_sys', path=(J5_1,J5_2,J5_3,J5_4,J5_5,M501,R501)) ### Waste water treatment combustion = chemicals.get_combustion_reactions() def growth(reactant): f = chemicals.WWTsludge.MW / getattr(chemicals, reactant).MW return rxn.Reaction(f"{f}{reactant} -> WWTsludge", reactant, 1.) 
# Note, nitrogenous species included here, but most of it removed in R601 digester aerobic_digestion = rxn.ParallelReaction([i*0.74 + 0.22*growth(i.reactant) for i in combustion if (i.reactant in organics)]) aerobic_digestion.X[:] = 0.96 # tmo.Stream.default_ID_number = 600 well_water = Stream('well_water', Water=1, T=15+273.15) raw_biogas2 = Stream('raw_biogas2', price=price['Pure biogas']*0.33) WWTC = units.WasteWaterSystemCost('WWTC', ins=R501-1) R601 = units.AnaerobicDigestionWWT('R601', ins=(WWTC-0, well_water), outs=(raw_biogas2,'','',''), reactions=anaerobic_digestion, sludge_split=find_split(*zip(*splits)), T=Tin_digestor-2) air = Stream('air_lagoon', O2=51061, N2=168162, phase='g', units='kg/hr') caustic = Stream('WWT_caustic', Water=2252, NaOH=2252, units='kg/hr', price=price['Caustic']*0.5) # polymer = Stream('WWT polymer') # Empty in humbird report :-/ M602 = bst.Mixer('M602', ins=(R601-1, None)) caustic_over_waste = caustic.mol / 2544300.6261793654 air_over_waste = air.mol / 2544300.6261793654 waste = M602-0 def update_aerobic_input_streams(): F_mass_waste = waste.F_mass caustic.mol[:] = F_mass_waste * caustic_over_waste air.mol[:] = F_mass_waste * air_over_waste R602 = units.AerobicDigestionWWT('R602', ins=(waste, air, caustic), outs=('evaporated_water', ''), reactions=aerobic_digestion) splits = [('Ethanol', 0, 1), ('Water', 381300, 2241169), ('Glucose', 0, 2), ('Xylose', 1, 3), ('OtherSugars', 1, 7), ('SugarOligomers', 1, 6), ('OrganicSolubleSolids', 79, 466), ('InorganicSolubleSolids', 4828, 28378), ('Ammonia', 3, 16), ('Furfurals', 0, 3), ('OtherOrganics', 1, 7), ('CarbonDioxide', 6, 38), ('O2', 3, 17), ('N2', 5, 32), ('Cellulose', 0, 194), ('Xylan', 0, 65), ('OtherStructuralCarbohydrates', 0, 15), ('Lignin', 0, 1925), ('Protein', 0, 90), ('CellMass', 0, 19778), ('OtherInsolubleSolids', 0, 707)] S601 = bst.Splitter('S601', ins=R602-1, split=find_split(*zip(*splits))) S602 = bst.Splitter('S602', ins=S601-1, split=0.96) M603 = bst.Mixer('M603', 
ins=(S602-0, None)) M603-0-1-M602 M604 = bst.Mixer('M604', ins=(R601-2, S602-1)) centrifuge_species = ('Water', 'Glucose', 'Xylose', 'OtherSugars', 'SugarOligomers', 'OrganicSolubleSolids', 'InorganicSolubleSolids', 'Ammonia', 'Furfurals', 'OtherOrganics', 'CO2', 'COxSOxNOxH2S', 'Cellulose', 'Xylan', 'OtherStructuralCarbohydrates', 'Lignin', 'Protein', 'CellMass', 'OtherInsolubleSolids') S623_flow = np.array([7708, 0, 0, 1, 1, 13, 75, 3, 0, 1, 1, 2, 25, 8, 2, 250, 52, 1523, 92]) S616_flow = np.array([109098, 3, 6, 13, 9, 187, 1068, 46, 5, 8, 14, 31, 1, 0, 0, 13, 3, 80, 5]) S603 = bst.Splitter('S603', ins=M604-0, outs=('', 'sludge'), split=find_split(centrifuge_species, S616_flow, S623_flow)) S603-0-1-M603 S604 = bst.Splitter('S604', ins=S601-0, outs=('treated_water', 'waste_brine'), split={'Water': 0.987}) aerobic_recycle_sys = System('aerobic_recycle_sys', path=(M602, update_aerobic_input_streams, R602, S601, S602, M604, S603, M603), recycle=M602-0) aerobic_recycle_sys.converge_method = 'Fixed point' WWT_sys = System('WWT_sys', path=(WWTC, R601, aerobic_recycle_sys, S604)) ### Facilities BT = bst.facilities.BoilerTurbogenerator('BT', ins=(S402-0, '', 'boiler_makeup_water', 'natural_gas', 'lime', 'boilerchems'), turbogenerator_efficiency=0.85) CWP = bst.facilities.ChilledWaterPackage('CWP') CT = bst.facilities.CoolingTower('CT') CT.outs[1].T = 273.15 + 28 water_thermo = tmo.Thermo(tmo.Chemicals(['Water'])) process_water = tmo.Stream(ID='process_water', thermo=water_thermo) process_water_streams = (caustic, stripping_water, process_water1, process_water2, steam, BT-1, CT-1) def update_water_loss(): process_water.imol['Water'] = sum([i.imol['Water'] for i in process_water_streams]) makeup_water = Stream('makeup_water', thermo=water_thermo, price=price['Makeup water']) PWC = bst.facilities.ProcessWaterCenter('PWC', ins=(S604-0, makeup_water), outs=(process_water, ''), makeup_water_streams=(makeup_water,), process_water_streams=process_water_streams) Substance = 
tmo.Chemical.blank('Substance') Substance.at_state(phase='l') Substance.default() substance_thermo = tmo.Thermo(tmo.Chemicals([Substance])) CIP = Stream('CIP', thermo=substance_thermo, flow=(126/83333*dryflow,)) CIP_package = units.CIPpackage('CIP_package', ins=CIP, thermo=substance_thermo) plant_air = Stream('plant_air', flow=(83333/83333*dryflow,), thermo=substance_thermo) ADP = bst.facilities.AirDistributionPackage('ADP', ins=plant_air, thermo=substance_thermo) FT = units.FireWaterTank('FT', ins=Stream('fire_water', flow=(8343/83333*dryflow,), thermo=substance_thermo), thermo=substance_thermo) ### Complete system wheatstraw_sys = System('wheatstraw_sys', path=(pretreatment_sys, fermentation_sys,ammonia_storage,S301, purification_sys, digestor_sys, WWT_sys), facilities=(CWP, BT, CT, update_water_loss, PWC, ADP, CIP_package, S301, ammonia_storage, FT)) return wheatstraw_sys
42.986301
214
0.578043
4a14d3a9120b51f040cd46826a8543e2fe867af3
49,652
py
Python
sklearn/naive_bayes.py
aimoip/scikit-learn
7995941d91b0a06ef2db806be7588e2fb8dd5cb1
[ "BSD-3-Clause" ]
null
null
null
sklearn/naive_bayes.py
aimoip/scikit-learn
7995941d91b0a06ef2db806be7588e2fb8dd5cb1
[ "BSD-3-Clause" ]
null
null
null
sklearn/naive_bayes.py
aimoip/scikit-learn
7995941d91b0a06ef2db806be7588e2fb8dd5cb1
[ "BSD-3-Clause" ]
null
null
null
# -*- coding: utf-8 -*- """ The :mod:`sklearn.naive_bayes` module implements Naive Bayes algorithms. These are supervised learning methods based on applying Bayes' theorem with strong (naive) feature independence assumptions. """ # Author: Vincent Michel <vincent.michel@inria.fr> # Minor fixes by Fabian Pedregosa # Amit Aides <amitibo@tx.technion.ac.il> # Yehuda Finkelstein <yehudaf@tx.technion.ac.il> # Lars Buitinck # Jan Hendrik Metzen <jhm@informatik.uni-bremen.de> # (parts based on earlier work by Mathieu Blondel) # # License: BSD 3 clause import warnings from abc import ABCMeta, abstractmethod import numpy as np from scipy.special import logsumexp from .base import BaseEstimator, ClassifierMixin from .preprocessing import binarize from .preprocessing import LabelBinarizer from .preprocessing import label_binarize from .utils import check_X_y, check_array, deprecated from .utils.extmath import safe_sparse_dot from .utils.multiclass import _check_partial_fit_first_call from .utils.validation import check_is_fitted, check_non_negative, column_or_1d from .utils.validation import _check_sample_weight from .utils.validation import _deprecate_positional_args __all__ = ['BernoulliNB', 'GaussianNB', 'MultinomialNB', 'ComplementNB', 'CategoricalNB'] class _BaseNB(ClassifierMixin, BaseEstimator, metaclass=ABCMeta): """Abstract base class for naive Bayes estimators""" @abstractmethod def _joint_log_likelihood(self, X): """Compute the unnormalized posterior log probability of X I.e. ``log P(c) + log P(x|c)`` for all rows x of X, as an array-like of shape (n_classes, n_samples). Input is passed to _joint_log_likelihood as-is by predict, predict_proba and predict_log_proba. """ @abstractmethod def _check_X(self, X): """To be overridden in subclasses with the actual checks.""" def predict(self, X): """ Perform classification on an array of test vectors X. 
Parameters ---------- X : array-like of shape (n_samples, n_features) Returns ------- C : ndarray of shape (n_samples,) Predicted target values for X """ check_is_fitted(self) X = self._check_X(X) jll = self._joint_log_likelihood(X) try: return self.classes_[np.argmax(jll, axis=1)] except IndexError: print("No pattern found") return -100 def predict_log_proba(self, X): """ Return log-probability estimates for the test vector X. Parameters ---------- X : array-like of shape (n_samples, n_features) Returns ------- C : array-like of shape (n_samples, n_classes) Returns the log-probability of the samples for each class in the model. The columns correspond to the classes in sorted order, as they appear in the attribute :term:`classes_`. """ check_is_fitted(self) X = self._check_X(X) jll = self._joint_log_likelihood(X) # normalize by P(x) = P(f_1, ..., f_n) log_prob_x = logsumexp(jll, axis=1) return jll - np.atleast_2d(log_prob_x).T def predict_proba(self, X): """ Return probability estimates for the test vector X. Parameters ---------- X : array-like of shape (n_samples, n_features) Returns ------- C : array-like of shape (n_samples, n_classes) Returns the probability of the samples for each class in the model. The columns correspond to the classes in sorted order, as they appear in the attribute :term:`classes_`. """ return np.exp(self.predict_log_proba(X)) class GaussianNB(_BaseNB): """ Gaussian Naive Bayes (GaussianNB) Can perform online updates to model parameters via :meth:`partial_fit`. For details on algorithm used to update feature means and variance online, see Stanford CS tech report STAN-CS-79-773 by Chan, Golub, and LeVeque: http://i.stanford.edu/pub/cstr/reports/cs/tr/79/773/CS-TR-79-773.pdf Read more in the :ref:`User Guide <gaussian_naive_bayes>`. Parameters ---------- priors : array-like of shape (n_classes,) Prior probabilities of the classes. If specified the priors are not adjusted according to the data. 
var_smoothing : float, default=1e-9 Portion of the largest variance of all features that is added to variances for calculation stability. .. versionadded:: 0.20 Attributes ---------- class_count_ : ndarray of shape (n_classes,) number of training samples observed in each class. class_prior_ : ndarray of shape (n_classes,) probability of each class. classes_ : ndarray of shape (n_classes,) class labels known to the classifier epsilon_ : float absolute additive value to variances sigma_ : ndarray of shape (n_classes, n_features) Variance of each feature per class. .. deprecated:: 1.0 `sigma_` is deprecated in 1.0 and will be removed in 1.2. Use `var_` instead. var_ : ndarray of shape (n_classes, n_features) Variance of each feature per class. .. versionadded:: 1.0 theta_ : ndarray of shape (n_classes, n_features) mean of each feature per class Examples -------- >>> import numpy as np >>> X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]]) >>> Y = np.array([1, 1, 1, 2, 2, 2]) >>> from sklearn.naive_bayes import GaussianNB >>> clf = GaussianNB() >>> clf.fit(X, Y) GaussianNB() >>> print(clf.predict([[-0.8, -1]])) [1] >>> clf_pf = GaussianNB() >>> clf_pf.partial_fit(X, Y, np.unique(Y)) GaussianNB() >>> print(clf_pf.predict([[-0.8, -1]])) [1] """ @_deprecate_positional_args def __init__(self, *, priors=None, var_smoothing=1e-9): self.priors = priors self.var_smoothing = var_smoothing def fit(self, X, y, sample_weight=None): """Fit Gaussian Naive Bayes according to X, y Parameters ---------- X : array-like of shape (n_samples, n_features) Training vectors, where n_samples is the number of samples and n_features is the number of features. y : array-like of shape (n_samples,) Target values. sample_weight : array-like of shape (n_samples,), default=None Weights applied to individual samples (1. for unweighted). .. versionadded:: 0.17 Gaussian Naive Bayes supports fitting with *sample_weight*. 
Returns ------- self : object """ X, y = self._validate_data(X, y) y = column_or_1d(y, warn=True) return self._partial_fit(X, y, np.unique(y), _refit=True, sample_weight=sample_weight) def _check_X(self, X): return check_array(X) @staticmethod def _update_mean_variance(n_past, mu, var, X, sample_weight=None): """Compute online update of Gaussian mean and variance. Given starting sample count, mean, and variance, a new set of points X, and optionally sample weights, return the updated mean and variance. (NB - each dimension (column) in X is treated as independent -- you get variance, not covariance). Can take scalar mean and variance, or vector mean and variance to simultaneously update a number of independent Gaussians. See Stanford CS tech report STAN-CS-79-773 by Chan, Golub, and LeVeque: http://i.stanford.edu/pub/cstr/reports/cs/tr/79/773/CS-TR-79-773.pdf Parameters ---------- n_past : int Number of samples represented in old mean and variance. If sample weights were given, this should contain the sum of sample weights represented in old mean and variance. mu : array-like of shape (number of Gaussians,) Means for Gaussians in original set. var : array-like of shape (number of Gaussians,) Variances for Gaussians in original set. sample_weight : array-like of shape (n_samples,), default=None Weights applied to individual samples (1. for unweighted). Returns ------- total_mu : array-like of shape (number of Gaussians,) Updated mean for each Gaussian over the combined set. total_var : array-like of shape (number of Gaussians,) Updated variance for each Gaussian over the combined set. 
""" if X.shape[0] == 0: return mu, var # Compute (potentially weighted) mean and variance of new datapoints if sample_weight is not None: n_new = float(sample_weight.sum()) new_mu = np.average(X, axis=0, weights=sample_weight) new_var = np.average((X - new_mu) ** 2, axis=0, weights=sample_weight) else: n_new = X.shape[0] new_var = np.var(X, axis=0) new_mu = np.mean(X, axis=0) if n_past == 0: return new_mu, new_var n_total = float(n_past + n_new) # Combine mean of old and new data, taking into consideration # (weighted) number of observations total_mu = (n_new * new_mu + n_past * mu) / n_total # Combine variance of old and new data, taking into consideration # (weighted) number of observations. This is achieved by combining # the sum-of-squared-differences (ssd) old_ssd = n_past * var new_ssd = n_new * new_var total_ssd = (old_ssd + new_ssd + (n_new * n_past / n_total) * (mu - new_mu) ** 2) total_var = total_ssd / n_total return total_mu, total_var def partial_fit(self, X, y, classes=None, sample_weight=None): """Incremental fit on a batch of samples. This method is expected to be called several times consecutively on different chunks of a dataset so as to implement out-of-core or online learning. This is especially useful when the whole dataset is too big to fit in memory at once. This method has some performance and numerical stability overhead, hence it is better to call partial_fit on chunks of data that are as large as possible (as long as fitting in the memory budget) to hide the overhead. Parameters ---------- X : array-like of shape (n_samples, n_features) Training vectors, where n_samples is the number of samples and n_features is the number of features. y : array-like of shape (n_samples,) Target values. classes : array-like of shape (n_classes,), default=None List of all the classes that can possibly appear in the y vector. Must be provided at the first call to partial_fit, can be omitted in subsequent calls. 
sample_weight : array-like of shape (n_samples,), default=None Weights applied to individual samples (1. for unweighted). .. versionadded:: 0.17 Returns ------- self : object """ return self._partial_fit(X, y, classes, _refit=False, sample_weight=sample_weight) def _partial_fit(self, X, y, classes=None, _refit=False, sample_weight=None): """Actual implementation of Gaussian NB fitting. Parameters ---------- X : array-like of shape (n_samples, n_features) Training vectors, where n_samples is the number of samples and n_features is the number of features. y : array-like of shape (n_samples,) Target values. classes : array-like of shape (n_classes,), default=None List of all the classes that can possibly appear in the y vector. Must be provided at the first call to partial_fit, can be omitted in subsequent calls. _refit : bool, default=False If true, act as though this were the first time we called _partial_fit (ie, throw away any past fitting and start over). sample_weight : array-like of shape (n_samples,), default=None Weights applied to individual samples (1. for unweighted). Returns ------- self : object """ X, y = check_X_y(X, y) if sample_weight is not None: sample_weight = _check_sample_weight(sample_weight, X) # If the ratio of data variance between dimensions is too small, it # will cause numerical errors. To address this, we artificially # boost the variance by epsilon, a small fraction of the standard # deviation of the largest dimension. 
        # Small additive variance (a fraction of the largest per-feature
        # variance) used below for numerical stability.
        self.epsilon_ = self.var_smoothing * np.var(X, axis=0).max()

        if _refit:
            self.classes_ = None

        if _check_partial_fit_first_call(self, classes):
            # This is the first call to partial_fit:
            # initialize various cumulative counters
            n_features = X.shape[1]
            n_classes = len(self.classes_)
            self.theta_ = np.zeros((n_classes, n_features))
            self.var_ = np.zeros((n_classes, n_features))

            self.class_count_ = np.zeros(n_classes, dtype=np.float64)

            # Initialise the class prior
            # Take into account the priors
            if self.priors is not None:
                priors = np.asarray(self.priors)
                # Check that the provided priors match the number of classes
                if len(priors) != n_classes:
                    raise ValueError('Number of priors must match number of'
                                     ' classes.')
                # Check that the sum is 1
                if not np.isclose(priors.sum(), 1.0):
                    raise ValueError('The sum of the priors should be 1.')
                # Check that the priors are non-negative
                if (priors < 0).any():
                    raise ValueError('Priors must be non-negative.')
                self.class_prior_ = priors
            else:
                # Initialize the priors to zeros for each class
                self.class_prior_ = np.zeros(len(self.classes_),
                                             dtype=np.float64)
        else:
            if X.shape[1] != self.theta_.shape[1]:
                msg = "Number of features %d does not match previous data %d."
raise ValueError(msg % (X.shape[1], self.theta_.shape[1])) # Put epsilon back in each time self.var_[:, :] -= self.epsilon_ classes = self.classes_ unique_y = np.unique(y) unique_y_in_classes = np.in1d(unique_y, classes) if not np.all(unique_y_in_classes): raise ValueError("The target label(s) %s in y do not exist in the " "initial classes %s" % (unique_y[~unique_y_in_classes], classes)) for y_i in unique_y: i = classes.searchsorted(y_i) X_i = X[y == y_i, :] if sample_weight is not None: sw_i = sample_weight[y == y_i] N_i = sw_i.sum() else: sw_i = None N_i = X_i.shape[0] new_theta, new_sigma = self._update_mean_variance( self.class_count_[i], self.theta_[i, :], self.var_[i, :], X_i, sw_i) self.theta_[i, :] = new_theta self.var_[i, :] = new_sigma self.class_count_[i] += N_i self.var_[:, :] += self.epsilon_ # Update if only no priors is provided if self.priors is None: # Empirical prior, with sample_weight taken into account self.class_prior_ = self.class_count_ / self.class_count_.sum() return self def _joint_log_likelihood(self, X): joint_log_likelihood = [] for i in range(np.size(self.classes_)): jointi = np.log(self.class_prior_[i]) n_ij = - 0.5 * np.sum(np.log(2. * np.pi * self.var_[i, :])) n_ij -= 0.5 * np.sum(((X - self.theta_[i, :]) ** 2) / (self.var_[i, :]), 1) joint_log_likelihood.append(jointi + n_ij) joint_log_likelihood = np.array(joint_log_likelihood).T return joint_log_likelihood @deprecated( # type: ignore "Attribute sigma_ was deprecated in 1.0 and will be removed in" "1.2. Use var_ instead." 
) @property def sigma_(self): return self.var_ _ALPHA_MIN = 1e-10 class _BaseDiscreteNB(_BaseNB): """Abstract base class for naive Bayes on discrete/categorical data Any estimator based on this class should provide: __init__ _joint_log_likelihood(X) as per _BaseNB """ def _check_X(self, X): return check_array(X, accept_sparse='csr') def _check_X_y(self, X, y): return self._validate_data(X, y, accept_sparse='csr') def _update_class_log_prior(self, class_prior=None): n_classes = len(self.classes_) if class_prior is not None: if len(class_prior) != n_classes: raise ValueError("Number of priors must match number of" " classes.") self.class_log_prior_ = np.log(class_prior) elif self.fit_prior: with warnings.catch_warnings(): # silence the warning when count is 0 because class was not yet # observed warnings.simplefilter("ignore", RuntimeWarning) log_class_count = np.log(self.class_count_) # empirical prior, with sample_weight taken into account self.class_log_prior_ = (log_class_count - np.log(self.class_count_.sum())) else: self.class_log_prior_ = np.full(n_classes, -np.log(n_classes)) def _check_alpha(self): if np.min(self.alpha) < 0: raise ValueError('Smoothing parameter alpha = %.1e. ' 'alpha should be > 0.' % np.min(self.alpha)) if isinstance(self.alpha, np.ndarray): if not self.alpha.shape[0] == self.n_features_: raise ValueError("alpha should be a scalar or a numpy array " "with shape [n_features]") if np.min(self.alpha) < _ALPHA_MIN: warnings.warn('alpha too small will result in numeric errors, ' 'setting alpha = %.1e' % _ALPHA_MIN) return np.maximum(self.alpha, _ALPHA_MIN) return self.alpha def partial_fit(self, X, y, classes=None, sample_weight=None): """Incremental fit on a batch of samples. This method is expected to be called several times consecutively on different chunks of a dataset so as to implement out-of-core or online learning. This is especially useful when the whole dataset is too big to fit in memory at once. 
This method has some performance overhead hence it is better to call partial_fit on chunks of data that are as large as possible (as long as fitting in the memory budget) to hide the overhead. Parameters ---------- X : {array-like, sparse matrix} of shape (n_samples, n_features) Training vectors, where n_samples is the number of samples and n_features is the number of features. y : array-like of shape (n_samples,) Target values. classes : array-like of shape (n_classes), default=None List of all the classes that can possibly appear in the y vector. Must be provided at the first call to partial_fit, can be omitted in subsequent calls. sample_weight : array-like of shape (n_samples,), default=None Weights applied to individual samples (1. for unweighted). Returns ------- self : object """ X, y = self._check_X_y(X, y) _, n_features = X.shape if _check_partial_fit_first_call(self, classes): # This is the first call to partial_fit: # initialize various cumulative counters n_effective_classes = len(classes) if len(classes) > 1 else 2 self._init_counters(n_effective_classes, n_features) self.n_features_ = n_features elif n_features != self.n_features_: msg = "Number of features %d does not match previous data %d." raise ValueError(msg % (n_features, self.n_features_)) Y = label_binarize(y, classes=self.classes_) if Y.shape[1] == 1: Y = np.concatenate((1 - Y, Y), axis=1) if X.shape[0] != Y.shape[0]: msg = "X.shape[0]=%d and y.shape[0]=%d are incompatible." raise ValueError(msg % (X.shape[0], y.shape[0])) # label_binarize() returns arrays with dtype=np.int64. 
# We convert it to np.float64 to support sample_weight consistently Y = Y.astype(np.float64, copy=False) if sample_weight is not None: sample_weight = _check_sample_weight(sample_weight, X) sample_weight = np.atleast_2d(sample_weight) Y *= sample_weight.T class_prior = self.class_prior # Count raw events from data before updating the class log prior # and feature log probas self._count(X, Y) # XXX: OPTIM: we could introduce a public finalization method to # be called by the user explicitly just once after several consecutive # calls to partial_fit and prior any call to predict[_[log_]proba] # to avoid computing the smooth log probas at each call to partial fit alpha = self._check_alpha() self._update_feature_log_prob(alpha) self._update_class_log_prior(class_prior=class_prior) return self def fit(self, X, y, sample_weight=None): """Fit Naive Bayes classifier according to X, y Parameters ---------- X : {array-like, sparse matrix} of shape (n_samples, n_features) Training vectors, where n_samples is the number of samples and n_features is the number of features. y : array-like of shape (n_samples,) Target values. sample_weight : array-like of shape (n_samples,), default=None Weights applied to individual samples (1. for unweighted). Returns ------- self : object """ X, y = self._check_X_y(X, y) _, n_features = X.shape self.n_features_ = n_features labelbin = LabelBinarizer() Y = labelbin.fit_transform(y) self.classes_ = labelbin.classes_ if Y.shape[1] == 1: Y = np.concatenate((1 - Y, Y), axis=1) # LabelBinarizer().fit_transform() returns arrays with dtype=np.int64. 
# We convert it to np.float64 to support sample_weight consistently; # this means we also don't have to cast X to floating point if sample_weight is not None: Y = Y.astype(np.float64, copy=False) sample_weight = _check_sample_weight(sample_weight, X) sample_weight = np.atleast_2d(sample_weight) Y *= sample_weight.T class_prior = self.class_prior # Count raw events from data before updating the class log prior # and feature log probas n_effective_classes = Y.shape[1] self._init_counters(n_effective_classes, n_features) self._count(X, Y) alpha = self._check_alpha() self._update_feature_log_prob(alpha) self._update_class_log_prior(class_prior=class_prior) return self def _init_counters(self, n_effective_classes, n_features): self.class_count_ = np.zeros(n_effective_classes, dtype=np.float64) self.feature_count_ = np.zeros((n_effective_classes, n_features), dtype=np.float64) # mypy error: Decorated property not supported @deprecated("Attribute coef_ was deprecated in " # type: ignore "version 0.24 and will be removed in 1.1 (renaming of 0.26).") @property def coef_(self): return (self.feature_log_prob_[1:] if len(self.classes_) == 2 else self.feature_log_prob_) # mypy error: Decorated property not supported @deprecated("Attribute intercept_ was deprecated in " # type: ignore "version 0.24 and will be removed in 1.1 (renaming of 0.26).") @property def intercept_(self): return (self.class_log_prior_[1:] if len(self.classes_) == 2 else self.class_log_prior_) def _more_tags(self): return {'poor_score': True} class MultinomialNB(_BaseDiscreteNB): """ Naive Bayes classifier for multinomial models The multinomial Naive Bayes classifier is suitable for classification with discrete features (e.g., word counts for text classification). The multinomial distribution normally requires integer feature counts. However, in practice, fractional counts such as tf-idf may also work. Read more in the :ref:`User Guide <multinomial_naive_bayes>`. 
Parameters ---------- alpha : float, default=1.0 Additive (Laplace/Lidstone) smoothing parameter (0 for no smoothing). fit_prior : bool, default=True Whether to learn class prior probabilities or not. If false, a uniform prior will be used. class_prior : array-like of shape (n_classes,), default=None Prior probabilities of the classes. If specified the priors are not adjusted according to the data. Attributes ---------- class_count_ : ndarray of shape (n_classes,) Number of samples encountered for each class during fitting. This value is weighted by the sample weight when provided. class_log_prior_ : ndarray of shape (n_classes, ) Smoothed empirical log probability for each class. classes_ : ndarray of shape (n_classes,) Class labels known to the classifier coef_ : ndarray of shape (n_classes, n_features) Mirrors ``feature_log_prob_`` for interpreting `MultinomialNB` as a linear model. .. deprecated:: 0.24 ``coef_`` is deprecated in 0.24 and will be removed in 1.1 (renaming of 0.26). feature_count_ : ndarray of shape (n_classes, n_features) Number of samples encountered for each (class, feature) during fitting. This value is weighted by the sample weight when provided. feature_log_prob_ : ndarray of shape (n_classes, n_features) Empirical log probability of features given a class, ``P(x_i|y)``. intercept_ : ndarray of shape (n_classes,) Mirrors ``class_log_prior_`` for interpreting `MultinomialNB` as a linear model. .. deprecated:: 0.24 ``intercept_`` is deprecated in 0.24 and will be removed in 1.1 (renaming of 0.26). n_features_ : int Number of features of each sample. Examples -------- >>> import numpy as np >>> rng = np.random.RandomState(1) >>> X = rng.randint(5, size=(6, 100)) >>> y = np.array([1, 2, 3, 4, 5, 6]) >>> from sklearn.naive_bayes import MultinomialNB >>> clf = MultinomialNB() >>> clf.fit(X, y) MultinomialNB() >>> print(clf.predict(X[2:3])) [3] Notes ----- For the rationale behind the names `coef_` and `intercept_`, i.e. 
naive Bayes as a linear classifier, see J. Rennie et al. (2003), Tackling the poor assumptions of naive Bayes text classifiers, ICML. References ---------- C.D. Manning, P. Raghavan and H. Schuetze (2008). Introduction to Information Retrieval. Cambridge University Press, pp. 234-265. https://nlp.stanford.edu/IR-book/html/htmledition/naive-bayes-text-classification-1.html """ @_deprecate_positional_args def __init__(self, *, alpha=1.0, fit_prior=True, class_prior=None): self.alpha = alpha self.fit_prior = fit_prior self.class_prior = class_prior def _more_tags(self): return {'requires_positive_X': True} def _count(self, X, Y): """Count and smooth feature occurrences.""" check_non_negative(X, "MultinomialNB (input X)") self.feature_count_ += safe_sparse_dot(Y.T, X) self.class_count_ += Y.sum(axis=0) def _update_feature_log_prob(self, alpha): """Apply smoothing to raw counts and recompute log probabilities""" smoothed_fc = self.feature_count_ + alpha smoothed_cc = smoothed_fc.sum(axis=1) self.feature_log_prob_ = (np.log(smoothed_fc) - np.log(smoothed_cc.reshape(-1, 1))) def _joint_log_likelihood(self, X): """Calculate the posterior log probability of the samples X""" return (safe_sparse_dot(X, self.feature_log_prob_.T) + self.class_log_prior_) class ComplementNB(_BaseDiscreteNB): """The Complement Naive Bayes classifier described in Rennie et al. (2003). The Complement Naive Bayes classifier was designed to correct the "severe assumptions" made by the standard Multinomial Naive Bayes classifier. It is particularly suited for imbalanced data sets. Read more in the :ref:`User Guide <complement_naive_bayes>`. .. versionadded:: 0.20 Parameters ---------- alpha : float, default=1.0 Additive (Laplace/Lidstone) smoothing parameter (0 for no smoothing). fit_prior : bool, default=True Only used in edge case with a single class in the training set. class_prior : array-like of shape (n_classes,), default=None Prior probabilities of the classes. Not used. 
    norm : bool, default=False
        Whether or not a second normalization of the weights is performed. The
        default behavior mirrors the implementations found in Mahout and Weka,
        which do not follow the full algorithm described in Table 9 of the
        paper.

    Attributes
    ----------
    class_count_ : ndarray of shape (n_classes,)
        Number of samples encountered for each class during fitting. This
        value is weighted by the sample weight when provided.

    class_log_prior_ : ndarray of shape (n_classes,)
        Smoothed empirical log probability for each class. Only used in edge
        case with a single class in the training set.

    classes_ : ndarray of shape (n_classes,)
        Class labels known to the classifier

    coef_ : ndarray of shape (n_classes, n_features)
        Mirrors ``feature_log_prob_`` for interpreting `ComplementNB`
        as a linear model.

        .. deprecated:: 0.24
            ``coef_`` is deprecated in 0.24 and will be removed in 1.1
            (renaming of 0.26).

    feature_all_ : ndarray of shape (n_features,)
        Number of samples encountered for each feature during fitting. This
        value is weighted by the sample weight when provided.

    feature_count_ : ndarray of shape (n_classes, n_features)
        Number of samples encountered for each (class, feature) during
        fitting. This value is weighted by the sample weight when provided.

    feature_log_prob_ : ndarray of shape (n_classes, n_features)
        Empirical weights for class complements.

    intercept_ : ndarray of shape (n_classes,)
        Mirrors ``class_log_prior_`` for interpreting `ComplementNB`
        as a linear model.

        .. deprecated:: 0.24
            ``intercept_`` is deprecated in 0.24 and will be removed in 1.1
            (renaming of 0.26).

    n_features_ : int
        Number of features of each sample.

    Examples
    --------
    >>> import numpy as np
    >>> rng = np.random.RandomState(1)
    >>> X = rng.randint(5, size=(6, 100))
    >>> y = np.array([1, 2, 3, 4, 5, 6])
    >>> from sklearn.naive_bayes import ComplementNB
    >>> clf = ComplementNB()
    >>> clf.fit(X, y)
    ComplementNB()
    >>> print(clf.predict(X[2:3]))
    [3]

    References
    ----------
    Rennie, J.
D., Shih, L., Teevan, J., & Karger, D. R. (2003). Tackling the poor assumptions of naive bayes text classifiers. In ICML (Vol. 3, pp. 616-623). https://people.csail.mit.edu/jrennie/papers/icml03-nb.pdf """ @_deprecate_positional_args def __init__(self, *, alpha=1.0, fit_prior=True, class_prior=None, norm=False): self.alpha = alpha self.fit_prior = fit_prior self.class_prior = class_prior self.norm = norm def _more_tags(self): return {'requires_positive_X': True} def _count(self, X, Y): """Count feature occurrences.""" check_non_negative(X, "ComplementNB (input X)") self.feature_count_ += safe_sparse_dot(Y.T, X) self.class_count_ += Y.sum(axis=0) self.feature_all_ = self.feature_count_.sum(axis=0) def _update_feature_log_prob(self, alpha): """Apply smoothing to raw counts and compute the weights.""" comp_count = self.feature_all_ + alpha - self.feature_count_ logged = np.log(comp_count / comp_count.sum(axis=1, keepdims=True)) # _BaseNB.predict uses argmax, but ComplementNB operates with argmin. if self.norm: summed = logged.sum(axis=1, keepdims=True) feature_log_prob = logged / summed else: feature_log_prob = -logged self.feature_log_prob_ = feature_log_prob def _joint_log_likelihood(self, X): """Calculate the class scores for the samples in X.""" jll = safe_sparse_dot(X, self.feature_log_prob_.T) if len(self.classes_) == 1: jll += self.class_log_prior_ return jll class BernoulliNB(_BaseDiscreteNB): """Naive Bayes classifier for multivariate Bernoulli models. Like MultinomialNB, this classifier is suitable for discrete data. The difference is that while MultinomialNB works with occurrence counts, BernoulliNB is designed for binary/boolean features. Read more in the :ref:`User Guide <bernoulli_naive_bayes>`. Parameters ---------- alpha : float, default=1.0 Additive (Laplace/Lidstone) smoothing parameter (0 for no smoothing). binarize : float or None, default=0.0 Threshold for binarizing (mapping to booleans) of sample features. 
If None, input is presumed to already consist of binary vectors. fit_prior : bool, default=True Whether to learn class prior probabilities or not. If false, a uniform prior will be used. class_prior : array-like of shape (n_classes,), default=None Prior probabilities of the classes. If specified the priors are not adjusted according to the data. Attributes ---------- class_count_ : ndarray of shape (n_classes) Number of samples encountered for each class during fitting. This value is weighted by the sample weight when provided. class_log_prior_ : ndarray of shape (n_classes) Log probability of each class (smoothed). classes_ : ndarray of shape (n_classes,) Class labels known to the classifier coef_ : ndarray of shape (n_classes, n_features) Mirrors ``feature_log_prob_`` for interpreting `BernoulliNB` as a linear model. feature_count_ : ndarray of shape (n_classes, n_features) Number of samples encountered for each (class, feature) during fitting. This value is weighted by the sample weight when provided. feature_log_prob_ : ndarray of shape (n_classes, n_features) Empirical log probability of features given a class, P(x_i|y). intercept_ : ndarray of shape (n_classes,) Mirrors ``class_log_prior_`` for interpreting `BernoulliNB` as a linear model. n_features_ : int Number of features of each sample. Examples -------- >>> import numpy as np >>> rng = np.random.RandomState(1) >>> X = rng.randint(5, size=(6, 100)) >>> Y = np.array([1, 2, 3, 4, 4, 5]) >>> from sklearn.naive_bayes import BernoulliNB >>> clf = BernoulliNB() >>> clf.fit(X, Y) BernoulliNB() >>> print(clf.predict(X[2:3])) [3] References ---------- C.D. Manning, P. Raghavan and H. Schuetze (2008). Introduction to Information Retrieval. Cambridge University Press, pp. 234-265. https://nlp.stanford.edu/IR-book/html/htmledition/the-bernoulli-model-1.html A. McCallum and K. Nigam (1998). A comparison of event models for naive Bayes text classification. Proc. 
AAAI/ICML-98 Workshop on Learning for Text Categorization, pp. 41-48. V. Metsis, I. Androutsopoulos and G. Paliouras (2006). Spam filtering with naive Bayes -- Which naive Bayes? 3rd Conf. on Email and Anti-Spam (CEAS). """ @_deprecate_positional_args def __init__(self, *, alpha=1.0, binarize=.0, fit_prior=True, class_prior=None): self.alpha = alpha self.binarize = binarize self.fit_prior = fit_prior self.class_prior = class_prior def _check_X(self, X): X = super()._check_X(X) if self.binarize is not None: X = binarize(X, threshold=self.binarize) return X def _check_X_y(self, X, y): X, y = super()._check_X_y(X, y) if self.binarize is not None: X = binarize(X, threshold=self.binarize) return X, y def _count(self, X, Y): """Count and smooth feature occurrences.""" self.feature_count_ += safe_sparse_dot(Y.T, X) self.class_count_ += Y.sum(axis=0) def _update_feature_log_prob(self, alpha): """Apply smoothing to raw counts and recompute log probabilities""" smoothed_fc = self.feature_count_ + alpha smoothed_cc = self.class_count_ + alpha * 2 self.feature_log_prob_ = (np.log(smoothed_fc) - np.log(smoothed_cc.reshape(-1, 1))) def _joint_log_likelihood(self, X): """Calculate the posterior log probability of the samples X""" n_classes, n_features = self.feature_log_prob_.shape n_samples, n_features_X = X.shape if n_features_X != n_features: raise ValueError("Expected input with %d features, got %d instead" % (n_features, n_features_X)) neg_prob = np.log(1 - np.exp(self.feature_log_prob_)) # Compute neg_prob · (1 - X).T as ∑neg_prob - X · neg_prob jll = safe_sparse_dot(X, (self.feature_log_prob_ - neg_prob).T) jll += self.class_log_prior_ + neg_prob.sum(axis=1) return jll class CategoricalNB(_BaseDiscreteNB): """Naive Bayes classifier for categorical features The categorical Naive Bayes classifier is suitable for classification with discrete features that are categorically distributed. The categories of each feature are drawn from a categorical distribution. 
Read more in the :ref:`User Guide <categorical_naive_bayes>`. Parameters ---------- alpha : float, default=1.0 Additive (Laplace/Lidstone) smoothing parameter (0 for no smoothing). fit_prior : bool, default=True Whether to learn class prior probabilities or not. If false, a uniform prior will be used. class_prior : array-like of shape (n_classes,), default=None Prior probabilities of the classes. If specified the priors are not adjusted according to the data. min_categories : int or array-like of shape (n_features,), default=None Minimum number of categories per feature. - integer: Sets the minimum number of categories per feature to `n_categories` for each features. - array-like: shape (n_features,) where `n_categories[i]` holds the minimum number of categories for the ith column of the input. - None (default): Determines the number of categories automatically from the training data. .. versionadded:: 0.24 Attributes ---------- category_count_ : list of arrays of shape (n_features,) Holds arrays of shape (n_classes, n_categories of respective feature) for each feature. Each array provides the number of samples encountered for each class and category of the specific feature. class_count_ : ndarray of shape (n_classes,) Number of samples encountered for each class during fitting. This value is weighted by the sample weight when provided. class_log_prior_ : ndarray of shape (n_classes,) Smoothed empirical log probability for each class. classes_ : ndarray of shape (n_classes,) Class labels known to the classifier feature_log_prob_ : list of arrays of shape (n_features,) Holds arrays of shape (n_classes, n_categories of respective feature) for each feature. Each array provides the empirical log probability of categories given the respective feature and class, ``P(x_i|y)``. n_features_ : int Number of features of each sample. n_categories_ : ndarray of shape (n_features,), dtype=np.int64 Number of categories for each feature. 
This value is inferred from the data or set by the minimum number of categories. .. versionadded:: 0.24 Examples -------- >>> import numpy as np >>> rng = np.random.RandomState(1) >>> X = rng.randint(5, size=(6, 100)) >>> y = np.array([1, 2, 3, 4, 5, 6]) >>> from sklearn.naive_bayes import CategoricalNB >>> clf = CategoricalNB() >>> clf.fit(X, y) CategoricalNB() >>> print(clf.predict(X[2:3])) [3] """ @_deprecate_positional_args def __init__(self, *, alpha=1.0, fit_prior=True, class_prior=None, min_categories=None): self.alpha = alpha self.fit_prior = fit_prior self.class_prior = class_prior self.min_categories = min_categories def fit(self, X, y, sample_weight=None): """Fit Naive Bayes classifier according to X, y Parameters ---------- X : {array-like, sparse matrix} of shape (n_samples, n_features) Training vectors, where n_samples is the number of samples and n_features is the number of features. Here, each feature of X is assumed to be from a different categorical distribution. It is further assumed that all categories of each feature are represented by the numbers 0, ..., n - 1, where n refers to the total number of categories for the given feature. This can, for instance, be achieved with the help of OrdinalEncoder. y : array-like of shape (n_samples,) Target values. sample_weight : array-like of shape (n_samples), default=None Weights applied to individual samples (1. for unweighted). Returns ------- self : object """ return super().fit(X, y, sample_weight=sample_weight) def partial_fit(self, X, y, classes=None, sample_weight=None): """Incremental fit on a batch of samples. This method is expected to be called several times consecutively on different chunks of a dataset so as to implement out-of-core or online learning. This is especially useful when the whole dataset is too big to fit in memory at once. 
This method has some performance overhead hence it is better to call partial_fit on chunks of data that are as large as possible (as long as fitting in the memory budget) to hide the overhead. Parameters ---------- X : {array-like, sparse matrix} of shape (n_samples, n_features) Training vectors, where n_samples is the number of samples and n_features is the number of features. Here, each feature of X is assumed to be from a different categorical distribution. It is further assumed that all categories of each feature are represented by the numbers 0, ..., n - 1, where n refers to the total number of categories for the given feature. This can, for instance, be achieved with the help of OrdinalEncoder. y : array-like of shape (n_samples) Target values. classes : array-like of shape (n_classes), default=None List of all the classes that can possibly appear in the y vector. Must be provided at the first call to partial_fit, can be omitted in subsequent calls. sample_weight : array-like of shape (n_samples), default=None Weights applied to individual samples (1. for unweighted). 
Returns ------- self : object """ return super().partial_fit(X, y, classes, sample_weight=sample_weight) def _more_tags(self): return {'requires_positive_X': True} def _check_X(self, X): X = check_array(X, dtype='int', accept_sparse=False, force_all_finite=True) check_non_negative(X, "CategoricalNB (input X)") return X def _check_X_y(self, X, y): X, y = self._validate_data(X, y, dtype='int', accept_sparse=False, force_all_finite=True) check_non_negative(X, "CategoricalNB (input X)") return X, y def _init_counters(self, n_effective_classes, n_features): self.class_count_ = np.zeros(n_effective_classes, dtype=np.float64) self.category_count_ = [np.zeros((n_effective_classes, 0)) for _ in range(n_features)] @staticmethod def _validate_n_categories(X, min_categories): # rely on max for n_categories categories are encoded between 0...n-1 n_categories_X = X.max(axis=0) + 1 min_categories_ = np.array(min_categories) if min_categories is not None: if not np.issubdtype(min_categories_.dtype, np.signedinteger): raise ValueError( f"'min_categories' should have integral type. Got " f"{min_categories_.dtype} instead." ) n_categories_ = np.maximum(n_categories_X, min_categories_, dtype=np.int64) if n_categories_.shape != n_categories_X.shape: raise ValueError( f"'min_categories' should have shape ({X.shape[1]}," f") when an array-like is provided. Got" f" {min_categories_.shape} instead." 
) return n_categories_ else: return n_categories_X def _count(self, X, Y): def _update_cat_count_dims(cat_count, highest_feature): diff = highest_feature + 1 - cat_count.shape[1] if diff > 0: # we append a column full of zeros for each new category return np.pad(cat_count, [(0, 0), (0, diff)], 'constant') return cat_count def _update_cat_count(X_feature, Y, cat_count, n_classes): for j in range(n_classes): mask = Y[:, j].astype(bool) if Y.dtype.type == np.int64: weights = None else: weights = Y[mask, j] counts = np.bincount(X_feature[mask], weights=weights) indices = np.nonzero(counts)[0] cat_count[j, indices] += counts[indices] self.class_count_ += Y.sum(axis=0) self.n_categories_ = self._validate_n_categories( X, self.min_categories) for i in range(self.n_features_): X_feature = X[:, i] self.category_count_[i] = _update_cat_count_dims( self.category_count_[i], self.n_categories_[i] - 1) _update_cat_count(X_feature, Y, self.category_count_[i], self.class_count_.shape[0]) def _update_feature_log_prob(self, alpha): feature_log_prob = [] for i in range(self.n_features_): smoothed_cat_count = self.category_count_[i] + alpha smoothed_class_count = smoothed_cat_count.sum(axis=1) feature_log_prob.append( np.log(smoothed_cat_count) - np.log(smoothed_class_count.reshape(-1, 1))) self.feature_log_prob_ = feature_log_prob def _joint_log_likelihood(self, X): if not X.shape[1] == self.n_features_: raise ValueError("Expected input with %d features, got %d instead" % (self.n_features_, X.shape[1])) jll = np.zeros((X.shape[0], self.class_count_.shape[0])) for i in range(self.n_features_): indices = X[:, i] jll += self.feature_log_prob_[i][:, indices].T total_ll = jll + self.class_log_prior_ return total_ll
37.416729
92
0.620317
4a14d3ac2d671aeb1ac8cded25e7edd2258a9e0b
1,246
py
Python
_old/play.py
tekkie1618/kards
1ccac0f9484a8acb6a79eee135a0b719d8b39f44
[ "MIT" ]
1
2022-02-26T17:39:54.000Z
2022-02-26T17:39:54.000Z
_old/play.py
tekkie1618/kards
1ccac0f9484a8acb6a79eee135a0b719d8b39f44
[ "MIT" ]
null
null
null
_old/play.py
tekkie1618/kards
1ccac0f9484a8acb6a79eee135a0b719d8b39f44
[ "MIT" ]
null
null
null
from deck import Deck from hand import Hand from check_score import check_score import settings def play(num_players: int, starting_cards: int, number_of_decks: int = 1): hands = [] deck = Deck(number_of_decks) hand = Hand() for player in range(num_players): hand.cards = deck.deal(starting_cards) hand.print(player) hands.append(hand.cards) print(f"{hand.cards}\n") score = check_score(hands) print( # ! TODO FIX f"Results\nWinner: Player(s) {[x + 1 for x in list(score['winning_players'])]}\nScore: {score['winning_hand_name']}\nHand: {score['winning_hand']}\n" # f"Results\nWinner: Player(s) {score['winning_players']}\n" ) print(f"Scores\n{score}\n") def main(): if ( settings.NUMBER_OF_PLAYERS > 0 and settings.STARTING_CARDS > 0 and settings.NUMBER_OF_DECKS > 0 ): play( settings.NUMBER_OF_PLAYERS, settings.STARTING_CARDS, settings.NUMBER_OF_DECKS, ) else: print( "Check your settings!\nMust have at least 1 player, 1 starting card, and 1 deck." ) if __name__ == "__main__": main()
28.318182
158
0.592295
4a14d441a7c858c820d1374a1c7b9ce7a4ffa33d
1,390
py
Python
utils.py
ShuvozitGhose/UDBNET
47f1dbc018c51d590c4b5f35d24bffeee2a22a71
[ "MIT" ]
6
2020-07-15T01:04:46.000Z
2022-02-04T18:53:51.000Z
utils.py
ShuvozitGhose/UDBNET
47f1dbc018c51d590c4b5f35d24bffeee2a22a71
[ "MIT" ]
4
2020-09-09T21:22:14.000Z
2021-05-02T12:36:25.000Z
utils.py
ShuvozitGhose/UDBNET
47f1dbc018c51d590c4b5f35d24bffeee2a22a71
[ "MIT" ]
4
2020-07-07T11:24:38.000Z
2021-06-23T07:41:59.000Z
import torch device = torch.device("cuda" if torch.cuda.is_available() else "cpu") import numpy class Denormalization: def __init__(self): self.mean_3 = torch.tensor([-1.0, -1.0, -1.0]).to(device) self.std_3 = torch.tensor([1 / 0.5, 1 / 0.5, 1 / 0.5]).to(device) self.mean_1 = torch.tensor([-1.0]).to(device) self.std_1 = torch.tensor([1 / 0.5]).to(device) self.mean_norm = torch.tensor([0.485,0.456,0.406]).to(device) self.std_norm = torch.tensor([0.229,0.224,0.225]).to(device) def __call__(self, inp_tensor): #print(inp_tensor) if inp_tensor.shape[1] == 3: #inp_tensor.sub_(self.mean_3[None, :, None, None]).div_(self.std_3[None, :, None, None]) inp_tensor_denorm = (inp_tensor - self.mean_3[None, :, None, None])/(self.std_3[None, :, None, None]) elif inp_tensor.shape[1] == 1: #inp_tensor.sub_(self.mean_1[None, :, None, None]).div_(self.std_1[None, :, None, None]) inp_tensor_denorm = (inp_tensor - self.mean_1[None, :, None, None]) / (self.std_1[None, :, None, None]) #print(inp_tensor) #inp_tensor.sub_(self.mean_norm[None, :, None, None]).div_(self.std_norm[None, :, None, None]) inp_tensor_renorm = (inp_tensor_denorm - self.mean_norm[None, :, None, None])/(self.std_norm[None, :, None, None]) return inp_tensor_renorm
55.6
122
0.617986
4a14d4ace98dc8ca984d719a35b88c2d396fff17
1,754
py
Python
plot.py
toytag/Parallel-KMeans
078d98da064f148edb1ec3344f84a15c8326ffad
[ "MIT" ]
null
null
null
plot.py
toytag/Parallel-KMeans
078d98da064f148edb1ec3344f84a15c8326ffad
[ "MIT" ]
null
null
null
plot.py
toytag/Parallel-KMeans
078d98da064f148edb1ec3344f84a15c8326ffad
[ "MIT" ]
null
null
null
################ ## ## File: plot.py ## ## Plot the 2-d data points in standardized 'res.txt' ascii files, utlizing ## the `matplotlib.pyplot` scatter plots. ## ## * Number of colors (i.e. clusters) CANNOT exceed 20. ## ## Jose @ ShanghaiTech University ## ################ import numpy as np import matplotlib.pyplot as plt import os, sys # # Backup color set for 20 different clusters. # colorset = ["black" , "gray" , "lightcoral" , "maroon" , "indigo", "chocolate", "gold" , "lightseagreen", "lawngreen", "olive" , "cyan" , "skyblue", "navy" , "blue" , "red" , "violet" , "hotpink", "lightpink" , "azure" , "beige" ] # # Open the original data file and the clustered output. # if len(sys.argv) != 2: print("Usage: make plot FILE=<result.txt>") exit(-1) if not (os.path.isfile(sys.argv[1])): raise RuntimeError("Data file "+sys.argv[1]+" does not exist!") fo = open(sys.argv[1]) pn, cn = tuple(map(lambda s: int(s), fo.readline().split("/"))) # # Read in data points from both files. # data = np.array([(float(l.split(", ")[0]), float(l.split(", ")[1])) for l in fo.readlines()]) # # Read in corresponding colors. # fo.seek(0); fo.readline() clist = [colorset[int(l.split(", ")[2])] for l in fo.readlines()] # # Plot the points on 2-d axis with colors. # print("Plotting might be laggish, please be patient...", end='') sys.stdout.flush() plt.figure(figsize=(11, 5)) plt.subplot(1, 2, 1) # Original. plt.scatter(data[:,0], data[:,1], c='black', s=1) plt.title("Original Distribution") plt.subplot(1, 2, 2) # Result. plt.scatter(data[:,0], data[:,1], c= clist , s=1) plt.title("Result of K-Means Clustering") plt.show() print("done.")
27.84127
76
0.594641
4a14d52a78d94fea977285ebd886c49d85dce53f
215
py
Python
deui/html/attribute/type_attr.py
urushiyama/DeUI
14530d2dae7d96a3dee30759f85e02239fb433c5
[ "MIT" ]
1
2021-10-17T01:54:18.000Z
2021-10-17T01:54:18.000Z
deui/html/attribute/type_attr.py
urushiyama/DeUI
14530d2dae7d96a3dee30759f85e02239fb433c5
[ "MIT" ]
null
null
null
deui/html/attribute/type_attr.py
urushiyama/DeUI
14530d2dae7d96a3dee30759f85e02239fb433c5
[ "MIT" ]
null
null
null
from .attribute_builder import AttributeBuilder class Type(AttributeBuilder): """ Represents 'type' attribute. """ def __init__(self): super().__init__() self.attributes = ["type"]
19.545455
47
0.637209
4a14d535549cf23a75cefaf9dd84a95dea61e778
338
py
Python
src/boxes/box_processor_textfusenet.py
gregbugaj/marie-ai
f51a74f19ab5d7231c9f8a426284feff1671b974
[ "MIT" ]
4
2021-09-23T22:38:48.000Z
2022-01-19T12:03:02.000Z
src/boxes/box_processor_textfusenet.py
gregbugaj/marie-icr
f51a74f19ab5d7231c9f8a426284feff1671b974
[ "MIT" ]
17
2021-12-22T16:37:21.000Z
2022-03-16T16:07:34.000Z
src/boxes/box_processor_textfusenet.py
gregbugaj/marie-ai
f51a74f19ab5d7231c9f8a426284feff1671b974
[ "MIT" ]
null
null
null
from boxes.box_processor import BoxProcessor, PSMode class BoxProcessorTextFuseNet(BoxProcessor): """ "TextFuseNet box processor responsible for extracting bounding boxes for given documents""" def __init__(self): super().__init__() def extract_bounding_boxes(self, _id, key, img, psm=PSMode.SPARSE): pass
28.166667
99
0.733728
4a14d8f1a152b92d53bf5fdb5cf14105caed6015
2,344
py
Python
tests/models/validators/v2_2_1/jsd_c2b2882c8fb65284bfc9d781e9ddd07f.py
oboehmer/dnacentersdk
25c4e99900640deee91a56aa886874d9cb0ca960
[ "MIT" ]
32
2019-09-05T05:16:56.000Z
2022-03-22T09:50:38.000Z
tests/models/validators/v2_2_1/jsd_c2b2882c8fb65284bfc9d781e9ddd07f.py
oboehmer/dnacentersdk
25c4e99900640deee91a56aa886874d9cb0ca960
[ "MIT" ]
35
2019-09-07T18:58:54.000Z
2022-03-24T19:29:36.000Z
tests/models/validators/v2_2_1/jsd_c2b2882c8fb65284bfc9d781e9ddd07f.py
oboehmer/dnacentersdk
25c4e99900640deee91a56aa886874d9cb0ca960
[ "MIT" ]
18
2019-09-09T11:07:21.000Z
2022-03-25T08:49:59.000Z
# -*- coding: utf-8 -*- """Cisco DNA Center returnReplacementDevicesCount data model. Copyright (c) 2019-2021 Cisco Systems. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ from __future__ import ( absolute_import, division, print_function, unicode_literals, ) import fastjsonschema import json from dnacentersdk.exceptions import MalformedRequest from builtins import * class JSONSchemaValidatorC2B2882C8Fb65284Bfc9D781E9Ddd07F(object): """returnReplacementDevicesCount request schema definition.""" def __init__(self): super(JSONSchemaValidatorC2B2882C8Fb65284Bfc9D781E9Ddd07F, self).__init__() self._validator = fastjsonschema.compile(json.loads( '''{ "$schema": "http://json-schema.org/draft-04/schema#", "properties": { "response": { "type": "integer" }, "version": { "type": "string" } }, "type": "object" }'''.replace("\n" + ' ' * 16, '') )) def validate(self, request): try: self._validator(request) except fastjsonschema.exceptions.JsonSchemaException as e: raise MalformedRequest( '{} is invalid. 
Reason: {}'.format(request, e.message) )
35.515152
83
0.676195
4a14d96b9002667af72af84ddbf99c83fd1e2dce
3,481
py
Python
nipype/interfaces/diffusion_toolkit/postproc.py
sebastientourbier/nipype
99c5904176481520c5bf42a501aae1a12184e672
[ "Apache-2.0" ]
2
2019-01-25T18:20:51.000Z
2019-07-30T20:51:51.000Z
nipype/interfaces/diffusion_toolkit/postproc.py
sebastientourbier/nipype
99c5904176481520c5bf42a501aae1a12184e672
[ "Apache-2.0" ]
null
null
null
nipype/interfaces/diffusion_toolkit/postproc.py
sebastientourbier/nipype
99c5904176481520c5bf42a501aae1a12184e672
[ "Apache-2.0" ]
2
2018-01-25T19:48:17.000Z
2019-01-25T18:20:52.000Z
# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Provides interfaces to various commands provided by diffusion toolkit Change directory to provide relative paths for doctests >>> import os >>> filepath = os.path.dirname( os.path.realpath( __file__ ) ) >>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data')) >>> os.chdir(datadir) """ from __future__ import print_function, division, unicode_literals, absolute_import import os from ..base import (TraitedSpec, File, traits, CommandLine, InputMultiPath, CommandLineInputSpec) __docformat__ = 'restructuredtext' class SplineFilterInputSpec(CommandLineInputSpec): track_file = File(exists=True, desc="file containing tracks to be filtered", position=0, argstr="%s", mandatory=True) step_length = traits.Float(desc="in the unit of minimum voxel size", position=1, argstr="%f", mandatory=True) output_file = File("spline_tracks.trk", desc="target file for smoothed tracks", position=2, argstr="%s", usedefault=True) class SplineFilterOutputSpec(TraitedSpec): smoothed_track_file = File(exists=True) class SplineFilter(CommandLine): """ Smoothes TrackVis track files with a B-Spline filter. Helps remove redundant track points and segments (thus reducing the size of the track file) and also make tracks nicely smoothed. It will NOT change the quality of the tracks or lose any original information. 
Example ------- >>> import nipype.interfaces.diffusion_toolkit as dtk >>> filt = dtk.SplineFilter() >>> filt.inputs.track_file = 'tracks.trk' >>> filt.inputs.step_length = 0.5 >>> filt.run() # doctest: +SKIP """ input_spec = SplineFilterInputSpec output_spec = SplineFilterOutputSpec _cmd = "spline_filter" def _list_outputs(self): outputs = self.output_spec().get() outputs['smoothed_track_file'] = os.path.abspath(self.inputs.output_file) return outputs class TrackMergeInputSpec(CommandLineInputSpec): track_files = InputMultiPath(File(exists=True), desc="file containing tracks to be filtered", position=0, argstr="%s...", mandatory=True) output_file = File("merged_tracks.trk", desc="target file for merged tracks", position=-1, argstr="%s", usedefault=True) class TrackMergeOutputSpec(TraitedSpec): track_file = File(exists=True) class TrackMerge(CommandLine): """ Merges several TrackVis track files into a single track file. An id type property tag is added to each track in the newly merged file, with each unique id representing where the track was originally from. When the merged file is loaded in TrackVis, a property filter will show up in Track Property panel. Users can adjust that to distinguish and sub-group tracks by its id (origin). Example ------- >>> import nipype.interfaces.diffusion_toolkit as dtk >>> mrg = dtk.TrackMerge() >>> mrg.inputs.track_files = ['track1.trk','track2.trk'] >>> mrg.run() # doctest: +SKIP """ input_spec = TrackMergeInputSpec output_spec = TrackMergeOutputSpec _cmd = "track_merge" def _list_outputs(self): outputs = self.output_spec().get() outputs['track_file'] = os.path.abspath(self.inputs.output_file) return outputs
35.520408
141
0.687733
4a14d97cdfbfe22eeef8972f80e02bf4c0cb3340
1,144
py
Python
data/convert.py
louisjensen/FinalProject588
16985c91a94b668dfa0e8b6b83ccf2922fb7fb92
[ "MIT" ]
1
2020-10-22T16:19:36.000Z
2020-10-22T16:19:36.000Z
data/convert.py
louisjensen/FinalProject588
16985c91a94b668dfa0e8b6b83ccf2922fb7fb92
[ "MIT" ]
null
null
null
data/convert.py
louisjensen/FinalProject588
16985c91a94b668dfa0e8b6b83ccf2922fb7fb92
[ "MIT" ]
1
2020-10-22T17:50:49.000Z
2020-10-22T17:50:49.000Z
import cv2 import sys import os from matplotlib import pyplot as plt import numpy as np from PIL import Image def detect_face(file,filename, images_delete): directory = r'C:\Users\Rob\Documents\College\ECE 588\FinalProject588\data\cropped' img = cv2.imread(file) color = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) faces = face_cascade.detectMultiScale(color, 1.1, 10) if type(faces) == tuple: print("No face detected!") images_delete.append(filename) else: for (x, y, w, h) in faces: cropped = img[y:y+h, x:x+w] print(filename) cv2.imwrite(directory + '\\' + filename, cropped) return images_delete # Load the cascade face_cascade = cv2.CascadeClassifier('haarcascade_frontalface_default.xml') # Read the input image images_delete = [] directory = r'C:\Users\Rob\Documents\College\ECE 588\FinalProject588\data\raw' for filename in os.listdir(directory): if filename.endswith(".jpg"): images_delete = detect_face(os.path.join(directory, filename), filename, images_delete) else: images_delete.append(filename) print(images_delete)
28.6
95
0.696678
4a14d9d77120fc4e424e8455497074a4d1c31697
37,712
py
Python
python/ccxt/bitbns.py
ngugcx/ccxt
57133bf1d129f962ed9aa861006257d55e43000c
[ "MIT" ]
null
null
null
python/ccxt/bitbns.py
ngugcx/ccxt
57133bf1d129f962ed9aa861006257d55e43000c
[ "MIT" ]
1
2022-01-27T19:54:13.000Z
2022-01-27T19:54:13.000Z
python/ccxt/bitbns.py
ngugcx/ccxt
57133bf1d129f962ed9aa861006257d55e43000c
[ "MIT" ]
1
2022-03-15T22:51:08.000Z
2022-03-15T22:51:08.000Z
# -*- coding: utf-8 -*- # PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN: # https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code from ccxt.base.exchange import Exchange import hashlib from ccxt.base.errors import ExchangeError from ccxt.base.errors import ArgumentsRequired from ccxt.base.errors import BadRequest from ccxt.base.errors import BadSymbol from ccxt.base.errors import InsufficientFunds from ccxt.base.errors import OrderNotFound from ccxt.base.precise import Precise class bitbns(Exchange): def describe(self): return self.deep_extend(super(bitbns, self).describe(), { 'id': 'bitbns', 'name': 'Bitbns', 'countries': ['IN'], # India 'rateLimit': 1000, 'certified': False, 'pro': False, 'version': 'v2', # new metainfo interface 'has': { 'cancelOrder': True, 'createOrder': True, 'fetchBalance': True, 'fetchDepositAddress': True, 'fetchDeposits': True, 'fetchMarkets': True, 'fetchMyTrades': True, 'fetchOHLCV': None, 'fetchOpenOrders': True, 'fetchOrder': True, 'fetchOrderBook': True, 'fetchStatus': True, 'fetchTicker': 'emulated', 'fetchTickers': True, 'fetchTrades': True, 'fetchWithdrawals': True, }, 'timeframes': { }, 'urls': { 'logo': 'https://user-images.githubusercontent.com/1294454/117201933-e7a6e780-adf5-11eb-9d80-98fc2a21c3d6.jpg', 'api': { 'www': 'https://bitbns.com', 'v1': 'https://api.bitbns.com/api/trade/v1', 'v2': 'https://api.bitbns.com/api/trade/v2', }, 'www': 'https://bitbns.com', 'referral': 'https://ref.bitbns.com/1090961', 'doc': [ 'https://bitbns.com/trade/#/api-trading/', ], 'fees': 'https://bitbns.com/fees', }, 'api': { 'www': { 'get': [ 'order/fetchMarkets', 'order/fetchTickers', 'order/fetchOrderbook', 'order/getTickerWithVolume', 'exchangeData/ohlc', # ?coin=${coin_name}&page=${page} 'exchangeData/orderBook', 'exchangeData/tradedetails', ], }, 'v1': { 'get': [ 'platform/status', 'tickers', 'orderbook/sell/{symbol}', 'orderbook/buy/{symbol}', ], 'post': [ 'currentCoinBalance/EVERYTHING', 
'getApiUsageStatus/USAGE', 'getOrderSocketToken/USAGE', 'currentCoinBalance/{symbol}', 'orderStatus/{symbol}', 'depositHistory/{symbol}', 'withdrawHistory/{symbol}', 'withdrawHistoryAll/{symbol}', 'depositHistoryAll/{symbol}', 'listOpenOrders/{symbol}', 'listOpenStopOrders/{symbol}', 'getCoinAddress/{symbol}', 'placeSellOrder/{symbol}', 'placeBuyOrder/{symbol}', 'buyStopLoss/{symbol}', 'sellStopLoss/{symbol}', 'placeSellOrder/{symbol}', 'cancelOrder/{symbol}', 'cancelStopLossOrder/{symbol}', 'listExecutedOrders/{symbol}', 'placeMarketOrder/{symbol}', 'placeMarketOrderQnty/{symbol}', ], }, 'v2': { 'post': [ 'orders', 'cancel', 'getordersnew', 'marginOrders', ], }, }, 'fees': { 'trading': { 'feeSide': 'quote', 'tierBased': False, 'percentage': True, 'taker': self.parse_number('0.0025'), 'maker': self.parse_number('0.0025'), }, }, 'exceptions': { 'exact': { '400': BadRequest, # {"msg":"Invalid Request","status":-1,"code":400} '409': BadSymbol, # {"data":"","status":0,"error":"coin name not supplied or not yet supported","code":409} '416': InsufficientFunds, # {"data":"Oops ! 
Not sufficient currency to sell","status":0,"error":null,"code":416} '417': OrderNotFound, # {"data":[],"status":0,"error":"Nothing to show","code":417} }, 'broad': {}, }, }) def fetch_status(self, params={}): response = self.v1GetPlatformStatus(params) # # { # "data":{ # "BTC":{"status":1}, # "ETH":{"status":1}, # "XRP":{"status":1}, # }, # "status":1, # "error":null, # "code":200 # } # status = self.safe_string(response, 'status') if status is not None: status = 'ok' if (status == '1') else 'maintenance' self.status = self.extend(self.status, { 'status': status, 'updated': self.milliseconds(), }) return self.status def fetch_markets(self, params={}): response = self.wwwGetOrderFetchMarkets(params) # # [ # { # "id":"BTC", # "symbol":"BTC/INR", # "base":"BTC", # "quote":"INR", # "baseId":"BTC", # "quoteId":"", # "active":true, # "limits":{ # "amount":{"min":"0.00017376","max":20}, # "price":{"min":2762353.2359999996,"max":6445490.883999999}, # "cost":{"min":800,"max":128909817.67999998} # }, # "precision":{ # "amount":8, # "price":2 # }, # "info":{} # }, # ] # result = [] for i in range(0, len(response)): market = response[i] id = self.safe_string(market, 'id') baseId = self.safe_string(market, 'base') quoteId = self.safe_string(market, 'quote') base = self.safe_currency_code(baseId) quote = self.safe_currency_code(quoteId) symbol = base + '/' + quote marketPrecision = self.safe_value(market, 'precision', {}) precision = { 'amount': self.safe_integer(marketPrecision, 'amount'), 'price': self.safe_integer(marketPrecision, 'price'), } marketLimits = self.safe_value(market, 'limits', {}) amountLimits = self.safe_value(marketLimits, 'amount', {}) priceLimits = self.safe_value(marketLimits, 'price', {}) costLimits = self.safe_value(marketLimits, 'cost', {}) usdt = (quoteId == 'USDT') # INR markets don't need a _INR prefix uppercaseId = (baseId + '_' + quoteId) if usdt else baseId result.append({ 'id': id, 'uppercaseId': uppercaseId, 'symbol': symbol, 'base': base, 
'quote': quote, 'baseId': baseId, 'quoteId': quoteId, 'info': market, 'type': 'spot', 'spot': True, 'active': None, 'precision': precision, 'limits': { 'amount': { 'min': self.safe_number(amountLimits, 'min'), 'max': self.safe_number(amountLimits, 'max'), }, 'price': { 'min': self.safe_number(priceLimits, 'min'), 'max': self.safe_number(priceLimits, 'max'), }, 'cost': { 'min': self.safe_number(costLimits, 'min'), 'max': self.safe_number(costLimits, 'max'), }, }, }) return result def fetch_order_book(self, symbol, limit=None, params={}): self.load_markets() market = self.market(symbol) request = { 'symbol': market['id'], } if limit is not None: request['limit'] = limit # default 100, max 5000, see https://github.com/binance-exchange/binance-official-api-docs/blob/master/rest-api.md#order-book response = self.wwwGetOrderFetchOrderbook(self.extend(request, params)) # # { # "bids":[ # [49352.04,0.843948], # [49352.03,0.742048], # [49349.78,0.686239], # ], # "asks":[ # [49443.59,0.065137], # [49444.63,0.098211], # [49449.01,0.066309], # ], # "timestamp":1619172786577, # "datetime":"2021-04-23T10:13:06.577Z", # "nonce":"" # } # timestamp = self.safe_integer(response, 'timestamp') return self.parse_order_book(response, timestamp) def parse_ticker(self, ticker, market=None): # # { # "symbol":"BTC/INR", # "info":{ # "highest_buy_bid":4368494.31, # "lowest_sell_bid":4374835.09, # "last_traded_price":4374835.09, # "yes_price":4531016.27, # "volume":{"max":"4569119.23","min":"4254552.13","volume":62.17722344} # }, # "timestamp":1619100020845, # "datetime":1619100020845, # "high":"4569119.23", # "low":"4254552.13", # "bid":4368494.31, # "bidVolume":"", # "ask":4374835.09, # "askVolume":"", # "vwap":"", # "open":4531016.27, # "close":4374835.09, # "last":4374835.09, # "baseVolume":62.17722344, # "quoteVolume":"", # "previousClose":"", # "change":-156181.1799999997, # "percentage":-3.446934874943623, # "average":4452925.68 # } # timestamp = self.safe_integer(ticker, 'timestamp') 
marketId = self.safe_string(ticker, 'symbol') symbol = self.safe_symbol(marketId, market) last = self.safe_number(ticker, 'last') return self.safe_ticker({ 'symbol': symbol, 'timestamp': timestamp, 'datetime': self.iso8601(timestamp), 'high': self.safe_number(ticker, 'high'), 'low': self.safe_number(ticker, 'low'), 'bid': self.safe_number(ticker, 'bid'), 'bidVolume': self.safe_number(ticker, 'bidVolume'), 'ask': self.safe_number(ticker, 'ask'), 'askVolume': self.safe_number(ticker, 'askVolume'), 'vwap': self.safe_number(ticker, 'vwap'), 'open': self.safe_number(ticker, 'open'), 'close': last, 'last': last, 'previousClose': self.safe_number(ticker, 'previousClose'), # previous day close 'change': self.safe_number(ticker, 'change'), 'percentage': self.safe_number(ticker, 'percentage'), 'average': self.safe_number(ticker, 'average'), 'baseVolume': self.safe_number(ticker, 'baseVolume'), 'quoteVolume': self.safe_number(ticker, 'quoteVolume'), 'info': ticker, }, market) def fetch_tickers(self, symbols=None, params={}): self.load_markets() response = self.wwwGetOrderFetchTickers(params) # # { # "BTC/INR":{ # "symbol":"BTC/INR", # "info":{ # "highest_buy_bid":4368494.31, # "lowest_sell_bid":4374835.09, # "last_traded_price":4374835.09, # "yes_price":4531016.27, # "volume":{"max":"4569119.23","min":"4254552.13","volume":62.17722344} # }, # "timestamp":1619100020845, # "datetime":1619100020845, # "high":"4569119.23", # "low":"4254552.13", # "bid":4368494.31, # "bidVolume":"", # "ask":4374835.09, # "askVolume":"", # "vwap":"", # "open":4531016.27, # "close":4374835.09, # "last":4374835.09, # "baseVolume":62.17722344, # "quoteVolume":"", # "previousClose":"", # "change":-156181.1799999997, # "percentage":-3.446934874943623, # "average":4452925.68 # } # } # return self.parse_tickers(response, symbols) def parse_balance(self, response): timestamp = None result = { 'info': response, 'timestamp': timestamp, 'datetime': self.iso8601(timestamp), } data = self.safe_value(response, 
'data', {}) keys = list(data.keys()) for i in range(0, len(keys)): key = keys[i] parts = key.split('availableorder') numParts = len(parts) if numParts > 1: currencyId = self.safe_string(parts, 1) if currencyId != 'Money': code = self.safe_currency_code(currencyId) account = self.account() account['free'] = self.safe_string(data, key) account['used'] = self.safe_string(data, 'inorder' + currencyId) result[code] = account return self.safe_balance(result) def fetch_balance(self, params={}): self.load_markets() response = self.v1PostCurrentCoinBalanceEVERYTHING(params) # # { # "data":{ # "availableorderMoney":0, # "availableorderBTC":0, # "availableorderXRP":0, # "inorderMoney":0, # "inorderBTC":0, # "inorderXRP":0, # "inorderNEO":0, # }, # "status":1, # "error":null, # "code":200 # } # return self.parse_balance(response) def parse_order_status(self, status): statuses = { '0': 'open', # 'PARTIALLY_FILLED': 'open', # 'FILLED': 'closed', # 'CANCELED': 'canceled', # 'PENDING_CANCEL': 'canceling', # currently unused # 'REJECTED': 'rejected', # 'EXPIRED': 'expired', } return self.safe_string(statuses, status, status) def parse_order(self, order, market=None): # # createOrder # # { # "data":"Successfully placed bid to purchase currency", # "status":1, # "error":null, # "id":5424475, # "code":200 # } # # fetchOrder # # { # "entry_id":5424475, # "btc":0.01, # "rate":2000, # "time":"2021-04-25T17:05:42.000Z", # "type":0, # "status":0, # "total":0.01, # "avg_cost":null, # "side":"BUY", # "amount":0.01, # "remaining":0.01, # "filled":0, # "cost":null, # "fee":0.05 # } # # fetchOpenOrders # # { # "entry_id":5424475, # "btc":0.01, # "rate":2000, # "time":"2021-04-25T17:05:42.000Z", # "type":0, # "status":0 # } # id = self.safe_string_2(order, 'id', 'entry_id') marketId = self.safe_string(order, 'symbol') symbol = self.safe_symbol(marketId, market) timestamp = self.parse8601(self.safe_string(order, 'time')) price = self.safe_string(order, 'rate') amount = self.safe_string_2(order, 
'amount', 'btc') filled = self.safe_string(order, 'filled') remaining = self.safe_string(order, 'remaining') average = self.safe_string(order, 'avg_cost') cost = self.safe_string(order, 'cost') type = self.safe_string_lower(order, 'type') if type == '0': type = 'limit' status = self.parse_order_status(self.safe_string(order, 'status')) side = self.safe_string_lower(order, 'side') feeCost = self.safe_number(order, 'fee') fee = None if feeCost is not None: feeCurrencyCode = None fee = { 'cost': feeCost, 'currency': feeCurrencyCode, } return self.safe_order({ 'info': order, 'id': id, 'clientOrderId': None, 'timestamp': timestamp, 'datetime': self.iso8601(timestamp), 'lastTradeTimestamp': None, 'symbol': symbol, 'type': type, 'timeInForce': None, 'postOnly': None, 'side': side, 'price': price, 'stopPrice': None, 'amount': amount, 'cost': cost, 'average': average, 'filled': filled, 'remaining': remaining, 'status': status, 'fee': fee, 'trades': None, }, market) def create_order(self, symbol, type, side, amount, price=None, params={}): if type != 'limit' and type != 'market': raise ExchangeError(self.id + ' allows limit and market orders only') self.load_markets() market = self.market(symbol) request = { 'side': side.upper(), 'symbol': market['uppercaseId'], 'quantity': self.amount_to_precision(symbol, amount), # 'target_rate': self.price_to_precision(symbol, targetRate), # 't_rate': self.price_to_precision(symbol, stopPrice), # 'trail_rate': self.price_to_precision(symbol, trailRate), # To Place Simple Buy or Sell Order use rate # To Place Stoploss Buy or Sell Order use rate & t_rate # To Place Bracket Buy or Sell Order use rate , t_rate, target_rate & trail_rate } method = 'v2PostOrders' if type == 'limit': request['rate'] = self.price_to_precision(symbol, price) elif type == 'market': method = 'v1PostPlaceMarketOrderQntySymbol' request['market'] = market['quoteId'] else: raise ExchangeError(self.id + ' allows limit and market orders only') response = getattr(self, 
method)(self.extend(request, params)) # # { # "data":"Successfully placed bid to purchase currency", # "status":1, # "error":null, # "id":5424475, # "code":200 # } # return self.parse_order(response, market) def cancel_order(self, id, symbol=None, params={}): if symbol is None: raise ArgumentsRequired(self.id + ' cancelOrder() requires a symbol argument') self.load_markets() market = self.market(symbol) quoteSide = 'usdtcancelOrder' if (market['quoteId'] == 'USDT') else 'cancelOrder' request = { 'entry_id': id, 'symbol': market['uppercaseId'], 'side': quoteSide, } response = self.v2PostCancel(self.extend(request, params)) return self.parse_order(response, market) def fetch_order(self, id, symbol=None, params={}): if symbol is None: raise ArgumentsRequired(self.id + ' fetchOrder() requires a symbol argument') self.load_markets() market = self.market(symbol) request = { 'symbol': market['id'], 'entry_id': id, } response = self.v1PostOrderStatusSymbol(self.extend(request, params)) # # { # "data":[ # { # "entry_id":5424475, # "btc":0.01, # "rate":2000, # "time":"2021-04-25T17:05:42.000Z", # "type":0, # "status":0, # "total":0.01, # "avg_cost":null, # "side":"BUY", # "amount":0.01, # "remaining":0.01, # "filled":0, # "cost":null, # "fee":0.05 # } # ], # "status":1, # "error":null, # "code":200 # } # data = self.safe_value(response, 'data', []) first = self.safe_value(data, 0) return self.parse_order(first, market) def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}): if symbol is None: raise ArgumentsRequired(self.id + ' fetchOrders() requires a symbol argument') self.load_markets() market = self.market(symbol) quoteSide = 'usdtListOpenOrders' if (market['quoteId'] == 'USDT') else 'listOpenOrders' request = { 'symbol': market['uppercaseId'], 'side': quoteSide, 'page': 0, } response = self.v2PostGetordersnew(self.extend(request, params)) # # { # "data":[ # { # "entry_id":5424475, # "btc":0.01, # "rate":2000, # "time":"2021-04-25T17:05:42.000Z", # 
"type":0, # "status":0 # } # ], # "status":1, # "error":null, # "code":200 # } # data = self.safe_value(response, 'data', []) return self.parse_orders(data, market, since, limit) def parse_trade(self, trade, market=None): # # fetchMyTrades # # { # "type": "BTC Sell order executed", # "typeI": 6, # "crypto": 5000, # "amount": 35.4, # "rate": 709800, # "date": "2020-05-22T15:05:34.000Z", # "unit": "INR", # "factor": 100000000, # "fee": 0.09, # "delh_btc": -5000, # "delh_inr": 0, # "del_btc": 0, # "del_inr": 35.4, # "id": "2938823" # } # # fetchTrades # # { # "tradeId":"1909151", # "price":"61904.6300", # "quote_volume":1618.05, # "base_volume":0.02607254, # "timestamp":1634548602000, # "type":"buy" # } # market = self.safe_market(None, market) orderId = self.safe_string_2(trade, 'id', 'tradeId') timestamp = self.parse8601(self.safe_string(trade, 'date')) timestamp = self.safe_integer(trade, 'timestamp', timestamp) priceString = self.safe_string_2(trade, 'rate', 'price') amountString = self.safe_string(trade, 'amount') side = self.safe_string_lower(trade, 'type') if side is not None: if side.find('buy') >= 0: side = 'buy' elif side.find('sell') >= 0: side = 'sell' factor = self.safe_string(trade, 'factor') costString = None if factor is not None: amountString = Precise.string_div(amountString, factor) else: amountString = self.safe_string(trade, 'base_volume') costString = self.safe_string(trade, 'quote_volume') symbol = market['symbol'] fee = None feeCostString = self.safe_string(trade, 'fee') if feeCostString is not None: feeCurrencyCode = market['quote'] fee = { 'cost': feeCostString, 'currency': feeCurrencyCode, } return self.safe_trade({ 'info': trade, 'timestamp': timestamp, 'datetime': self.iso8601(timestamp), 'symbol': symbol, 'id': orderId, 'order': orderId, 'type': None, 'side': side, 'takerOrMaker': None, 'price': priceString, 'amount': amountString, 'cost': costString, 'fee': fee, }, market) def fetch_my_trades(self, symbol=None, since=None, limit=None, 
params={}): if symbol is None: raise ArgumentsRequired(self.id + ' fetchOrders() requires a symbol argument') self.load_markets() market = self.market(symbol) request = { 'symbol': market['id'], 'page': 0, } if since is not None: request['since'] = self.iso8601(since) response = self.v1PostListExecutedOrdersSymbol(self.extend(request, params)) # # { # "data": [ # { # "type": "BTC Sell order executed", # "typeI": 6, # "crypto": 5000, # "amount": 35.4, # "rate": 709800, # "date": "2020-05-22T15:05:34.000Z", # "unit": "INR", # "factor": 100000000, # "fee": 0.09, # "delh_btc": -5000, # "delh_inr": 0, # "del_btc": 0, # "del_inr": 35.4, # "id": "2938823" # }, # { # "type": "BTC Sell order executed", # "typeI": 6, # "crypto": 195000, # "amount": 1380.58, # "rate": 709765.5, # "date": "2020-05-22T15:05:34.000Z", # "unit": "INR", # "factor": 100000000, # "fee": 3.47, # "delh_btc": -195000, # "delh_inr": 0, # "del_btc": 0, # "del_inr": 1380.58, # "id": "2938823" # } # ], # "status": 1, # "error": null, # "code": 200 # } # data = self.safe_value(response, 'data', []) return self.parse_trades(data, market, since, limit) def fetch_trades(self, symbol, since=None, limit=None, params={}): if symbol is None: raise ArgumentsRequired(self.id + ' fetchTrades() requires a symbol argument') self.load_markets() market = self.market(symbol) request = { 'coin': market['baseId'], 'market': market['quoteId'], } response = self.wwwGetExchangeDataTradedetails(self.extend(request, params)) # # [ # {"tradeId":"1909151","price":"61904.6300","quote_volume":1618.05,"base_volume":0.02607254,"timestamp":1634548602000,"type":"buy"}, # {"tradeId":"1909153","price":"61893.9000","quote_volume":16384.42,"base_volume":0.26405767,"timestamp":1634548999000,"type":"sell"}, # {"tradeId":"1909155","price":"61853.1100","quote_volume":2304.37,"base_volume":0.03716263,"timestamp":1634549670000,"type":"sell"} # } # return self.parse_trades(response, market, since, limit) def fetch_deposits(self, code=None, 
since=None, limit=None, params={}): if code is None: raise ArgumentsRequired(self.id + ' fetchDeposits() requires a currency code argument') self.load_markets() currency = self.currency(code) request = { 'symbol': currency['id'], 'page': 0, } response = self.v1PostDepositHistorySymbol(self.extend(request, params)) # # { # "data":[ # { # "type":"USDT deposited", # "typeI":1, # "amount":100, # "date":"2021-04-24T14:56:04.000Z", # "unit":"USDT", # "factor":100, # "fee":0, # "delh_btc":0, # "delh_inr":0, # "rate":0, # "del_btc":10000, # "del_inr":0 # } # ], # "status":1, # "error":null, # "code":200 # } # data = self.safe_value(response, 'data', []) return self.parse_transactions(data, currency, since, limit) def fetch_withdrawals(self, code=None, since=None, limit=None, params={}): if code is None: raise ArgumentsRequired(self.id + ' fetchWithdrawals() requires a currency code argument') self.load_markets() currency = self.currency(code) request = { 'symbol': currency['id'], 'page': 0, } response = self.v1PostWithdrawHistorySymbol(self.extend(request, params)) # # ... # data = self.safe_value(response, 'data', []) return self.parse_transactions(data, currency, since, limit) def parse_transaction_status_by_type(self, status, type=None): statusesByType = { 'deposit': { '0': 'pending', '1': 'ok', }, 'withdrawal': { '0': 'pending', # Email Sent '1': 'canceled', # Cancelled(different from 1 = ok in deposits) '2': 'pending', # Awaiting Approval '3': 'failed', # Rejected '4': 'pending', # Processing '5': 'failed', # Failure '6': 'ok', # Completed }, } statuses = self.safe_value(statusesByType, type, {}) return self.safe_string(statuses, status, status) def parse_transaction(self, transaction, currency=None): # # fetchDeposits # # { # "type":"USDT deposited", # "typeI":1, # "amount":100, # "date":"2021-04-24T14:56:04.000Z", # "unit":"USDT", # "factor":100, # "fee":0, # "delh_btc":0, # "delh_inr":0, # "rate":0, # "del_btc":10000, # "del_inr":0 # } # # fetchWithdrawals # # ... 
# currencyId = self.safe_string(transaction, 'unit') code = self.safe_currency_code(currencyId, currency) timestamp = self.parse8601(self.safe_string(transaction, 'date')) type = self.safe_string(transaction, 'type') status = None if type is not None: if type.find('deposit') >= 0: type = 'deposit' status = 'ok' elif type.find('withdraw') >= 0: type = 'withdrawal' # status = self.parse_transaction_status_by_type(self.safe_string(transaction, 'status'), type) amount = self.safe_number(transaction, 'amount') feeCost = self.safe_number(transaction, 'fee') fee = None if feeCost is not None: fee = {'currency': code, 'cost': feeCost} return { 'info': transaction, 'id': None, 'txid': None, 'timestamp': timestamp, 'datetime': self.iso8601(timestamp), 'network': None, 'address': None, 'addressTo': None, 'addressFrom': None, 'tag': None, 'tagTo': None, 'tagFrom': None, 'type': type, 'amount': amount, 'currency': code, 'status': status, 'updated': None, 'internal': None, 'fee': fee, } def fetch_deposit_address(self, code, params={}): self.load_markets() currency = self.currency(code) request = { 'symbol': currency['id'], } response = self.v1PostGetCoinAddressSymbol(self.extend(request, params)) # # { # "data":{ # "token":"0x680dee9edfff0c397736e10b017cf6a0aee4ba31", # "expiry":"2022-04-24 22:30:11" # }, # "status":1, # "error":null # } # data = self.safe_value(response, 'data', {}) address = self.safe_string(data, 'token') tag = self.safe_string(data, 'tag') self.check_address(address) return { 'currency': code, 'address': address, 'tag': tag, 'network': None, 'info': response, } def nonce(self): return self.milliseconds() def sign(self, path, api='www', method='GET', params={}, headers=None, body=None): if not (api in self.urls['api']): raise ExchangeError(self.id + ' does not have a testnet/sandbox URL for ' + api + ' endpoints') if api != 'www': self.check_required_credentials() headers = { 'X-BITBNS-APIKEY': self.apiKey, } baseUrl = 
self.implode_hostname(self.urls['api'][api]) url = baseUrl + '/' + self.implode_params(path, params) query = self.omit(params, self.extract_params(path)) nonce = str(self.nonce()) if method == 'GET': if query: url += '?' + self.urlencode(query) elif method == 'POST': if query: body = self.json(query) else: body = '{}' auth = { 'timeStamp_nonce': nonce, 'body': body, } payload = self.string_to_base64(self.json(auth)) signature = self.hmac(payload, self.encode(self.secret), hashlib.sha512) headers['X-BITBNS-PAYLOAD'] = self.decode(payload) headers['X-BITBNS-SIGNATURE'] = signature headers['Content-Type'] = 'application/x-www-form-urlencoded' return {'url': url, 'method': method, 'body': body, 'headers': headers} def handle_errors(self, httpCode, reason, url, method, headers, body, response, requestHeaders, requestBody): if response is None: return # fallback to default error handler # # {"msg":"Invalid Request","status":-1,"code":400} # {"data":[],"status":0,"error":"Nothing to show","code":417} # code = self.safe_string(response, 'code') message = self.safe_string(response, 'msg') error = (code is not None) and (code != '200') if error or (message is not None): feedback = self.id + ' ' + body self.throw_exactly_matched_exception(self.exceptions['exact'], code, feedback) self.throw_exactly_matched_exception(self.exceptions['exact'], message, feedback) self.throw_broadly_matched_exception(self.exceptions['broad'], message, feedback) raise ExchangeError(feedback) # unknown message
38.17004
163
0.445773
4a14dacf68e8b1f9010dc2f59ba65a17dc63227f
41,282
py
Python
jina/helper.py
Ruchip16/jina
24c38a5c330453fb3ebd95f4f4f977b501b21240
[ "Apache-2.0" ]
1
2022-02-09T14:14:06.000Z
2022-02-09T14:14:06.000Z
jina/helper.py
Ruchip16/jina
24c38a5c330453fb3ebd95f4f4f977b501b21240
[ "Apache-2.0" ]
null
null
null
jina/helper.py
Ruchip16/jina
24c38a5c330453fb3ebd95f4f4f977b501b21240
[ "Apache-2.0" ]
null
null
null
import asyncio import functools import inspect import json import math import os import random import re import sys import threading import time import uuid import warnings from argparse import ArgumentParser, Namespace from collections.abc import MutableMapping from datetime import datetime from itertools import islice from types import SimpleNamespace from typing import ( Callable, Tuple, Optional, Iterator, Any, Union, List, Dict, Set, Sequence, Iterable, TypeVar, TYPE_CHECKING, ) from jina import __windows__ __all__ = [ 'batch_iterator', 'parse_arg', 'random_port', 'random_identity', 'random_uuid', 'expand_env_var', 'colored', 'ArgNamespace', 'is_valid_local_config_source', 'cached_property', 'typename', 'get_public_ip', 'get_internal_ip', 'convert_tuple_to_list', 'run_async', 'deprecated_alias', 'countdown', 'CatchAllCleanupContextManager', 'download_mermaid_url', 'get_readable_size', 'get_or_reuse_loop', 'T', ] if TYPE_CHECKING: from docarray import DocumentArray T = TypeVar('T') def deprecated_alias(**aliases): """ Usage, kwargs with key as the deprecated arg name and value be a tuple, (new_name, deprecate_level). With level 0 means warning, level 1 means exception. For example: .. highlight:: python .. code-block:: python @deprecated_alias(input_fn=('inputs', 0), buffer=('input_fn', 0), callback=('on_done', 1), output_fn=('on_done', 1)) :param aliases: maps aliases to new arguments :return: wrapper """ from jina.excepts import NotSupportedError def _rename_kwargs(func_name: str, kwargs, aliases): """ Raise warnings or exceptions for deprecated arguments. :param func_name: Name of the function. :param kwargs: key word arguments from the function which is decorated. :param aliases: kwargs with key as the deprecated arg name and value be a tuple, (new_name, deprecate_level). 
""" for alias, new_arg in aliases.items(): if not isinstance(new_arg, tuple): raise ValueError( f'{new_arg} must be a tuple, with first element as the new name, ' f'second element as the deprecated level: 0 as warning, 1 as exception' ) if alias in kwargs: new_name, dep_level = new_arg if new_name in kwargs: raise NotSupportedError( f'{func_name} received both {alias} and {new_name}' ) if dep_level == 0: warnings.warn( f'`{alias}` is renamed to `{new_name}` in `{func_name}()`, the usage of `{alias}` is ' f'deprecated and will be removed in the next version.', DeprecationWarning, ) kwargs[new_name] = kwargs.pop(alias) elif dep_level == 1: raise NotSupportedError(f'{alias} has been renamed to `{new_name}`') def deco(f): """ Set Decorator function. :param f: function the decorator is used for :return: wrapper """ @functools.wraps(f) def wrapper(*args, **kwargs): """ Set wrapper function. :param args: wrapper arguments :param kwargs: wrapper key word arguments :return: result of renamed function. """ _rename_kwargs(f.__name__, kwargs, aliases) return f(*args, **kwargs) return wrapper return deco def deprecated_method(new_function_name): def deco(func): def wrapper(*args, **kwargs): warnings.warn( f'`{func.__name__}` is renamed to `{new_function_name}`, the usage of `{func.__name__}` is ' f'deprecated and will be removed.', DeprecationWarning, ) return func(*args, **kwargs) return wrapper return deco def get_readable_size(num_bytes: Union[int, float]) -> str: """ Transform the bytes into readable value with different units (e.g. 1 KB, 20 MB, 30.1 GB). :param num_bytes: Number of bytes. :return: Human readable string representation. 
""" num_bytes = int(num_bytes) if num_bytes < 1024: return f'{num_bytes} Bytes' elif num_bytes < 1024 ** 2: return f'{num_bytes / 1024:.1f} KB' elif num_bytes < 1024 ** 3: return f'{num_bytes / (1024 ** 2):.1f} MB' else: return f'{num_bytes / (1024 ** 3):.1f} GB' def batch_iterator( data: Iterable[Any], batch_size: int, axis: int = 0, ) -> Iterator[Any]: """ Get an iterator of batches of data. For example: .. highlight:: python .. code-block:: python for req in batch_iterator(data, batch_size, split_over_axis): # Do something with batch :param data: Data source. :param batch_size: Size of one batch. :param axis: Determine which axis to iterate for np.ndarray data. :yield: data :return: An Iterator of batch data. """ import numpy as np if not batch_size or batch_size <= 0: yield data return if isinstance(data, np.ndarray): _l = data.shape[axis] _d = data.ndim sl = [slice(None)] * _d if batch_size >= _l: yield data return for start in range(0, _l, batch_size): end = min(_l, start + batch_size) sl[axis] = slice(start, end) yield data[tuple(sl)] elif isinstance(data, Sequence): if batch_size >= len(data): yield data return for _ in range(0, len(data), batch_size): yield data[_ : _ + batch_size] elif isinstance(data, Iterable): # as iterator, there is no way to know the length of it iterator = iter(data) while True: chunk = tuple(islice(iterator, batch_size)) if not chunk: return yield chunk else: raise TypeError(f'unsupported type: {type(data)}') def parse_arg(v: str) -> Optional[Union[bool, int, str, list, float]]: """ Parse the arguments from string to `Union[bool, int, str, list, float]`. :param v: The string of arguments :return: The parsed arguments list. 
""" m = re.match(r'^[\'"](.*)[\'"]$', v) if m: return m.group(1) if v.startswith('[') and v.endswith(']'): # function args must be immutable tuples not list tmp = v.replace('[', '').replace(']', '').strip().split(',') if len(tmp) > 0: return [parse_arg(vv.strip()) for vv in tmp] else: return [] try: v = int(v) # parse int parameter except ValueError: try: v = float(v) # parse float parameter except ValueError: if len(v) == 0: # ignore it when the parameter is empty v = None elif v.lower() == 'true': # parse boolean parameter v = True elif v.lower() == 'false': v = False return v def countdown(t: int, reason: str = 'I am blocking this thread') -> None: """ Display the countdown in console. For example: .. highlight:: python .. code-block:: python countdown(10, reason=colored('re-fetch access token', 'cyan', attrs=['bold', 'reverse'])) :param t: Countdown time. :param reason: A string message of reason for this Countdown. """ try: sys.stdout.write('\n') sys.stdout.flush() while t > 0: t -= 1 msg = f'⏳ {colored("%3d" % t, "yellow")}s left: {reason}' sys.stdout.write(f'\r{msg}') sys.stdout.flush() time.sleep(1) sys.stdout.write('\n') sys.stdout.flush() except KeyboardInterrupt: sys.stdout.write('no more patience? 
good bye!') _random_names = ( ( 'first', 'great', 'local', 'small', 'right', 'large', 'young', 'early', 'major', 'clear', 'black', 'whole', 'third', 'white', 'short', 'human', 'royal', 'wrong', 'legal', 'final', 'close', 'total', 'prime', 'happy', 'sorry', 'basic', 'aware', 'ready', 'green', 'heavy', 'extra', 'civil', 'chief', 'usual', 'front', 'fresh', 'joint', 'alone', 'rural', 'light', 'equal', 'quiet', 'quick', 'daily', 'urban', 'upper', 'moral', 'vital', 'empty', 'brief', ), ( 'world', 'house', 'place', 'group', 'party', 'money', 'point', 'state', 'night', 'water', 'thing', 'order', 'power', 'court', 'level', 'child', 'south', 'staff', 'woman', 'north', 'sense', 'death', 'range', 'table', 'trade', 'study', 'other', 'price', 'class', 'union', 'value', 'paper', 'right', 'voice', 'stage', 'light', 'march', 'board', 'month', 'music', 'field', 'award', 'issue', 'basis', 'front', 'heart', 'force', 'model', 'space', 'peter', ), ) def random_name() -> str: """ Generate a random name from list. :return: A Random name. """ return '_'.join(random.choice(_random_names[j]) for j in range(2)) assigned_ports = set() unassigned_ports = [] DEFAULT_MIN_PORT = 49153 MAX_PORT = 65535 def reset_ports(): def _get_unassigned_ports(): # if we are running out of ports, lower default minimum port if MAX_PORT - DEFAULT_MIN_PORT - len(assigned_ports) < 100: min_port = int(os.environ.get('JINA_RANDOM_PORT_MIN', '16384')) else: min_port = int( os.environ.get('JINA_RANDOM_PORT_MIN', str(DEFAULT_MIN_PORT)) ) max_port = int(os.environ.get('JINA_RANDOM_PORT_MAX', str(MAX_PORT))) return set(range(min_port, max_port + 1)) - set(assigned_ports) unassigned_ports.clear() assigned_ports.clear() unassigned_ports.extend(_get_unassigned_ports()) random.shuffle(unassigned_ports) def random_port() -> Optional[int]: """ Get a random available port number. :return: A random port. 
""" def _random_port(): import socket def _check_bind(port): with socket.socket() as s: try: s.bind(('', port)) s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) return port except OSError: return None _port = None if len(unassigned_ports) == 0: reset_ports() for idx, _port in enumerate(unassigned_ports): if _check_bind(_port) is not None: break else: raise OSError( f'can not find an available port in {len(unassigned_ports)} unassigned ports, assigned already {len(assigned_ports)} ports' ) int_port = int(_port) unassigned_ports.pop(idx) assigned_ports.add(int_port) return int_port try: return _random_port() except OSError: assigned_ports.clear() unassigned_ports.clear() return _random_port() def random_identity(use_uuid1: bool = False) -> str: """ Generate random UUID. ..note:: A MAC address or time-based ordering (UUID1) can afford increased database performance, since it's less work to sort numbers closer-together than those distributed randomly (UUID4) (see here). A second related issue, is that using UUID1 can be useful in debugging, even if origin data is lost or not explicitly stored. :param use_uuid1: use UUID1 instead of UUID4. This is the default Document ID generator. :return: A random UUID. """ return random_uuid(use_uuid1).hex def random_uuid(use_uuid1: bool = False) -> uuid.UUID: """ Get a random UUID. :param use_uuid1: Use UUID1 if True, else use UUID4. :return: A random UUID. """ return uuid.uuid1() if use_uuid1 else uuid.uuid4() def expand_env_var(v: str) -> Optional[Union[bool, int, str, list, float]]: """ Expand the environment variables. :param v: String of environment variables. :return: Parsed environment variables. """ if isinstance(v, str): return parse_arg(os.path.expandvars(v)) else: return v def expand_dict( d: Dict, expand_fn=expand_env_var, resolve_cycle_ref=True ) -> Dict[str, Any]: """ Expand variables from YAML file. :param d: Target Dict. :param expand_fn: Parsed environment variables. 
:param resolve_cycle_ref: Defines if cyclic references should be resolved. :return: Expanded variables. """ expand_map = SimpleNamespace() pat = re.compile(r'{.+}|\$[a-zA-Z0-9_]*\b') def _scan(sub_d: Union[Dict, List], p): if isinstance(sub_d, dict): for k, v in sub_d.items(): if isinstance(v, dict): p.__dict__[k] = SimpleNamespace() _scan(v, p.__dict__[k]) elif isinstance(v, list): p.__dict__[k] = list() _scan(v, p.__dict__[k]) else: p.__dict__[k] = v elif isinstance(sub_d, list): for idx, v in enumerate(sub_d): if isinstance(v, dict): p.append(SimpleNamespace()) _scan(v, p[idx]) elif isinstance(v, list): p.append(list()) _scan(v, p[idx]) else: p.append(v) def _replace(sub_d: Union[Dict, List], p): if isinstance(sub_d, Dict): for k, v in sub_d.items(): if isinstance(v, (dict, list)): _replace(v, p.__dict__[k]) else: if isinstance(v, str) and pat.findall(v): sub_d[k] = _sub(v, p) elif isinstance(sub_d, List): for idx, v in enumerate(sub_d): if isinstance(v, (dict, list)): _replace(v, p[idx]) else: if isinstance(v, str) and pat.findall(v): sub_d[idx] = _sub(v, p) def _sub(v, p): if resolve_cycle_ref: try: v = v.format(root=expand_map, this=p) except KeyError: pass return expand_fn(v) _scan(d, expand_map) _replace(d, expand_map) return d _ATTRIBUTES = { 'bold': 1, 'dark': 2, 'underline': 4, 'blink': 5, 'reverse': 7, 'concealed': 8, } _HIGHLIGHTS = { 'on_grey': 40, 'on_red': 41, 'on_green': 42, 'on_yellow': 43, 'on_blue': 44, 'on_magenta': 45, 'on_cyan': 46, 'on_white': 47, } _COLORS = { 'black': 30, 'red': 31, 'green': 32, 'yellow': 33, 'blue': 34, 'magenta': 35, 'cyan': 36, 'white': 37, } _RESET = '\033[0m' if __windows__: os.system('color') def colored( text: str, color: Optional[str] = None, on_color: Optional[str] = None, attrs: Optional[Union[str, list]] = None, ) -> str: """ Give the text with color. :param text: The target text. :param color: The color of text. Chosen from the following. 
{ 'grey': 30, 'red': 31, 'green': 32, 'yellow': 33, 'blue': 34, 'magenta': 35, 'cyan': 36, 'white': 37 } :param on_color: The on_color of text. Chosen from the following. { 'on_grey': 40, 'on_red': 41, 'on_green': 42, 'on_yellow': 43, 'on_blue': 44, 'on_magenta': 45, 'on_cyan': 46, 'on_white': 47 } :param attrs: Attributes of color. Chosen from the following. { 'bold': 1, 'dark': 2, 'underline': 4, 'blink': 5, 'reverse': 7, 'concealed': 8 } :return: Colored text. """ if 'JINA_LOG_NO_COLOR' not in os.environ: fmt_str = '\033[%dm%s' if color: text = fmt_str % (_COLORS[color], text) if on_color: text = fmt_str % (_HIGHLIGHTS[on_color], text) if attrs: if isinstance(attrs, str): attrs = [attrs] if isinstance(attrs, list): for attr in attrs: text = fmt_str % (_ATTRIBUTES[attr], text) text += _RESET return text class ColorContext: def __init__(self, color: str, bold: Optional[bool] = False): self._color = color self._bold = bold def __enter__(self): if self._bold: fmt_str = '\033[1;%dm' else: fmt_str = '\033[0;%dm' c = fmt_str % (_COLORS[self._color]) print(c, flush=True, end='') return self def __exit__(self, typ, value, traceback): print(_RESET, flush=True, end='') def warn_unknown_args(unknown_args: List[str]): """Creates warnings for all given arguments. 
:param unknown_args: arguments that are possibly unknown to Jina """ from cli.lookup import _build_lookup_table all_args = _build_lookup_table()[0] has_migration_tip = False real_unknown_args = [] warn_strs = [] for arg in unknown_args: if arg.replace('--', '') not in all_args: from jina.parsers.deprecated import get_deprecated_replacement new_arg = get_deprecated_replacement(arg) if new_arg: if not has_migration_tip: warn_strs.append('Migration tips:') has_migration_tip = True warn_strs.append(f'\t`{arg}` has been renamed to `{new_arg}`') real_unknown_args.append(arg) if real_unknown_args: warn_strs = [f'ignored unknown argument: {real_unknown_args}.'] + warn_strs warnings.warn(''.join(warn_strs)) class ArgNamespace: """Helper function for argparse.Namespace object.""" @staticmethod def kwargs2list(kwargs: Dict) -> List[str]: """ Convert dict to an argparse-friendly list. :param kwargs: dictionary of key-values to be converted :return: argument list """ args = [] from jina.serve.executors import BaseExecutor for k, v in kwargs.items(): k = k.replace('_', '-') if v is not None: if isinstance(v, bool): if v: args.append(f'--{k}') elif isinstance(v, list): # for nargs args.extend([f'--{k}', *(str(vv) for vv in v)]) elif isinstance(v, dict): args.extend([f'--{k}', json.dumps(v)]) elif isinstance(v, type) and issubclass(v, BaseExecutor): args.extend([f'--{k}', v.__name__]) else: args.extend([f'--{k}', str(v)]) return args @staticmethod def kwargs2namespace( kwargs: Dict[str, Union[str, int, bool]], parser: ArgumentParser, warn_unknown: bool = False, fallback_parsers: Optional[List[ArgumentParser]] = None, positional_args: Optional[Tuple[str, ...]] = None, ) -> Namespace: """ Convert dict to a namespace. 
:param kwargs: dictionary of key-values to be converted :param parser: the parser for building kwargs into a namespace :param warn_unknown: True, if unknown arguments should be logged :param fallback_parsers: a list of parsers to help resolving the args :param positional_args: some parser requires positional arguments to be presented :return: argument list """ args = ArgNamespace.kwargs2list(kwargs) if positional_args: args += positional_args p_args, unknown_args = parser.parse_known_args(args) if warn_unknown and unknown_args: _leftovers = set(unknown_args) if fallback_parsers: for p in fallback_parsers: _, _unk_args = p.parse_known_args(args) _leftovers = _leftovers.intersection(_unk_args) if not _leftovers: # all args have been resolved break warn_unknown_args(_leftovers) return p_args @staticmethod def get_non_defaults_args( args: Namespace, parser: ArgumentParser, taboo: Optional[Set[str]] = None ) -> Dict: """ Get non-default args in a dict. :param args: the namespace to parse :param parser: the parser for referring the default values :param taboo: exclude keys in the final result :return: non defaults """ if taboo is None: taboo = set() non_defaults = {} _defaults = vars(parser.parse_args([])) for k, v in vars(args).items(): if k in _defaults and k not in taboo and _defaults[k] != v: non_defaults[k] = v return non_defaults @staticmethod def flatten_to_dict( args: Union[Dict[str, 'Namespace'], 'Namespace'] ) -> Dict[str, Any]: """Convert argparse.Namespace to dict to be uploaded via REST. :param args: namespace or dict or namespace to dict. 
:return: pod args """ if isinstance(args, Namespace): return vars(args) elif isinstance(args, dict): pod_args = {} for k, v in args.items(): if isinstance(v, Namespace): pod_args[k] = vars(v) elif isinstance(v, list): pod_args[k] = [vars(_) for _ in v] else: pod_args[k] = v return pod_args def is_valid_local_config_source(path: str) -> bool: # TODO: this function must be refactored before 1.0 (Han 12.22) """ Check if the path is valid. :param path: Local file path. :return: True if the path is valid else False. """ try: from jina.jaml import parse_config_source parse_config_source(path) return True except FileNotFoundError: return False def get_full_version() -> Optional[Tuple[Dict, Dict]]: """ Get the version of libraries used in Jina and environment variables. :return: Version information and environment variables """ import os, grpc, google.protobuf, yaml, platform from jina import ( __version__, __proto_version__, __docarray_version__, __jina_env__, __uptime__, __unset_msg__, ) from google.protobuf.internal import api_implementation from grpc import _grpcio_metadata from jina.logging.predefined import default_logger from uuid import getnode try: info = { 'jina': __version__, 'docarray': __docarray_version__, 'jina-proto': __proto_version__, 'jina-vcs-tag': os.environ.get('JINA_VCS_VERSION', __unset_msg__), 'protobuf': google.protobuf.__version__, 'proto-backend': api_implementation._default_implementation_type, 'grpcio': getattr(grpc, '__version__', _grpcio_metadata.__version__), 'pyyaml': yaml.__version__, 'python': platform.python_version(), 'platform': platform.system(), 'platform-release': platform.release(), 'platform-version': platform.version(), 'architecture': platform.machine(), 'processor': platform.processor(), 'uid': getnode(), 'session-id': str(random_uuid(use_uuid1=True)), 'uptime': __uptime__, 'ci-vendor': get_ci_vendor() or __unset_msg__, } env_info = {k: os.getenv(k, __unset_msg__) for k in __jina_env__} full_version = info, env_info except 
Exception as e: default_logger.error(str(e)) full_version = None return full_version def format_full_version_info(info: Dict, env_info: Dict) -> str: """ Format the version information. :param info: Version information of Jina libraries. :param env_info: The Jina environment variables. :return: Formatted version information. """ version_info = '\n'.join(f'- {k:30s}{v}' for k, v in info.items()) env_info = '\n'.join(f'* {k:30s}{v}' for k, v in env_info.items()) return version_info + '\n' + env_info def _update_policy(): if __windows__: asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) elif 'JINA_DISABLE_UVLOOP' in os.environ: return else: try: import uvloop asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) except ModuleNotFoundError: warnings.warn( 'Install `uvloop` via `pip install "jina[uvloop]"` for better performance.' ) def get_or_reuse_loop(): """ Get a new eventloop or reuse the current opened eventloop. :return: A new eventloop or reuse the current opened eventloop. """ try: loop = asyncio.get_running_loop() if loop.is_closed(): raise RuntimeError except RuntimeError: _update_policy() # no running event loop # create a new loop loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) return loop def typename(obj): """ Get the typename of object. :param obj: Target object. :return: Typename of the obj. """ if not isinstance(obj, type): obj = obj.__class__ try: return f'{obj.__module__}.{obj.__name__}' except AttributeError: return str(obj) class CatchAllCleanupContextManager: """ This context manager guarantees, that the :method:``__exit__`` of the sub context is called, even when there is an Exception in the :method:``__enter__``. :param sub_context: The context, that should be taken care of. 
""" def __init__(self, sub_context): self.sub_context = sub_context def __enter__(self): pass def __exit__(self, exc_type, exc_val, exc_tb): if exc_type: self.sub_context.__exit__(exc_type, exc_val, exc_tb) class cached_property: """The decorator to cache property of a class.""" def __init__(self, func): """ Create the :class:`cached_property`. :param func: Cached function. """ self.func = func def __get__(self, obj, cls): cached_value = obj.__dict__.get(f'CACHED_{self.func.__name__}', None) if cached_value is not None: return cached_value value = obj.__dict__[f'CACHED_{self.func.__name__}'] = self.func(obj) return value def __delete__(self, obj): cached_value = obj.__dict__.get(f'CACHED_{self.func.__name__}', None) if cached_value is not None: if hasattr(cached_value, 'close'): cached_value.close() del obj.__dict__[f'CACHED_{self.func.__name__}'] class _cache_invalidate: """Class for cache invalidation, remove strategy. :param func: func to wrap as a decorator. :param attribute: String as the function name to invalidate cached data. E.g. in :class:`cached_property` we cache data inside the class obj with the `key`: `CACHED_{func.__name__}`, the func name in `cached_property` is the name to invalidate. """ def __init__(self, func, attribute: str): self.func = func self.attribute = attribute def __call__(self, *args, **kwargs): obj = args[0] cached_key = f'CACHED_{self.attribute}' if cached_key in obj.__dict__: del obj.__dict__[cached_key] # invalidate self.func(*args, **kwargs) def __get__(self, obj, cls): from functools import partial return partial(self.__call__, obj) def cache_invalidate(attribute: str): """The cache invalidator decorator to wrap the method call. Check the implementation in :class:`_cache_invalidate`. :param attribute: The func name as was stored in the obj to invalidate. :return: wrapped method. """ def _wrap(func): return _cache_invalidate(func, attribute) return _wrap def get_now_timestamp(): """ Get the datetime. 
:return: The datetime in int format. """ now = datetime.now() return int(datetime.timestamp(now)) def get_readable_time(*args, **kwargs): """ Get the datetime in human readable format (e.g. 115 days and 17 hours and 46 minutes and 40 seconds). For example: .. highlight:: python .. code-block:: python get_readable_time(seconds=1000) :param args: arguments for datetime.timedelta :param kwargs: key word arguments for datetime.timedelta :return: Datetime in human readable format. """ import datetime secs = float(datetime.timedelta(*args, **kwargs).total_seconds()) units = [('day', 86400), ('hour', 3600), ('minute', 60), ('second', 1)] parts = [] for unit, mul in units: if secs / mul >= 1 or mul == 1: if mul > 1: n = int(math.floor(secs / mul)) secs -= n * mul else: n = int(secs) parts.append(f'{n} {unit}' + ('' if n == 1 else 's')) return ' and '.join(parts) def get_internal_ip(): """ Return the private IP address of the gateway for connecting from other machine in the same network. :return: Private IP address. """ import socket ip = '127.0.0.1' try: with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as s: # doesn't even have to be reachable s.connect(('10.255.255.255', 1)) ip = s.getsockname()[0] except Exception: pass return ip def get_public_ip(timeout: float = 0.3): """ Return the public IP address of the gateway for connecting from other machine in the public network. :param timeout: the seconds to wait until return None. :return: Public IP address. .. warn:: Set `timeout` to a large number will block the Flow. 
""" import urllib.request results = [] def _get_ip(url): try: req = urllib.request.Request(url, headers={'User-Agent': 'Mozilla/5.0'}) with urllib.request.urlopen(req, timeout=timeout) as fp: _ip = fp.read().decode().strip() results.append(_ip) except: pass # intentionally ignored, public ip is not showed ip_server_list = [ 'https://api.ipify.org', 'https://ident.me', 'https://checkip.amazonaws.com/', ] threads = [] for idx, ip in enumerate(ip_server_list): t = threading.Thread(target=_get_ip, args=(ip,)) threads.append(t) t.start() for t in threads: t.join(timeout) for r in results: if r: return r def convert_tuple_to_list(d: Dict): """ Convert all the tuple type values from a dict to list. :param d: Dict type of data. """ for k, v in d.items(): if isinstance(v, tuple): d[k] = list(v) elif isinstance(v, dict): convert_tuple_to_list(v) def is_jupyter() -> bool: # pragma: no cover """ Check if we're running in a Jupyter notebook, using magic command `get_ipython` that only available in Jupyter. :return: True if run in a Jupyter notebook else False. """ try: get_ipython # noqa: F821 except NameError: return False shell = get_ipython().__class__.__name__ # noqa: F821 if shell == 'ZMQInteractiveShell': return True # Jupyter notebook or qtconsole elif shell == 'Shell': return True # Google colab elif shell == 'TerminalInteractiveShell': return False # Terminal running IPython else: return False # Other type (?) def iscoroutinefunction(func: Callable): return inspect.iscoroutinefunction(func) async def run_in_threadpool(func: Callable, executor=None, *args, **kwargs): return await get_or_reuse_loop().run_in_executor( executor, functools.partial(func, *args, **kwargs) ) def run_async(func, *args, **kwargs): """Generalized asyncio.run for jupyter notebook. When running inside jupyter, an eventloop is already exist, can't be stopped, can't be killed. 
Directly calling asyncio.run will fail, as This function cannot be called when another asyncio event loop is running in the same thread. .. see_also: https://stackoverflow.com/questions/55409641/asyncio-run-cannot-be-called-from-a-running-event-loop call `run_async(my_function, any_event_loop=True, *args, **kwargs)` to enable run with any eventloop :param func: function to run :param args: parameters :param kwargs: key-value parameters :return: asyncio.run(func) """ any_event_loop = kwargs.pop('any_event_loop', False) class _RunThread(threading.Thread): """Create a running thread when in Jupyter notebook.""" def run(self): """Run given `func` asynchronously.""" self.result = asyncio.run(func(*args, **kwargs)) try: loop = asyncio.get_running_loop() except RuntimeError: loop = None if loop and loop.is_running(): # eventloop already exist # running inside Jupyter if any_event_loop or is_jupyter(): thread = _RunThread() thread.start() thread.join() try: return thread.result except AttributeError: from jina.excepts import BadClient raise BadClient( 'something wrong when running the eventloop, result can not be retrieved' ) else: raise RuntimeError( 'you have an eventloop running but not using Jupyter/ipython, ' 'this may mean you are using Jina with other integration? if so, then you ' 'may want to use Client/Flow(asyncio=True). If not, then ' 'please report this issue here: https://github.com/jina-ai/jina' ) else: return get_or_reuse_loop().run_until_complete(func(*args, **kwargs)) def slugify(value): """ Normalize string, converts to lowercase, removes non-alpha characters, and converts spaces to hyphens. :param value: Original string. :return: Processed string. """ s = str(value).strip().replace(' ', '_') return re.sub(r'(?u)[^-\w.]', '', s) def is_yaml_filepath(val) -> bool: """ Check if the file is YAML file. :param val: Path of target file. :return: True if the file is YAML else False. 
""" if __windows__: r = r'.*.ya?ml$' # TODO: might not be exhaustive else: r = r'^[/\w\-\_\.]+.ya?ml$' return re.match(r, val.strip()) is not None def download_mermaid_url(mermaid_url, output) -> None: """ Download the jpg image from mermaid_url. :param mermaid_url: The URL of the image. :param output: A filename specifying the name of the image to be created, the suffix svg/jpg determines the file type of the output image. """ from urllib.request import Request, urlopen try: req = Request(mermaid_url, headers={'User-Agent': 'Mozilla/5.0'}) with open(output, 'wb') as fp: fp.write(urlopen(req).read()) except: from jina.logging.predefined import default_logger default_logger.error( 'can not download image, please check your graph and the network connections' ) def find_request_binding(target): """Find `@request` decorated methods in a class. :param target: the target class to check :return: a dictionary with key as request type and value as method name """ import ast, inspect from jina import __default_endpoint__ res = {} def visit_function_def(node): for e in node.decorator_list: req_name = '' if isinstance(e, ast.Call) and e.func.id == 'requests': req_name = e.keywords[0].value.s elif isinstance(e, ast.Name) and e.id == 'requests': req_name = __default_endpoint__ if req_name: if req_name in res: raise ValueError( f'you already bind `{res[req_name]}` with `{req_name}` request' ) else: res[req_name] = node.name V = ast.NodeVisitor() V.visit_FunctionDef = visit_function_def V.visit(compile(inspect.getsource(target), '?', 'exec', ast.PyCF_ONLY_AST)) return res def dunder_get(_dict: Any, key: str) -> Any: """Returns value for a specified dunderkey A "dunderkey" is just a fieldname that may or may not contain double underscores (dunderscores!) for referencing nested keys in a dict. 
eg:: >>> data = {'a': {'b': 1}} >>> dunder_get(data, 'a__b') 1 key 'b' can be referrenced as 'a__b' :param _dict : (dict, list, struct or object) which we want to index into :param key : (str) that represents a first level or nested key in the dict :return: (mixed) value corresponding to the key """ try: part1, part2 = key.split('__', 1) except ValueError: part1, part2 = key, '' try: part1 = int(part1) # parse int parameter except ValueError: pass from google.protobuf.struct_pb2 import ListValue from google.protobuf.struct_pb2 import Struct if isinstance(part1, int): result = _dict[part1] elif isinstance(_dict, (dict, Struct, MutableMapping)): if part1 in _dict: result = _dict[part1] else: result = None elif isinstance(_dict, (Iterable, ListValue)): result = _dict[part1] else: result = getattr(_dict, part1) return dunder_get(result, part2) if part2 else result if TYPE_CHECKING: from fastapi import FastAPI def extend_rest_interface(app: 'FastAPI') -> 'FastAPI': """Extend Jina built-in FastAPI instance with customized APIs, routing, etc. :param app: the built-in FastAPI instance given by Jina :return: the extended FastAPI instance .. highlight:: python .. 
code-block:: python def extend_rest_interface(app: 'FastAPI'): @app.get('/extension1') async def root(): return {"message": "Hello World"} return app """ return app def get_ci_vendor() -> Optional[str]: from jina import __resources_path__ with open(os.path.join(__resources_path__, 'ci-vendors.json')) as fp: all_cis = json.load(fp) for c in all_cis: if isinstance(c['env'], str) and c['env'] in os.environ: return c['constant'] elif isinstance(c['env'], dict): for k, v in c['env'].items(): if os.environ.get(k, None) == v: return c['constant'] elif isinstance(c['env'], list): for k in c['env']: if k in os.environ: return c['constant'] def deprecate_by(new_fn): def _f(*args, **kwargs): import inspect old_fn_name = inspect.stack()[1][4][0].strip().split("=")[0].strip() warnings.warn( f'`{old_fn_name}` is renamed to `{new_fn.__name__}` with the same usage, please use the latter instead. ' f'The old function will be removed soon.', DeprecationWarning, ) return new_fn(*args, **kwargs) return _f def get_request_header() -> Dict: """Return the header of request. :return: request header """ metas, envs = get_full_version() header = { **{f'jinameta-{k}': str(v) for k, v in metas.items()}, **envs, } return header
28.411562
142
0.568335
4a14dbb66c75bb96488f7061754c0a079571f7ce
7,194
py
Python
grr/server/grr_response_server/gui/archive_generator.py
certxlm/grr
c2a442a27f656fb18dfa3bce098847e5c5b849d7
[ "Apache-2.0" ]
1
2019-08-28T23:48:20.000Z
2019-08-28T23:48:20.000Z
grr/server/grr_response_server/gui/archive_generator.py
AjitNair2/grr
2a2ea891b3927775872904cdd402a18e7bb3d143
[ "Apache-2.0" ]
2
2022-01-15T03:18:12.000Z
2022-02-13T22:02:43.000Z
grr/server/grr_response_server/gui/archive_generator.py
acidburn0zzz/grr
44e1a5b1630e8101610faaaebe15b19b5ad30cb1
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python """This file contains code to generate ZIP/TAR archives.""" from __future__ import absolute_import from __future__ import division from __future__ import unicode_literals import io import os import zipfile from future.utils import iteritems from grr_response_core.lib import utils from grr_response_core.lib.util import collection from grr_response_core.lib.util.compat import yaml from grr_response_server import data_store from grr_response_server import file_store from grr_response_server.flows.general import export as flow_export from grr_response_server.gui.api_plugins import client as api_client def _ClientPathToString(client_path, prefix=""): """Returns a path-like String of client_path with optional prefix.""" return os.path.join(prefix, client_path.client_id, client_path.vfs_path) class CollectionArchiveGenerator(object): """Class that generates downloaded files archive from a collection.""" ZIP = "zip" TAR_GZ = "tar.gz" FILES_SKIPPED_WARNING = ( "# NOTE: Some files were skipped because they were referenced in the \n" "# collection but were not downloaded by GRR, so there were no data \n" "# blobs in the data store to archive.\n").encode("utf-8") BATCH_SIZE = 1000 def __init__(self, archive_format=ZIP, prefix=None, description=None, predicate=None, client_id=None): """CollectionArchiveGenerator constructor. Args: archive_format: May be ArchiveCollectionGenerator.ZIP or ArchiveCollectionGenerator.TAR_GZ. Defaults to ZIP. prefix: Name of the folder inside the archive that will contain all the generated data. description: String describing archive's contents. It will be included into the auto-generated MANIFEST file. Defaults to 'Files archive collection'. predicate: If not None, only the files matching the predicate will be archived, all others will be skipped. The predicate receives a db.ClientPath as input. client_id: The client_id to use when exporting a flow results collection. Raises: ValueError: if prefix is None. 
""" super(CollectionArchiveGenerator, self).__init__() if archive_format == self.ZIP: self.archive_generator = utils.StreamingZipGenerator( compression=zipfile.ZIP_DEFLATED) elif archive_format == self.TAR_GZ: self.archive_generator = utils.StreamingTarGenerator() else: raise ValueError("Unknown archive format: %s" % archive_format) if not prefix: raise ValueError("Prefix can't be None.") self.prefix = prefix self.description = description or "Files archive collection" self.archived_files = set() self.ignored_files = set() self.failed_files = set() self.processed_files = set() self.predicate = predicate or (lambda _: True) self.client_id = client_id @property def output_size(self): return self.archive_generator.output_size @property def total_files(self): return len(self.processed_files) def _GenerateDescription(self): """Generates description into a MANIFEST file in the archive.""" manifest = { "description": self.description, "processed_files": len(self.processed_files), "archived_files": len(self.archived_files), "ignored_files": len(self.ignored_files), "failed_files": len(self.failed_files) } if self.ignored_files: manifest["ignored_files_list"] = [ _ClientPathToString(cp, prefix="aff4:") for cp in self.ignored_files ] if self.failed_files: manifest["failed_files_list"] = [ _ClientPathToString(cp, prefix="aff4:") for cp in self.failed_files ] manifest_fd = io.BytesIO() if self.total_files != len(self.archived_files): manifest_fd.write(self.FILES_SKIPPED_WARNING) manifest_fd.write(yaml.Dump(manifest).encode("utf-8")) manifest_fd.seek(0) st = os.stat_result( (0o644, 0, 0, 0, 0, 0, len(manifest_fd.getvalue()), 0, 0, 0)) for chunk in self.archive_generator.WriteFromFD( manifest_fd, os.path.join(self.prefix, "MANIFEST"), st=st): yield chunk def _GenerateClientInfo(self, client_id, client_fd): """Yields chucks of archive information for given client.""" summary_dict = client_fd.ToPrimitiveDict(stringify_leaf_fields=True) summary = 
yaml.Dump(summary_dict).encode("utf-8") client_info_path = os.path.join(self.prefix, client_id, "client_info.yaml") st = os.stat_result((0o644, 0, 0, 0, 0, 0, len(summary), 0, 0, 0)) yield self.archive_generator.WriteFileHeader(client_info_path, st=st) yield self.archive_generator.WriteFileChunk(summary) yield self.archive_generator.WriteFileFooter() def Generate(self, items): """Generates archive from a given collection. Iterates the collection and generates an archive by yielding contents of every referenced file. Args: items: Iterable of rdf_client_fs.StatEntry objects Yields: Binary chunks comprising the generated archive. """ client_ids = set() for item_batch in collection.Batch(items, self.BATCH_SIZE): client_paths = set() for item in item_batch: try: client_path = flow_export.CollectionItemToClientPath( item, self.client_id) except flow_export.ItemNotExportableError: continue if not self.predicate(client_path): self.ignored_files.add(client_path) self.processed_files.add(client_path) continue client_ids.add(client_path.client_id) client_paths.add(client_path) for chunk in file_store.StreamFilesChunks(client_paths): self.processed_files.add(chunk.client_path) for output in self._WriteFileChunk(chunk=chunk): yield output self.processed_files |= client_paths - ( self.ignored_files | self.archived_files) if client_ids: for client_id, client_info in iteritems( data_store.REL_DB.MultiReadClientFullInfo(client_ids)): client = api_client.ApiClient().InitFromClientInfo(client_info) for chunk in self._GenerateClientInfo(client_id, client): yield chunk for chunk in self._GenerateDescription(): yield chunk yield self.archive_generator.Close() def _WriteFileChunk(self, chunk): """Yields binary chunks, respecting archive file headers and footers. Args: chunk: the StreamedFileChunk to be written """ if chunk.chunk_index == 0: # Make sure size of the original file is passed. It's required # when output_writer is StreamingTarWriter. 
st = os.stat_result((0o644, 0, 0, 0, 0, 0, chunk.total_size, 0, 0, 0)) target_path = _ClientPathToString(chunk.client_path, prefix=self.prefix) yield self.archive_generator.WriteFileHeader(target_path, st=st) yield self.archive_generator.WriteFileChunk(chunk.data) if chunk.chunk_index == chunk.total_chunks - 1: yield self.archive_generator.WriteFileFooter() self.archived_files.add(chunk.client_path)
34.421053
79
0.704754
4a14dc42262408a42b8bb579c7ac5b1538100803
359
py
Python
mall/apps/areas/serializers.py
xxbsg/meiduo
0e82628833c4b482884cd392b8d22cb8558f1ffd
[ "MIT" ]
null
null
null
mall/apps/areas/serializers.py
xxbsg/meiduo
0e82628833c4b482884cd392b8d22cb8558f1ffd
[ "MIT" ]
null
null
null
mall/apps/areas/serializers.py
xxbsg/meiduo
0e82628833c4b482884cd392b8d22cb8558f1ffd
[ "MIT" ]
null
null
null
from rest_framework import serializers from areas.models import Area class AreaSerializer(serializers.ModelSerializer): class Meta: model = Area fields = ['id','name'] class SubsAreaSerializer(serializers.ModelSerializer): subs = AreaSerializer(many=True) class Meta: model = Area fields = ['subs','id','name']
22.4375
54
0.679666
4a14de00e7b34f8a8c949df2b98a6d4bfb029d24
402
py
Python
password_required/test_urls.py
temnoregg/django-password-required
83721290ae61eafdfd2accd35a14e6916e467091
[ "BSD-3-Clause" ]
18
2015-07-24T07:24:08.000Z
2022-02-07T09:51:34.000Z
password_required/test_urls.py
temnoregg/django-password-required
83721290ae61eafdfd2accd35a14e6916e467091
[ "BSD-3-Clause" ]
3
2017-12-09T02:05:15.000Z
2020-11-04T11:59:48.000Z
password_required/test_urls.py
temnoregg/django-password-required
83721290ae61eafdfd2accd35a14e6916e467091
[ "BSD-3-Clause" ]
11
2015-03-16T08:41:22.000Z
2020-11-03T08:43:33.000Z
# -*- coding: utf-8 -*- from django.conf.urls.defaults import * urlpatterns = patterns('', (r'^password_required/$', 'password_required.views.login'), # We just need a 200 response code, never mind that the template # produces no output without a context. (r'^test/$', 'django.views.generic.simple.direct_to_template', { 'template': 'password_required_login.html', }), )
28.714286
68
0.669154
4a14de1914edd9be0af2d5c9fa025616c80cf6f8
9,174
py
Python
code/churnexplainer.py
peterableda/CML_AMP_Churn_Prediction
bf7982a0dc3550eb257f1235893e23c4cd38d579
[ "Apache-2.0" ]
6
2020-12-13T13:31:44.000Z
2022-01-14T14:47:24.000Z
code/churnexplainer.py
Foxfusion/CML_AMP_Churn_Prediction
a2a83c61d1708f762390bd77777465271f129d1c
[ "Apache-2.0" ]
4
2021-04-18T21:17:15.000Z
2022-03-29T01:33:54.000Z
code/churnexplainer.py
Foxfusion/CML_AMP_Churn_Prediction
a2a83c61d1708f762390bd77777465271f129d1c
[ "Apache-2.0" ]
11
2020-11-24T17:52:27.000Z
2022-03-22T20:17:59.000Z
# ########################################################################### # # CLOUDERA APPLIED MACHINE LEARNING PROTOTYPE (AMP) # (C) Cloudera, Inc. 2021 # All rights reserved. # # Applicable Open Source License: Apache 2.0 # # NOTE: Cloudera open source products are modular software products # made up of hundreds of individual components, each of which was # individually copyrighted. Each Cloudera open source product is a # collective work under U.S. Copyright Law. Your license to use the # collective work is as provided in your written agreement with # Cloudera. Used apart from the collective work, this file is # licensed for your use pursuant to the open source license # identified above. # # This code is provided to you pursuant a written agreement with # (i) Cloudera, Inc. or (ii) a third-party authorized to distribute # this code. If you do not have a written agreement with Cloudera nor # with an authorized and properly licensed third party, you do not # have any rights to access nor to use this code. # # Absent a written agreement with Cloudera, Inc. (“Cloudera”) to the # contrary, A) CLOUDERA PROVIDES THIS CODE TO YOU WITHOUT WARRANTIES OF ANY # KIND; (B) CLOUDERA DISCLAIMS ANY AND ALL EXPRESS AND IMPLIED # WARRANTIES WITH RESPECT TO THIS CODE, INCLUDING BUT NOT LIMITED TO # IMPLIED WARRANTIES OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY AND # FITNESS FOR A PARTICULAR PURPOSE; (C) CLOUDERA IS NOT LIABLE TO YOU, # AND WILL NOT DEFEND, INDEMNIFY, NOR HOLD YOU HARMLESS FOR ANY CLAIMS # ARISING FROM OR RELATED TO THE CODE; AND (D)WITH RESPECT TO YOUR EXERCISE # OF ANY RIGHTS GRANTED TO YOU FOR THE CODE, CLOUDERA IS NOT LIABLE FOR ANY # DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, PUNITIVE OR # CONSEQUENTIAL DAMAGES INCLUDING, BUT NOT LIMITED TO, DAMAGES # RELATED TO LOST REVENUE, LOST PROFITS, LOSS OF INCOME, LOSS OF # BUSINESS ADVANTAGE OR UNAVAILABILITY, OR LOSS OR CORRUPTION OF # DATA. 
# # ########################################################################### import datetime, dill, os import pandas as pd from sklearn.pipeline import TransformerMixin from sklearn.preprocessing import LabelEncoder """ Explained model is a class that has attributes: - data, i.e. the features you get for a given dataset from load_dataset. This is a pandas dataframe that may include categorical variables. - labels, i.e. the boolean labels you get for a given dataset from load_dataset. - categoricalencoder, a fitted sklearn Transformer object that transforms the categorical columns in `data` to deterministic integer codes, yielding a plain numpy array often called `X` (leaves non-categorical columns untouched) - pipeline, a trained sklearn pipeline that takes `X` as input and predicts. - explainer, an instantiated LIME explainer that yields an explanation when it's explain instance method is run on an example `X` properties: - default_data - categorical_features - non_categorical_features - dtypes and methods for API (which works in terms of dictionaries): - cast_dct, converts values of dictionary to dtype corresponding to key - explain_dct, returns prediction and explanation for example dictionary and methods for users (who usually have dataframes): - predict_df, returns predictions for a df, i.e. 
runs it through categorical encoder and pipeline - explain_df, returns predictions and explanation for example dataframe """ class ExplainedModel: def __init__( self, model_name=None, labels=None, data=None, # dataset=None, data=None, labels=None, categoricalencoder=None, pipeline=None, explainer=None, data_dir=None, load=True, ): if model_name is not None: self.model_name = model_name self.is_loaded = False else: self.data = data self.labels = labels self.categoricalencoder = categoricalencoder self.pipeline = pipeline self.explainer = explainer self.is_loaded = True self.model_dir = os.path.join(data_dir, "models", self.model_name) self.model_path = os.path.join(self.model_dir, self.model_name + ".pkl") # if asked to load and not yet loaded, load model! if load and not self.is_loaded: self.load() def load(self): if not self.is_loaded: with open(self.model_path, "rb") as f: self.__dict__.update(dill.load(f)) self.is_loaded = True def save(self): dilldict = { "data": self.data, "labels": self.labels, "categoricalencoder": self.categoricalencoder, "pipeline": self.pipeline, "explainer": self.explainer, } # self._make_model_dir() with open(self.model_path, "wb") as f: dill.dump(dilldict, f) # def _make_model_name(self): # now = datetime.datetime.now().strftime("%Y%m%dT%H%M%S") # model_type = os.environ.get('CHURN_MODEL_TYPE', 'linear') # #model_name = '_'.join([now, self.dataset, model_type, get_git_hash()]) # model_name = '_'.join([now, self.dataset, model_type]) # return model_name # # def _make_model_dir(self): # if not os.path.exists(self.model_dir): # os.makedirs(self.model_dir) def predict_df(self, df): X = self.categoricalencoder.transform(df) return self.pipeline.predict_proba(X)[:, 1] def explain_df(self, df): X = self.categoricalencoder.transform(df) probability = self.pipeline.predict_proba(X)[0, 1] e = self.explainer.explain_instance(X[0], self.pipeline.predict_proba).as_map()[ 1 ] explanations = {self.explainer.feature_names[c]: weight for c, weight in 
e} return probability, explanations def explain_dct(self, dct): return self.explain_df(pd.DataFrame([dct])) def cast_dct(self, dct): return {k: self.dtypes[k].type(v) for k, v in dct.items()} @property def dtypes(self): if not hasattr(self, "_dtypes"): d = self.data[self.non_categorical_features].dtypes.to_dict() d.update( { c: self.data[c].cat.categories.dtype for c in self.categorical_features } ) self._dtypes = d return self._dtypes @property def non_categorical_features(self): return list( self.data.select_dtypes(exclude=["category"]).columns.drop( self.labels.name + " probability" ) ) @property def categorical_features(self): return list(self.data.select_dtypes(include=["category"]).columns) @property def stats(self): def describe(s): return { "median": s.median(), "mean": s.mean(), "min": s.min(), "max": s.max(), "std": s.std(), } if not hasattr(self, "_stats"): self._stats = { c: describe(self.data[c]) for c in self.non_categorical_features } return self._stats @property def label_name(self): return self.labels.name + " probability" @property def categories(self): return { feature: list(self.categoricalencoder.classes_[feature]) for feature in self.categorical_features } @property def default_data(self): # 0th class for categorical variables and mean for continuous if not hasattr(self, "_default_data"): d = {} d.update( { feature: self.categoricalencoder.classes_[feature][0] for feature in self.categorical_features } ) d.update( { feature: self.data[feature].median() for feature in self.non_categorical_features } ) self._default_data = d return self._default_data class CategoricalEncoder(TransformerMixin): def fit(self, X, y=None, *args, **kwargs): self.columns_ = X.columns self.cat_columns_ix_ = { c: i for i, c in enumerate(X.columns) if pd.api.types.is_categorical_dtype(X[c]) } self.cat_columns_ = pd.Index(self.cat_columns_ix_.keys()) self.non_cat_columns_ = X.columns.drop(self.cat_columns_) self.les_ = {c: LabelEncoder().fit(X[c]) for c in 
self.cat_columns_} self.classes_ = {c: list(self.les_[c].classes_) for c in self.cat_columns_} return self def transform(self, X, y=None, *args, **kwargs): data = X[self.columns_].values for c, i in self.cat_columns_ix_.items(): data[:, i] = self.les_[c].transform(data[:, i]) return data.astype(float) def __repr__(self): return "{}()".format(self.__class__.__name__)
36.404762
88
0.621648
4a14de195a3d1a7002c9994f208f75ed11a91e44
4,919
py
Python
tests/test_fix_code.py
specfault/GreenerPython
976260c3e78969cfd3e1e40639325f104325c703
[ "MIT" ]
null
null
null
tests/test_fix_code.py
specfault/GreenerPython
976260c3e78969cfd3e1e40639325f104325c703
[ "MIT" ]
null
null
null
tests/test_fix_code.py
specfault/GreenerPython
976260c3e78969cfd3e1e40639325f104325c703
[ "MIT" ]
null
null
null
import textwrap from tests.framework import AbstractFilePair from tests.framework import in_test_function from tests.framework import standard_test_spec from tests.framework import SavingFixesCombination from tests.framework import SavingFixesSUT from tests.framework import SavingDoesNotTouchBrokenStuff from tests.framework import fixing_test # both, test and SUT, are broken but fixable @fixing_test class TestSavingFixesCombination(SavingFixesCombination): tests = [ AbstractFilePair( # missing import in test, missing variable in SUT 'blubb', textwrap.dedent("""\ import unittest class TestSomething(unittest.TestCase): def test_something(self): bla = blubb.x """)) ] various_argument_lists = [[], ['arg'], ['arg1', 'arg2']] def missing_function_in_source(argument_list): # having the import is important: # it allows us to check that the test wasn't touched lines = [arg + ' = None' for arg in argument_list] lines += ['bla = blubb.random_function(' + ', '.join(argument_list) + ')'] test_code = in_test_function('\n'.join(lines)) return AbstractFilePair('blubb', test=test_code) # SUT is broken but fixable @fixing_test class TestSavingFixesSUT(SavingFixesSUT): tests = [ # call missing function with literal argument standard_test_spec( """ arg = 1 bla = blubb.some_function(arg, 42) """), # call missing function with two literal arguments standard_test_spec( """ bla = blubb.some_function(41, 42) """), # missing function with default argument standard_test_spec( """ arg = 1 bla = blubb.some_function(a=42) bla = blubb.some_function(arg) """), # missing function with normal and default argument standard_test_spec( """ arg = 1 bla = blubb.some_function(arg, a=42) bla = blubb.some_function(arg) """), # argument names might clash with generated names standard_test_spec( """ arg0 = 1 arg1 = 1 arg4 = 1 bla = blubb.some_function(arg4, arg0, 42, arg1) """), # create object of missing class with unary constructor standard_test_spec( """ a = blubb.Something(17) """), # create object of 
missing class with attribute standard_test_spec( """ a = blubb.Something() b = a.attribute """), # create object of missing class with nullary method standard_test_spec( """ a = blubb.Something() b = a.fun() """), # create object of missing class with unary method standard_test_spec( """ a = blubb.Something(42) """), ]\ + [missing_function_in_source(args) for args in various_argument_lists] # SUT and test are broken beyond repair @fixing_test class TestSavingDoesNotTouchBrokenStuff(SavingDoesNotTouchBrokenStuff): tests = [ standard_test_spec( # different number of function arguments """ bla = blubb.random_function(42) aaa = blubb.random_function(42, 37) """, """ def random_function(): pass """), standard_test_spec( # using non-existent lib 'bla = lalelu.x'), # using nonexistent lib variable AbstractFilePair( 'blubb', 'import collections\n' + in_test_function('Point = collections.random_typo')), # using lib variable as function AbstractFilePair( 'blubb', 'import math\n' + in_test_function('Point = math.pi()')), # using lib variable as function # SUT having same variable name makes the problem harder AbstractFilePair( 'blubb', 'import math\n' + in_test_function('Point = math.pi()'), 'pi = None\n'), # cryptic and broken import # (doesn't yet deal with wildcards) AbstractFilePair( 'blubb', 'from lalelu import *\n' + in_test_function('self.assertTrue(True)')), # adding strings and numbers is broken beyond repair # this should test return code JUST_BROKEN standard_test_spec( 'a = 3 + "lol"'), standard_test_spec( # broken function definition (missing colon) 'bla = blubb.random_function(42)', """ def random_function() pass """) ]
32.361842
79
0.559667
4a14decd7298c39f428e91e302de8694713f4dd6
329
py
Python
setup.py
shaun95/IMS-Toucan
331d3bb5ce7d010ce9bc0a739569d17bb9de6a25
[ "Apache-2.0" ]
93
2021-08-11T13:52:37.000Z
2022-03-29T23:19:07.000Z
setup.py
shaun95/IMS-Toucan
331d3bb5ce7d010ce9bc0a739569d17bb9de6a25
[ "Apache-2.0" ]
4
2021-12-15T17:23:14.000Z
2022-03-24T04:51:40.000Z
setup.py
shaun95/IMS-Toucan
331d3bb5ce7d010ce9bc0a739569d17bb9de6a25
[ "Apache-2.0" ]
25
2021-08-11T14:23:47.000Z
2022-03-28T20:23:51.000Z
from pathlib import Path from typing import List from setuptools import setup, find_packages project_root = Path(__file__).parent install_requires: List[str] = [] print(find_packages()) setup(name="ims_toucan", version="0.0.1", packages=find_packages(), python_requires=">=3.8", install_requires=install_requires, )
23.5
92
0.756839
4a14df4b94aa4e503c5bc90265ed2a43853744ce
51,389
py
Python
venv/lib/python3.6/site-packages/ansible_collections/cisco/nxos/plugins/module_utils/network/nxos/nxos.py
usegalaxy-no/usegalaxy
75dad095769fe918eb39677f2c887e681a747f3a
[ "MIT" ]
1
2020-01-22T13:11:23.000Z
2020-01-22T13:11:23.000Z
venv/lib/python3.6/site-packages/ansible_collections/cisco/nxos/plugins/module_utils/network/nxos/nxos.py
usegalaxy-no/usegalaxy
75dad095769fe918eb39677f2c887e681a747f3a
[ "MIT" ]
12
2020-02-21T07:24:52.000Z
2020-04-14T09:54:32.000Z
venv/lib/python3.6/site-packages/ansible_collections/cisco/nxos/plugins/module_utils/network/nxos/nxos.py
usegalaxy-no/usegalaxy
75dad095769fe918eb39677f2c887e681a747f3a
[ "MIT" ]
null
null
null
# # This code is part of Ansible, but is an independent component. # # This particular file snippet, and this file snippet only, is BSD licensed. # Modules you write using this snippet, which is embedded dynamically by Ansible # still belong to the author of the module, and may assign their own license # to the complete work. # # Copyright: (c) 2017, Red Hat Inc. # # Redistribution and use in source and binary forms, with or without modification, # are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. # IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE # USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
from __future__ import absolute_import, division, print_function __metaclass__ = type import collections import json import re import sys from copy import deepcopy from ansible.module_utils._text import to_text from ansible.module_utils.basic import env_fallback from ansible_collections.ansible.netcommon.plugins.module_utils.network.common.utils import ( to_list, ComplexList, ) from ansible.module_utils.connection import Connection, ConnectionError from ansible.module_utils.common._collections_compat import Mapping from ansible_collections.ansible.netcommon.plugins.module_utils.network.common.config import ( NetworkConfig, dumps, ) from ansible_collections.ansible.netcommon.plugins.module_utils.network.common.config import ( CustomNetworkConfig, ) from ansible.module_utils.six import iteritems, PY2, PY3 from ansible.module_utils.urls import fetch_url try: import yaml HAS_YAML = True except ImportError: HAS_YAML = False try: if sys.version_info[:2] < (2, 7): from ordereddict import OrderedDict else: from collections import OrderedDict HAS_ORDEREDDICT = True except ImportError: HAS_ORDEREDDICT = False _DEVICE_CONNECTION = None nxos_provider_spec = { "host": dict(type="str"), "port": dict(type="int"), "username": dict( type="str", fallback=(env_fallback, ["ANSIBLE_NET_USERNAME"]) ), "password": dict( type="str", no_log=True, fallback=(env_fallback, ["ANSIBLE_NET_PASSWORD"]), ), "ssh_keyfile": dict( type="str", fallback=(env_fallback, ["ANSIBLE_NET_SSH_KEYFILE"]), no_log=False, ), "authorize": dict( type="bool", default=False, fallback=(env_fallback, ["ANSIBLE_NET_AUTHORIZE"]), ), "auth_pass": dict( type="str", no_log=True, fallback=(env_fallback, ["ANSIBLE_NET_AUTH_PASS"]), ), "use_ssl": dict(type="bool", default=False), "use_proxy": dict(type="bool", default=True), "validate_certs": dict(type="bool", default=False), "timeout": dict(type="int"), "transport": dict(type="str", default="cli", choices=["cli", "nxapi"]), } nxos_argument_spec = { "provider": dict( 
type="dict", options=nxos_provider_spec, removed_at_date="2022-06-01", removed_from_collection="cisco.nxos", ) } def get_provider_argspec(): return nxos_provider_spec def get_connection(module): global _DEVICE_CONNECTION if not _DEVICE_CONNECTION: if is_local_nxapi(module): conn = LocalNxapi(module) else: connection_proxy = Connection(module._socket_path) cap = json.loads(connection_proxy.get_capabilities()) if cap["network_api"] == "cliconf": conn = Cli(module) elif cap["network_api"] == "nxapi": conn = HttpApi(module) _DEVICE_CONNECTION = conn return _DEVICE_CONNECTION class Cli: def __init__(self, module): self._module = module self._device_configs = {} self._connection = None def _get_connection(self): if self._connection: return self._connection self._connection = Connection(self._module._socket_path) return self._connection def get_config(self, flags=None): """Retrieves the current config from the device or cache """ flags = [] if flags is None else flags cmd = "show running-config " cmd += " ".join(flags) cmd = cmd.strip() try: return self._device_configs[cmd] except KeyError: connection = self._get_connection() try: out = connection.get_config(flags=flags) except ConnectionError as exc: self._module.fail_json( msg=to_text(exc, errors="surrogate_then_replace") ) cfg = to_text(out, errors="surrogate_then_replace").strip() + "\n" self._device_configs[cmd] = cfg return cfg def run_commands(self, commands, check_rc=True): """Run list of commands on remote device and return results """ connection = self._get_connection() try: out = connection.run_commands(commands, check_rc) if check_rc == "retry_json": capabilities = self.get_capabilities() network_api = capabilities.get("network_api") if network_api == "cliconf" and out: for index, resp in enumerate(out): if ( "Invalid command at" in resp or "Ambiguous command at" in resp ) and "json" in resp: if commands[index]["output"] == "json": commands[index]["output"] = "text" out = connection.run_commands( commands, 
check_rc ) return out except ConnectionError as exc: self._module.fail_json(msg=to_text(exc)) def load_config(self, config, return_error=False, opts=None, replace=None): """Sends configuration commands to the remote device """ if opts is None: opts = {} connection = self._get_connection() responses = [] try: resp = connection.edit_config(config, replace=replace) if isinstance(resp, Mapping): resp = resp["response"] except ConnectionError as e: code = getattr(e, "code", 1) message = getattr(e, "err", e) err = to_text(message, errors="surrogate_then_replace") if opts.get("ignore_timeout") and code: responses.append(err) return responses elif code and "no graceful-restart" in err: if ( "ISSU/HA will be affected if Graceful Restart is disabled" in err ): msg = [""] responses.extend(msg) return responses else: self._module.fail_json(msg=err) elif code: self._module.fail_json(msg=err) responses.extend(resp) return responses def get_diff( self, candidate=None, running=None, diff_match="line", diff_ignore_lines=None, path=None, diff_replace="line", ): conn = self._get_connection() try: response = conn.get_diff( candidate=candidate, running=running, diff_match=diff_match, diff_ignore_lines=diff_ignore_lines, path=path, diff_replace=diff_replace, ) except ConnectionError as exc: self._module.fail_json( msg=to_text(exc, errors="surrogate_then_replace") ) return response def get_capabilities(self): """Returns platform info of the remove device """ if hasattr(self._module, "_capabilities"): return self._module._capabilities connection = self._get_connection() try: capabilities = connection.get_capabilities() except ConnectionError as exc: self._module.fail_json( msg=to_text(exc, errors="surrogate_then_replace") ) self._module._capabilities = json.loads(capabilities) return self._module._capabilities def read_module_context(self, module_key): connection = self._get_connection() try: module_context = connection.read_module_context(module_key) except ConnectionError as exc: 
self._module.fail_json( msg=to_text(exc, errors="surrogate_then_replace") ) return module_context def save_module_context(self, module_key, module_context): connection = self._get_connection() try: connection.save_module_context(module_key, module_context) except ConnectionError as exc: self._module.fail_json( msg=to_text(exc, errors="surrogate_then_replace") ) return None class LocalNxapi: OUTPUT_TO_COMMAND_TYPE = { "text": "cli_show_ascii", "json": "cli_show", "bash": "bash", "config": "cli_conf", } def __init__(self, module): self._module = module self._nxapi_auth = None self._device_configs = {} self._module_context = {} provider = self._module.params.get("provider") or {} self._module.params["url_username"] = provider.get("username") self._module.params["url_password"] = provider.get("password") host = provider.get("host") port = provider.get("port") if provider.get("use_ssl"): proto = "https" port = port or 443 else: proto = "http" port = port or 80 self._url = "%s://%s:%s/ins" % (proto, host, port) def _error(self, msg, **kwargs): self._nxapi_auth = None if "url" not in kwargs: kwargs["url"] = self._url self._module.fail_json(msg=msg, **kwargs) def _request_builder( self, commands, output, version="1.0", chunk="0", sid=None ): """Encodes a NXAPI JSON request message """ try: command_type = self.OUTPUT_TO_COMMAND_TYPE[output] except KeyError: msg = "invalid format, received %s, expected one of %s" % ( output, ",".join(self.OUTPUT_TO_COMMAND_TYPE.keys()), ) self._error(msg=msg) if isinstance(commands, (list, set, tuple)): commands = " ;".join(commands) # Order should not matter but some versions of NX-OS software fail # to process the payload properly if 'input' gets serialized before # 'type' and the payload of 'input' contains the word 'type'. 
msg = collections.OrderedDict() msg["version"] = version msg["type"] = command_type msg["chunk"] = chunk msg["sid"] = sid msg["input"] = commands msg["output_format"] = "json" return dict(ins_api=msg) def send_request( self, commands, output="text", check_status=True, return_error=False, opts=None, ): # only 10 show commands can be encoded in each request # messages sent to the remote device if opts is None: opts = {} if output != "config": commands = collections.deque(to_list(commands)) stack = list() requests = list() while commands: stack.append(commands.popleft()) if len(stack) == 10: body = self._request_builder(stack, output) data = self._module.jsonify(body) requests.append(data) stack = list() if stack: body = self._request_builder(stack, output) data = self._module.jsonify(body) requests.append(data) else: body = self._request_builder(commands, "config") requests = [self._module.jsonify(body)] headers = {"Content-Type": "application/json"} result = list() timeout = self._module.params["provider"]["timeout"] use_proxy = self._module.params["provider"]["use_proxy"] for req in requests: if self._nxapi_auth: headers["Cookie"] = self._nxapi_auth response, headers = fetch_url( self._module, self._url, data=req, headers=headers, timeout=timeout, method="POST", use_proxy=use_proxy, ) self._nxapi_auth = headers.get("set-cookie") if opts.get("ignore_timeout") and re.search( r"(-1|5\d\d)", str(headers["status"]) ): result.append(headers["status"]) return result elif headers["status"] != 200: self._error(**headers) try: response = self._module.from_json(response.read()) except ValueError: self._module.fail_json(msg="unable to parse response") if response["ins_api"].get("outputs"): output = response["ins_api"]["outputs"]["output"] for item in to_list(output): if check_status is True and item["code"] != "200": if return_error: result.append(item) else: self._error(output=output, **item) elif "body" in item: result.append(item["body"]) # else: # error in command but 
since check_status is disabled # silently drop it. # result.append(item['msg']) return result def get_config(self, flags=None): """Retrieves the current config from the device or cache """ flags = [] if flags is None else flags cmd = "show running-config " cmd += " ".join(flags) cmd = cmd.strip() try: return self._device_configs[cmd] except KeyError: out = self.send_request(cmd) cfg = str(out[0]).strip() self._device_configs[cmd] = cfg return cfg def run_commands(self, commands, check_rc=True): """Run list of commands on remote device and return results """ output = None queue = list() responses = list() def _send(commands, output): return self.send_request(commands, output, check_status=check_rc) for item in to_list(commands): if is_json(item["command"]): item["command"] = str(item["command"]).rsplit("|", 1)[0] item["output"] = "json" if all((output == "json", item["output"] == "text")) or all( (output == "text", item["output"] == "json") ): responses.extend(_send(queue, output)) queue = list() output = item["output"] or "json" queue.append(item["command"]) if queue: responses.extend(_send(queue, output)) return responses def load_config( self, commands, return_error=False, opts=None, replace=None ): """Sends the ordered set of commands to the device """ if opts is None: opts = {} responses = [] if replace: device_info = self.get_device_info() if "9K" not in device_info.get("network_os_platform", ""): self._module.fail_json( msg="replace is supported only on Nexus 9K devices" ) commands = "config replace {0}".format(replace) commands = to_list(commands) try: resp = self.send_request( commands, output="config", check_status=True, return_error=return_error, opts=opts, ) except ValueError as exc: code = getattr(exc, "code", 1) message = getattr(exc, "err", exc) err = to_text(message, errors="surrogate_then_replace") if opts.get("ignore_timeout") and code: responses.append(code) return responses elif code and "no graceful-restart" in err: if ( "ISSU/HA will be 
affected if Graceful Restart is disabled" in err ): msg = [""] responses.extend(msg) return responses else: self._module.fail_json(msg=err) elif code: self._module.fail_json(msg=err) if return_error: return resp else: return responses.extend(resp) def get_diff( self, candidate=None, running=None, diff_match="line", diff_ignore_lines=None, path=None, diff_replace="line", ): diff = {} # prepare candidate configuration candidate_obj = NetworkConfig(indent=2) candidate_obj.load(candidate) if running and diff_match != "none" and diff_replace != "config": # running configuration running_obj = NetworkConfig( indent=2, contents=running, ignore_lines=diff_ignore_lines ) configdiffobjs = candidate_obj.difference( running_obj, path=path, match=diff_match, replace=diff_replace ) else: configdiffobjs = candidate_obj.items diff["config_diff"] = ( dumps(configdiffobjs, "commands") if configdiffobjs else "" ) return diff def get_device_info(self): device_info = {} device_info["network_os"] = "nxos" reply = self.run_commands( {"command": "show version", "output": "json"} ) data = reply[0] platform_reply = self.run_commands( {"command": "show inventory", "output": "json"} ) platform_info = platform_reply[0] device_info["network_os_version"] = data.get( "sys_ver_str" ) or data.get("kickstart_ver_str") device_info["network_os_model"] = data["chassis_id"] device_info["network_os_hostname"] = data["host_name"] device_info["network_os_image"] = data.get( "isan_file_name" ) or data.get("kick_file_name") if platform_info: inventory_table = platform_info["TABLE_inv"]["ROW_inv"] for info in inventory_table: if "Chassis" in info["name"]: device_info["network_os_platform"] = info["productid"] return device_info def get_capabilities(self): result = {} result["device_info"] = self.get_device_info() result["network_api"] = "nxapi" return result def read_module_context(self, module_key): if self._module_context.get(module_key): return self._module_context[module_key] return None def 
save_module_context(self, module_key, module_context): self._module_context[module_key] = module_context return None class HttpApi: def __init__(self, module): self._module = module self._device_configs = {} self._module_context = {} self._connection_obj = None @property def _connection(self): if not self._connection_obj: self._connection_obj = Connection(self._module._socket_path) return self._connection_obj def run_commands(self, commands, check_rc=True): """Runs list of commands on remote device and returns results """ try: out = self._connection.send_request(commands) except ConnectionError as exc: if check_rc is True: raise out = to_text(exc) out = to_list(out) if not out[0]: return out for index, response in enumerate(out): if response[0] == "{": out[index] = json.loads(response) return out def get_config(self, flags=None): """Retrieves the current config from the device or cache """ flags = [] if flags is None else flags cmd = "show running-config " cmd += " ".join(flags) cmd = cmd.strip() try: return self._device_configs[cmd] except KeyError: try: out = self._connection.send_request(cmd) except ConnectionError as exc: self._module.fail_json( msg=to_text(exc, errors="surrogate_then_replace") ) cfg = to_text(out).strip() self._device_configs[cmd] = cfg return cfg def get_diff( self, candidate=None, running=None, diff_match="line", diff_ignore_lines=None, path=None, diff_replace="line", ): diff = {} # prepare candidate configuration candidate_obj = NetworkConfig(indent=2) candidate_obj.load(candidate) if running and diff_match != "none" and diff_replace != "config": # running configuration running_obj = NetworkConfig( indent=2, contents=running, ignore_lines=diff_ignore_lines ) configdiffobjs = candidate_obj.difference( running_obj, path=path, match=diff_match, replace=diff_replace ) else: configdiffobjs = candidate_obj.items diff["config_diff"] = ( dumps(configdiffobjs, "commands") if configdiffobjs else "" ) return diff def load_config( self, commands, 
return_error=False, opts=None, replace=None ): """Sends the ordered set of commands to the device """ if opts is None: opts = {} responses = [] try: resp = self.edit_config(commands, replace=replace) except ConnectionError as exc: code = getattr(exc, "code", 1) message = getattr(exc, "err", exc) err = to_text(message, errors="surrogate_then_replace") if opts.get("ignore_timeout") and code: responses.append(code) return responses elif opts.get("catch_clierror") and "400" in code: return [code, err] elif code and "no graceful-restart" in err: if ( "ISSU/HA will be affected if Graceful Restart is disabled" in err ): msg = [""] responses.extend(msg) return responses else: self._module.fail_json(msg=err) elif code: self._module.fail_json(msg=err) responses.extend(resp) return responses def edit_config( self, candidate=None, commit=True, replace=None, comment=None ): resp = list() self.check_edit_config_capability(candidate, commit, replace, comment) if replace: candidate = "config replace {0}".format(replace) responses = self._connection.send_request(candidate, output="config") for response in to_list(responses): if response != "{}": resp.append(response) if not resp: resp = [""] return resp def get_capabilities(self): """Returns platform info of the remove device """ try: capabilities = self._connection.get_capabilities() except ConnectionError as exc: self._module.fail_json( msg=to_text(exc, errors="surrogate_then_replace") ) return json.loads(capabilities) def check_edit_config_capability( self, candidate=None, commit=True, replace=None, comment=None ): operations = self._connection.get_device_operations() if not candidate and not replace: raise ValueError( "must provide a candidate or replace to load configuration" ) if commit not in (True, False): raise ValueError("'commit' must be a bool, got %s" % commit) if replace and not operations.get("supports_replace"): raise ValueError("configuration replace is not supported") if comment and not 
operations.get("supports_commit_comment", False): raise ValueError("commit comment is not supported") def read_module_context(self, module_key): try: module_context = self._connection.read_module_context(module_key) except ConnectionError as exc: self._module.fail_json( msg=to_text(exc, errors="surrogate_then_replace") ) return module_context def save_module_context(self, module_key, module_context): try: self._connection.save_module_context(module_key, module_context) except ConnectionError as exc: self._module.fail_json( msg=to_text(exc, errors="surrogate_then_replace") ) return None class NxosCmdRef: """NXOS Command Reference utilities. The NxosCmdRef class takes a yaml-formatted string of nxos module commands and converts it into dict-formatted database of getters/setters/defaults and associated common and platform-specific values. The utility methods add additional data such as existing states, playbook states, and proposed cli. The utilities also abstract away platform differences such as different defaults and different command syntax. Callers must provide a yaml formatted string that defines each command and its properties; e.g. 
BFD global: --- _template: # _template holds common settings for all commands # Enable feature bfd if disabled feature: bfd # Common getter syntax for BFD commands get_command: show run bfd all | incl '^(no )*bfd' interval: kind: dict getval: bfd interval (?P<tx>\\d+) min_rx (?P<min_rx>\\d+) multiplier (?P<multiplier>\\d+) setval: bfd interval {tx} min_rx {min_rx} multiplier {multiplier} default: tx: 50 min_rx: 50 multiplier: 3 N3K: # Platform overrides default: tx: 250 min_rx: 250 multiplier: 3 """ def __init__(self, module, cmd_ref_str, ref_only=False): """Initialize cmd_ref from yaml data.""" self._module = module self._check_imports() self._yaml_load(cmd_ref_str) self.cache_existing = None self.present_states = ["present", "merged", "replaced"] self.absent_states = ["absent", "deleted"] ref = self._ref # Create a list of supported commands based on ref keys ref["commands"] = sorted([k for k in ref if not k.startswith("_")]) ref["_proposed"] = [] ref["_context"] = [] ref["_resource_key"] = None if not ref_only: ref["_state"] = module.params.get("state", "present") self.feature_enable() self.get_platform_defaults() self.normalize_defaults() def __getitem__(self, key=None): if key is None: return self._ref return self._ref[key] def _check_imports(self): module = self._module msg = nxosCmdRef_import_check() if msg: module.fail_json(msg=msg) def _yaml_load(self, cmd_ref_str): if PY2: self._ref = yaml.load(cmd_ref_str) elif PY3: self._ref = yaml.load(cmd_ref_str, Loader=yaml.FullLoader) def feature_enable(self): """Add 'feature <foo>' to _proposed if ref includes a 'feature' key. """ ref = self._ref feature = ref["_template"].get("feature") if feature: show_cmd = "show run | incl 'feature {0}'".format(feature) output = self.execute_show_command(show_cmd, "text") if not output or "CLI command error" in output: msg = "** 'feature {0}' is not enabled. 
Module will auto-enable feature {0} ** ".format( feature ) self._module.warn(msg) ref["_proposed"].append("feature {0}".format(feature)) ref["_cli_is_feature_disabled"] = ref["_proposed"] def get_platform_shortname(self): """Query device for platform type, normalize to a shortname/nickname. Returns platform shortname (e.g. 'N3K-3058P' returns 'N3K') or None. """ # TBD: add this method logic to get_capabilities() after those methods # are made consistent across transports platform_info = self.execute_show_command("show inventory", "json") if not platform_info or not isinstance(platform_info, dict): return None inventory_table = platform_info["TABLE_inv"]["ROW_inv"] for info in inventory_table: if "Chassis" in info["name"]: network_os_platform = info["productid"] break else: return None # Supported Platforms: N3K,N5K,N6K,N7K,N9K,N3K-F,N9K-F m = re.match( "(?P<short>N[35679][K57])-(?P<N35>C35)*", network_os_platform ) if not m: return None shortname = m.group("short") # Normalize if m.groupdict().get("N35"): shortname = "N35" elif re.match("N77", shortname): shortname = "N7K" elif re.match(r"N3K|N9K", shortname): for info in inventory_table: if "-R" in info["productid"]: # Fretta Platform shortname += "-F" break return shortname def get_platform_defaults(self): """Update ref with platform specific defaults""" plat = self.get_platform_shortname() if not plat: return ref = self._ref ref["_platform_shortname"] = plat # Remove excluded commands (no platform support for command) for k in ref["commands"]: if plat in ref[k].get("_exclude", ""): ref["commands"].remove(k) # Update platform-specific settings for each item in ref plat_spec_cmds = [k for k in ref["commands"] if plat in ref[k]] for k in plat_spec_cmds: for plat_key in ref[k][plat]: ref[k][plat_key] = ref[k][plat][plat_key] def normalize_defaults(self): """Update ref defaults with normalized data""" ref = self._ref for k in ref["commands"]: if "default" in ref[k] and ref[k]["default"]: kind = ref[k]["kind"] if 
"int" == kind: ref[k]["default"] = int(ref[k]["default"]) elif "list" == kind: ref[k]["default"] = [str(i) for i in ref[k]["default"]] elif "dict" == kind: for key, v in ref[k]["default"].items(): if v: v = str(v) ref[k]["default"][key] = v def execute_show_command(self, command, format): """Generic show command helper. Warning: 'CLI command error' exceptions are caught, must be handled by caller. Return device output as a newline-separated string or None. """ cmds = [{"command": command, "output": format}] output = None try: output = run_commands(self._module, cmds) if output: output = output[0] except ConnectionError as exc: if "CLI command error" in repr(exc): # CLI may be feature disabled output = repr(exc) else: raise return output def pattern_match_existing(self, output, k): """Pattern matching helper for `get_existing`. `k` is the command name string. Use the pattern from cmd_ref to find a matching string in the output. Return regex match object or None. """ ref = self._ref pattern = re.compile(ref[k]["getval"]) multiple = "multiple" in ref[k].keys() match_lines = [re.search(pattern, line) for line in output] if "dict" == ref[k]["kind"]: match = [m for m in match_lines if m] if not match: return None if len(match) > 1 and not multiple: raise ValueError( "get_existing: multiple matches found for property {0}".format( k ) ) else: match = [m.groups() for m in match_lines if m] if not match: return None if len(match) > 1 and not multiple: raise ValueError( "get_existing: multiple matches found for property {0}".format( k ) ) for item in match: index = match.index(item) match[index] = list(item) # tuple to list # Handle config strings that nvgen with the 'no' prefix. 
# Example match behavior: # When pattern is: '(no )*foo *(\S+)*$' AND # When output is: 'no foo' -> match: ['no ', None] # When output is: 'foo 50' -> match: [None, '50'] if None is match[index][0]: match[index].pop(0) elif "no" in match[index][0]: match[index].pop(0) if not match: return None return match def set_context(self, context=None): """Update ref with command context. """ if context is None: context = [] ref = self._ref # Process any additional context that this propoerty might require. # 1) Global context from NxosCmdRef _template. # 2) Context passed in using context arg. ref["_context"] = ref["_template"].get("context", []) for cmd in context: ref["_context"].append(cmd) # Last key in context is the resource key ref["_resource_key"] = context[-1] if context else ref["_resource_key"] def get_existing(self, cache_output=None): """Update ref with existing command states from the device. Store these states in each command's 'existing' key. """ ref = self._ref if ref.get("_cli_is_feature_disabled"): # Add context to proposed if state is present if ref["_state"] in self.present_states: [ref["_proposed"].append(ctx) for ctx in ref["_context"]] return show_cmd = ref["_template"]["get_command"] if cache_output: output = cache_output else: output = self.execute_show_command(show_cmd, "text") or [] self.cache_existing = output # Add additional command context if needed. if ref["_context"]: output = CustomNetworkConfig(indent=2, contents=output) output = output.get_section(ref["_context"]) if not output: # Add context to proposed if state is present if ref["_state"] in self.present_states: [ref["_proposed"].append(ctx) for ctx in ref["_context"]] return # We need to remove the last item in context for state absent case. 
if ref["_state"] in self.absent_states and ref["_context"]: if ( ref["_resource_key"] and ref["_resource_key"] == ref["_context"][-1] ): if ref["_context"][-1] in output: ref["_context"][-1] = "no " + ref["_context"][-1] else: del ref["_context"][-1] return # Walk each cmd in ref, use cmd pattern to discover existing cmds output = output.split("\n") for k in ref["commands"]: match = self.pattern_match_existing(output, k) if not match: continue ref[k]["existing"] = {} for item in match: index = match.index(item) kind = ref[k]["kind"] if "int" == kind: ref[k]["existing"][index] = int(item[0]) elif "list" == kind: ref[k]["existing"][index] = [str(i) for i in item[0]] elif "dict" == kind: # The getval pattern should contain regex named group keys that # match up with the setval named placeholder keys; e.g. # getval: my-cmd (?P<foo>\d+) bar (?P<baz>\d+) # setval: my-cmd {foo} bar {baz} ref[k]["existing"][index] = {} for key in item.groupdict().keys(): ref[k]["existing"][index][key] = str(item.group(key)) elif "str" == kind: ref[k]["existing"][index] = item[0] else: raise ValueError( "get_existing: unknown 'kind' value specified for key '{0}'".format( k ) ) def get_playvals(self): """Update ref with values from the playbook. Store these values in each command's 'playval' key. 
""" ref = self._ref module = self._module params = {} if module.params.get("config"): # Resource module builder packs playvals under 'config' key param_data = module.params.get("config") params["global"] = param_data for key in param_data.keys(): if isinstance(param_data[key], list): params[key] = param_data[key] else: params["global"] = module.params for k in ref.keys(): for level in params.keys(): if isinstance(params[level], dict): params[level] = [params[level]] for item in params[level]: if k in item and item[k] is not None: if not ref[k].get("playval"): ref[k]["playval"] = {} playval = item[k] index = params[level].index(item) # Normalize each value if "int" == ref[k]["kind"]: playval = int(playval) elif "list" == ref[k]["kind"]: playval = [str(i) for i in playval] elif "dict" == ref[k]["kind"]: for key, v in playval.items(): playval[key] = str(v) ref[k]["playval"][index] = playval def build_cmd_set(self, playval, existing, k): """Helper function to create list of commands to configure device Return a list of commands """ ref = self._ref proposed = ref["_proposed"] cmd = None kind = ref[k]["kind"] if "int" == kind: cmd = ref[k]["setval"].format(playval) elif "list" == kind: cmd = ref[k]["setval"].format(*(playval)) elif "dict" == kind: # The setval pattern should contain placeholder keys that # match up with the getval regex named group keys; e.g. # getval: my-cmd (?P<foo>\d+) bar (?P<baz>\d+) # setval: my-cmd {foo} bar {baz} cmd = ref[k]["setval"].format(**playval) elif "str" == kind: if "deleted" in playval: if existing: cmd = "no " + ref[k]["setval"].format(existing) else: cmd = ref[k]["setval"].format(playval) else: raise ValueError( "get_proposed: unknown 'kind' value specified for key '{0}'".format( k ) ) if cmd: if ref["_state"] in self.absent_states and not re.search( r"^no", cmd ): cmd = "no " + cmd # Commands may require parent commands for proper context. 
# Global _template context is replaced by parameter context [proposed.append(ctx) for ctx in ref["_context"]] [proposed.append(ctx) for ctx in ref[k].get("context", [])] proposed.append(cmd) def get_proposed(self): """Compare playbook values against existing states and create a list of proposed commands. Return a list of raw cli command strings. """ ref = self._ref # '_proposed' may be empty list or contain initializations; e.g. ['feature foo'] proposed = ref["_proposed"] if ref["_context"] and ref["_context"][-1].startswith("no"): [proposed.append(ctx) for ctx in ref["_context"]] return proposed # Create a list of commands that have playbook values play_keys = [k for k in ref["commands"] if "playval" in ref[k]] def compare(playval, existing): if ref["_state"] in self.present_states: if existing is None: return False elif playval == existing: return True elif ( isinstance(existing, dict) and playval in existing.values() ): return True if ref["_state"] in self.absent_states: if isinstance(existing, dict) and all( x is None for x in existing.values() ): existing = None if existing is None or playval not in existing.values(): return True return False # Compare against current state for k in play_keys: playval = ref[k]["playval"] # Create playval copy to avoid RuntimeError # dictionary changed size during iteration error playval_copy = deepcopy(playval) existing = ref[k].get("existing", ref[k]["default"]) multiple = "multiple" in ref[k].keys() # Multiple Instances: if isinstance(existing, dict) and multiple: for ekey, evalue in existing.items(): if isinstance(evalue, dict): # Remove values set to string 'None' from dvalue evalue = dict( (k, v) for k, v in evalue.items() if v != "None" ) for pkey, pvalue in playval.items(): if compare(pvalue, evalue): if playval_copy.get(pkey): del playval_copy[pkey] if not playval_copy: continue # Single Instance: else: for pkey, pval in playval.items(): if compare(pval, existing): if playval_copy.get(pkey): del playval_copy[pkey] if 
not playval_copy: continue playval = playval_copy # Multiple Instances: if isinstance(existing, dict): for dkey, dvalue in existing.items(): for pval in playval.values(): self.build_cmd_set(pval, dvalue, k) # Single Instance: else: for pval in playval.values(): self.build_cmd_set(pval, existing, k) # Remove any duplicate commands before returning. # pylint: disable=unnecessary-lambda cmds = sorted(set(proposed), key=lambda x: proposed.index(x)) return cmds def nxosCmdRef_import_check(): """Return import error messages or empty string""" msg = "" if PY2: if not HAS_ORDEREDDICT and sys.version_info[:2] < (2, 7): msg += "Mandatory python library 'ordereddict' is not present, try 'pip install ordereddict'\n" if not HAS_YAML: msg += "Mandatory python library 'yaml' is not present, try 'pip install yaml'\n" elif PY3: if not HAS_YAML: msg += "Mandatory python library 'PyYAML' is not present, try 'pip install PyYAML'\n" return msg def is_json(cmd): return to_text(cmd).endswith("| json") def is_text(cmd): return not is_json(cmd) def is_local_nxapi(module): provider = module.params.get("provider") if provider: return provider.get("transport") == "nxapi" return False def to_command(module, commands): if is_local_nxapi(module): default_output = "json" else: default_output = "text" transform = ComplexList( dict( command=dict(key=True), output=dict(default=default_output), prompt=dict(type="list"), answer=dict(type="list"), newline=dict(type="bool", default=True), sendonly=dict(type="bool", default=False), check_all=dict(type="bool", default=False), ), module, ) commands = transform(to_list(commands)) for item in commands: if is_json(item["command"]): item["output"] = "json" return commands def get_config(module, flags=None): flags = [] if flags is None else flags conn = get_connection(module) return conn.get_config(flags=flags) def run_commands(module, commands, check_rc=True): conn = get_connection(module) return conn.run_commands(to_command(module, commands), check_rc) def 
load_config(module, config, return_error=False, opts=None, replace=None): conn = get_connection(module) return conn.load_config(config, return_error, opts, replace=replace) def get_capabilities(module): conn = get_connection(module) return conn.get_capabilities() def get_diff( self, candidate=None, running=None, diff_match="line", diff_ignore_lines=None, path=None, diff_replace="line", ): conn = self.get_connection() return conn.get_diff( candidate=candidate, running=running, diff_match=diff_match, diff_ignore_lines=diff_ignore_lines, path=path, diff_replace=diff_replace, ) def normalize_interface(name): """Return the normalized interface name """ if not name: return def _get_number(name): digits = "" for char in name: if char.isdigit() or char in "/.": digits += char return digits if name.lower().startswith("et"): if_type = "Ethernet" elif name.lower().startswith("vl"): if_type = "Vlan" elif name.lower().startswith("lo"): if_type = "loopback" elif name.lower().startswith("po"): if_type = "port-channel" elif name.lower().startswith("nv"): if_type = "nve" else: if_type = None number_list = name.split(" ") if len(number_list) == 2: number = number_list[-1].strip() else: number = _get_number(name) if if_type: proper_interface = if_type + number else: proper_interface = name return proper_interface def get_interface_type(interface): """Gets the type of interface """ if interface.upper().startswith("ET"): return "ethernet" elif interface.upper().startswith("VL"): return "svi" elif interface.upper().startswith("LO"): return "loopback" elif interface.upper().startswith("MG"): return "management" elif interface.upper().startswith("MA"): return "management" elif interface.upper().startswith("PO"): return "portchannel" elif interface.upper().startswith("NV"): return "nve" else: return "unknown" def default_intf_enabled(name="", sysdefs=None, mode=None): """Get device/version/interface-specific default 'enabled' state. L3: - Most L3 intfs default to 'shutdown'. 
Loopbacks default to 'no shutdown'. - Some legacy platforms default L3 intfs to 'no shutdown'. L2: - User-System-Default 'system default switchport shutdown' defines the enabled state for L2 intf's. USD defaults may be different on some platforms. - An intf may be explicitly defined as L2 with 'switchport' or it may be implicitly defined as L2 when USD 'system default switchport' is defined. """ if not name: return None if sysdefs is None: sysdefs = {} default = False if re.search("port-channel|loopback", name): default = True else: if mode is None: # intf 'switchport' cli is not present so use the user-system-default mode = sysdefs.get("mode") if mode == "layer3": default = sysdefs.get("L3_enabled") elif mode == "layer2": default = sysdefs.get("L2_enabled") return default def read_module_context(module): conn = get_connection(module) return conn.read_module_context(module._name) def save_module_context(module, module_context): conn = get_connection(module) return conn.save_module_context(module._name, module_context)
34.168218
107
0.555527
4a14df9868d3fe8c7650d26694f4637cb731bf8c
2,242
py
Python
linear_structures/array/414_third-maximum-number.py
b1tank/leetcode
0b71eb7a4f52291ff072b1280d6b76e68f7adfee
[ "MIT" ]
null
null
null
linear_structures/array/414_third-maximum-number.py
b1tank/leetcode
0b71eb7a4f52291ff072b1280d6b76e68f7adfee
[ "MIT" ]
null
null
null
linear_structures/array/414_third-maximum-number.py
b1tank/leetcode
0b71eb7a4f52291ff072b1280d6b76e68f7adfee
[ "MIT" ]
null
null
null
# Author: b1tank # Email: b1tank@outlook.com #================================= ''' 414_third-maximum-number LeetCode Solution: - intuitive - set() - set() cleaner version ! - heapq: heapq.heappushpop vs heapq.heapreplace ("heappoppush") ''' import heapq class Solution: def thirdMax(self, nums: List[int]) -> int: # m1, m2, m3 = nums[0], None, None # for i in nums: # if i < m1: # if not m2: # m2 = i # elif i > m2: # m3 = m2 # m2 = i # elif i < m2: # if not m3: # m3 = i # else: # m3 = max(m3, i) # elif i > m1: # m3 = m2 # m2 = m1 # m1 = i # if m3 is not None: # explicitly check None instead of using "if m3" because "if 0" returns False !!! # return m3 # return m1 # m = nums[0] # s = {m} # min_s = m # for i in nums: # if len(s) < 3: # s.add(i) # m = max(m, i) # min_s = min(s) # elif i not in s: # if i > min_s: # s.remove(min_s) # s.add(i) # min_s = min(s) # if len(s) < 3: # return m # return min(s) # maximums = set() # for num in nums: # maximums.add(num) # if len(maximums) > 3: # maximums.remove(min(maximums)) # if len(maximums) == 3: # return min(maximums) # return max(maximums) max_heap = [] s = set() for i in nums: if i not in s: s.add(i) if len(s) > 3: s.remove(heapq.heappushpop(max_heap, i)) else: heapq.heappush(max_heap, i) if len(s) == 3: return max_heap[0] return max(s)
29.116883
111
0.344781
4a14e00f17be18bec5136e9183871cb16ee58108
869
py
Python
face_app/main/helper_function.py
benjaminogbonna/face_expression_detection
81b7adc383c7724f2c355283302374c6af8d3ed9
[ "MIT" ]
null
null
null
face_app/main/helper_function.py
benjaminogbonna/face_expression_detection
81b7adc383c7724f2c355283302374c6af8d3ed9
[ "MIT" ]
null
null
null
face_app/main/helper_function.py
benjaminogbonna/face_expression_detection
81b7adc383c7724f2c355283302374c6af8d3ed9
[ "MIT" ]
null
null
null
import cv2 import os import numpy as np cur_dir = os.path.dirname(__file__) haarcascade = os.path.join(cur_dir, 'model', 'haarcascade_frontalface_default.xml') def load_and_prep_image(filename): """ Reads an image from filename, turns it into a tensor and reshapes it to (img_shape, img_shape, colour_channel). """ image = cv2.imread(filename) # gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY) face_cascade = cv2.CascadeClassifier(haarcascade) faces = face_cascade.detectMultiScale(image, scaleFactor=1.3, minNeighbors=5) for (x, y, w, h) in faces: cv2.rectangle(image, (x, y), (x + w, y + h), (255, 0, 0), 2) face = image[y:y + h, x:x + w] gray = cv2.cvtColor(face, cv2.COLOR_BGR2GRAY) cropped_img = np.expand_dims(np.expand_dims(cv2.resize(gray, (48, 48)), -1), 0) return cropped_img
29.965517
87
0.666283
4a14e05d36af05e572780d0cff51160a03d4c988
1,106
py
Python
trove/guestagent/datastore/mariadb/manager.py
Tesora-Release/tesora-trove
042145a573ce08b5d7cb25e1491e391e777a20be
[ "Apache-2.0" ]
2
2016-08-27T01:59:08.000Z
2018-06-08T10:02:08.000Z
trove/guestagent/datastore/mariadb/manager.py
Tesora-Release/tesora-trove
042145a573ce08b5d7cb25e1491e391e777a20be
[ "Apache-2.0" ]
null
null
null
trove/guestagent/datastore/mariadb/manager.py
Tesora-Release/tesora-trove
042145a573ce08b5d7cb25e1491e391e777a20be
[ "Apache-2.0" ]
7
2016-04-06T19:03:03.000Z
2018-10-12T21:50:51.000Z
# Copyright 2015 Tesora, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # from trove.guestagent.datastore.mariadb import ( service as mariadb_service) from trove.guestagent.datastore.galera_common import manager as galera_manager from trove.guestagent.datastore.mysql_common import service as mysql_service class Manager(galera_manager.GaleraManager): def __init__(self): super(Manager, self).__init__( mariadb_service.MariaDBApp, mysql_service.BaseMySqlAppStatus, mariadb_service.MariaDBAdmin)
36.866667
78
0.740506
4a14e0746139dc533b1b89bd7f273d751c6b02fe
2,597
py
Python
ScriptZora.py
SimHanssens/ZoraProject
e25b6d235745d5414be9bad0364069a7d930a3f4
[ "bzip2-1.0.6" ]
null
null
null
ScriptZora.py
SimHanssens/ZoraProject
e25b6d235745d5414be9bad0364069a7d930a3f4
[ "bzip2-1.0.6" ]
null
null
null
ScriptZora.py
SimHanssens/ZoraProject
e25b6d235745d5414be9bad0364069a7d930a3f4
[ "bzip2-1.0.6" ]
null
null
null
from naoqi import ALProxy, ALModule, ALBroker from PIL import Image mask = False IP = "10.3.208.67" port = 9559 video_device = ALProxy("ALVideoDevice", IP, port) camera = 0 # CameraTop resolutions = 3 # k4VGA color_spaces = 13 # kBGRColorSpace fps = 1 tts = ALProxy("ALTextToSpeech", IP, port) ledRGB = ALProxy("ALLeds", IP, port) steps = ALProxy("ALMotion", IP, port) class FaceCounterModule(ALModule): """ Counts all the faces seen """ def __init__(self, name): ALModule.__init__(self, name) self.memory = ALProxy("ALMemory") self.memory.subscribeToEvent("FaceDetected", self.getName(), "on_face_detected") def exit(self): self.memory.unsubscribeToEvent("FaceDetected", self.getName(), "on_face_detected") ALModule.exit(self) def on_face_detected(self, key, value, message): def initializer(): subscriber = video_device.subscribeCamera("demo7", camera, resolutions, color_spaces, fps) naoImage = video_device.getImageRemote(subscriber) # if (naoImage is not None): # show(naoImage) def show(naoImage): # Get the image size and pixel array. imageWidth = naoImage[0] imageHeight = naoImage[1] array = naoImage[6] image_string = str(bytearray(array)) # Create a PIL Image from our pixel array. im = Image.frombytes("RGB", (imageWidth, imageHeight), image_string) # Save the image. 
im.save("D:/ProgramFiles/ZoraProject/images/maskImage", "PNG") initializer() video_device.unsubscribe("demo7") if (mask == False): tts.say("Please wear a mask before entering!") ledRGB.fadeRGB("AllLeds", "red", 2) steps.moveTo(0.2, 0, 0) else: tts.say("You may now enter") ledRGB.fadeRGB("AllLeds", "green", 2) steps.moveTo(0.1, 0, 0) def register_module(): myBroker = ALBroker("myBroker", "0.0.0.0", # listen to anyone 0, # find a port and use it IP, # ip robot port, # port robot ) global face_counter_module # must be a global variable face_counter_module = FaceCounterModule("face_counter_module") try: while True: pass except KeyboardInterrupt: pass finally: face_counter_module.exit() if __name__ == '__main__': register_module()
28.228261
102
0.581825
4a14e1613330804f1f32f1e0b4c9432916b15b4b
4,464
py
Python
src/matplotx/_spy.py
RemDelaporteMathurin/matplotx
68c0fc4612f6060adb673cc35f203de7f2c3d19a
[ "MIT" ]
306
2021-12-02T15:58:57.000Z
2022-03-28T05:25:59.000Z
src/matplotx/_spy.py
sailfish009/matplotx
668c2e36082c65e49b83440eeed14da4273fdca2
[ "MIT" ]
6
2021-12-10T07:11:56.000Z
2022-02-07T15:53:47.000Z
src/matplotx/_spy.py
sailfish009/matplotx
668c2e36082c65e49b83440eeed14da4273fdca2
[ "MIT" ]
17
2021-12-11T12:44:59.000Z
2022-03-26T09:16:34.000Z
import tempfile import matplotlib.colors as colors import matplotlib.image as mpimg import matplotlib.pyplot as plt import numpy as np def spy(*args, filename=None, **kwargs): if filename is None: return _plot(*args, **kwargs) _write_png(filename, *args, **kwargs) def _plot(A, border_width: int = 0, border_color="0.5", colormap=None): with tempfile.NamedTemporaryFile() as fp: _write_png( fp.name, A, border_width=border_width, border_color=border_color, colormap=colormap, ) img = mpimg.imread(fp.name) plt.imshow(img, origin="upper", interpolation="nearest", cmap="gray") return plt def _write_png(filename, A, border_width: int = 0, border_color="0.5", colormap=None): import png # pypng iterator = RowIterator(A, border_width, border_color, colormap) m, n = A.shape w = png.Writer( n + 2 * border_width, m + 2 * border_width, greyscale=iterator.mode != "rgb", bitdepth=iterator.bitdepth, ) with open(filename, "wb") as f: w.write(f, iterator) class RowIterator: def __init__(self, A, border_width, border_color, colormap): self.A = A.tocsr() self.border_width = border_width rgb = np.array(colors.to_rgb(border_color)) border_color_is_bw = np.all(rgb[0] == rgb) and rgb[0] in [0, 1] border_color_is_gray = np.all(rgb[0] == rgb) if colormap is None and (border_width == 0 or border_color_is_bw): self.mode = "binary" self.border_color = False self.bitdepth = 1 self.dtype = bool elif colormap is None and border_color_is_gray: self.mode = "grayscale" self.bitdepth = 8 self.dtype = np.uint8 self.border_color = np.uint8(np.round(rgb[0] * 255)) else: self.mode = "rgb" self.border_color = np.round(rgb * 255).astype(np.uint8) self.dtype = np.uint8 self.bitdepth = 8 if colormap is None: if self.mode == "binary": def convert_values(idx, vals): out = np.ones(self.A.shape[1], dtype=self.dtype) out[idx] = False return out elif self.mode == "grayscale": def convert_values(idx, vals): out = np.full(self.A.shape[1], 255, dtype=self.dtype) out[idx] = 0 return out else: assert self.mode == "rgb" 
def convert_values(idx, vals): out = np.full((self.A.shape[1], 3), 255, dtype=self.dtype) out[idx, :] = 0 return out.flatten() else: assert self.mode == "rgb" # Convert the string into a colormap object with `to_rgba()`, # <https://stackoverflow.com/a/15140118/353337>. import matplotlib.cm as cmx cm = plt.get_cmap(colormap) c_norm = colors.Normalize( vmin=min(0.0, self.A.data.min()), vmax=max(0.0, self.A.data.max()) ) scalar_map = cmx.ScalarMappable(norm=c_norm, cmap=cm) def convert_values(idx, vals): x = np.zeros(self.A.shape[1]) x[idx] = vals out = scalar_map.to_rgba(x)[:, :3] * 255 out = np.round(out).astype(self.dtype) return out.flatten() self.convert_values = convert_values self.current = 0 def __iter__(self): return self def __next__(self): m = self.A.shape[0] b = self.border_width if self.current >= m + 2 * b: raise StopIteration if b == 0: row = self.A[self.current] out = self.convert_values(row.indices, row.data) else: if self.current < b or self.current > m + b - 1: out = np.tile(self.border_color, self.A.shape[1] + 2 * b).astype( self.dtype ) else: row = self.A[self.current - b] border = np.tile(self.border_color, b) out = np.concatenate( [border, self.convert_values(row.indices, row.data), border] ) self.current += 1 return out
30.367347
86
0.529794
4a14e4639e3a4069547b90cc54c21f68dde6d4dc
120
py
Python
services/admin.py
FGAUnB-REQ-GM/2021.2-PousadaAnimal
b7371aebccad0da23073de0db642a6ce824f919e
[ "MIT" ]
null
null
null
services/admin.py
FGAUnB-REQ-GM/2021.2-PousadaAnimal
b7371aebccad0da23073de0db642a6ce824f919e
[ "MIT" ]
95
2022-02-04T19:40:09.000Z
2022-03-31T20:24:11.000Z
services/admin.py
FGAUnB-REQ-GM/2021.2-PousadaAnimal
b7371aebccad0da23073de0db642a6ce824f919e
[ "MIT" ]
4
2022-01-26T23:51:48.000Z
2022-01-27T18:28:16.000Z
from django.contrib import admin from .models import Service # Register your models here. admin.site.register(Service)
20
32
0.808333
4a14e74ee52daadcbf961099e8beb9fa1c69482c
1,782
py
Python
predict.py
Tyelcie/Image_Classifier
92ae6b1b307492e8ad1732d0f516565838059bd3
[ "MIT" ]
1
2021-02-13T08:56:03.000Z
2021-02-13T08:56:03.000Z
predict.py
Tyelcie/Image_Classifier
92ae6b1b307492e8ad1732d0f516565838059bd3
[ "MIT" ]
null
null
null
predict.py
Tyelcie/Image_Classifier
92ae6b1b307492e8ad1732d0f516565838059bd3
[ "MIT" ]
null
null
null
import argparse import model_class as mc import data_process as dp import matplotlib matplotlib.use('Agg') import matplotlib.pyplot as plt import seaborn as sb import json from PIL import Image from torchvision import transforms, datasets, models parser = argparse.ArgumentParser() parser.add_argument('input', type = str, help = 'path of the flower image to be predicted') parser.add_argument('checkpoint', type = str) parser.add_argument('--top_k', dest = 'top_k', type = int, default = 5) parser.add_argument('--category_names', dest = 'category_names', type = str, default = 'cat_to_name.json', help = 'path of a json file that stores the map from category labels to flower names') parser.add_argument('--gpu', dest = 'gpu', action = 'store_true', default = False) args = parser.parse_args() if args.gpu: device = 'cuda' else: device = 'cpu' print('The model is running on {}'.format(device)) # read json labels with open(args.category_names, 'r') as f: cat_to_name = json.load(f) # load the model model = mc.load_checkpoint(args.checkpoint) # define inputs img_dir = args.input name = img_dir.split('/')[-2] dp.imshow(dp.process_image(img_dir)); plt.title(cat_to_name[name]); plt.axis('off'); # predict probs, classes = mc.predict(img_dir, model, args.top_k) print('probs: {}'.format(probs)) print('classes: {}'.format(classes)) # Display an image along with the top 5 classes img = Image.open(img_dir).convert('RGB') img_transform = transforms.Compose([transforms.Resize(256), transforms.CenterCrop(224)]) img = img_transform(img) plt.figure(figsize = [4, 8]) plt.subplot(2, 1, 1) plt.imshow(img); plt.title(cat_to_name[name]); plt.axis('off'); plt.subplot(2, 1, 2) sb.barplot(x = probs, y = classes, color = 'pink'); plt.xlabel(''); plt.ylabel('');
28.285714
193
0.719978
4a14e8d68d9b27e524bf59dcb8128949e14ad407
9,414
py
Python
agents/ActorCritic_Separate.py
samuelfneumann/RLControl
71430b1de2e4262483908932eb44579c2ec8216d
[ "Apache-2.0" ]
9
2018-07-30T20:12:47.000Z
2021-02-05T17:02:04.000Z
agents/ActorCritic_Separate.py
samuelfneumann/RLControl
71430b1de2e4262483908932eb44579c2ec8216d
[ "Apache-2.0" ]
14
2020-01-28T22:38:58.000Z
2022-02-10T00:11:21.000Z
agents/ActorCritic_Separate.py
samuelfneumann/RLControl
71430b1de2e4262483908932eb44579c2ec8216d
[ "Apache-2.0" ]
3
2018-08-08T14:52:53.000Z
2021-01-23T18:00:05.000Z
from __future__ import print_function import numpy as np import tensorflow as tf from agents.base_agent import BaseAgent from agents.network.base_network_manager import BaseNetwork_Manager # from agents.network import ac_network from agents.network import ac_actor_network from agents.network import ac_critic_network from experiment import write_summary import utils.plot_utils class ActorCritic_Separate_Network_Manager(BaseNetwork_Manager): def __init__(self, config): super(ActorCritic_Separate_Network_Manager, self).__init__(config) self.rng = np.random.RandomState(config.random_seed) self.batch_size = config.batch_size # Custom parameters self.num_samples = config.num_samples self.rho = config.rho self.critic_update = config.critic_update # expected, sampled, mean(AE) self.actor_update = config.actor_update # cem(with uniform sampling), ll self.sample_for_eval = False if config.sample_for_eval == "True": self.sample_for_eval = True with self.graph.as_default(): tf.set_random_seed(config.random_seed) self.sess = tf.Session() self.actor_network = ac_actor_network.AC_Actor_Network(self.sess, self.input_norm, config) self.critic_network = ac_critic_network.AC_Critic_Network(self.sess, self.input_norm, config) self.sess.run(tf.global_variables_initializer()) self.actor_network.init_target_network() self.critic_network.init_target_network() def take_action(self, state, is_train, is_start): greedy_action = self.actor_network.predict_action(np.expand_dims(state, 0), False) greedy_action = greedy_action[0] if is_train: if is_start: self.train_ep_count += 1 if self.use_external_exploration: chosen_action = self.exploration_policy.generate(greedy_action, self.train_global_steps) else: # single state so first idx # single sample so first idx chosen_action = self.actor_network.sample_action(np.expand_dims(state, 0), False, is_single_sample=True)[0] self.train_global_steps += 1 if self.write_log: write_summary(self.writer, self.train_global_steps, chosen_action[0], 
tag='train/action_taken') alpha, mean, sigma = self.actor_network.getModalStats() write_summary(self.writer, self.train_global_steps, alpha[0], tag='train/alpha0') write_summary(self.writer, self.train_global_steps, alpha[1], tag='train/alpha1') write_summary(self.writer, self.train_global_steps, mean[0], tag='train/mean0') write_summary(self.writer, self.train_global_steps, mean[1], tag='train/mean1') write_summary(self.writer, self.train_global_steps, sigma[0], tag='train/sigma0') write_summary(self.writer, self.train_global_steps, sigma[1], tag='train/sigma1') if self.write_plot: alpha, mean, sigma = self.actor_network.getModalStats() func1 = self.critic_network.getQFunction(state) func2 = self.actor_network.getPolicyFunction(alpha, mean, sigma) utils.plot_utils.plotFunction("ActorCritic", [func1, func2], state, [greedy_action, mean], chosen_action, self.action_min, self.action_max, display_title='Actor-Critic, steps: ' + str(self.train_global_steps), save_title='steps_' + str(self.train_global_steps), save_dir=self.writer.get_logdir(), ep_count=self.train_ep_count, show=False) else: if is_start: self.eval_ep_count += 1 if self.sample_for_eval: # single state so first idx # single sample so first idx chosen_action = self.actor_network.sample_action(np.expand_dims(state, 0), False, is_single_sample=True)[0] else: chosen_action = greedy_action self.eval_global_steps += 1 if self.write_log: write_summary(self.writer, self.eval_global_steps, chosen_action[0], tag='eval/action_taken') return chosen_action def update_network(self, state_batch, action_batch, next_state_batch, reward_batch, gamma_batch): # Critic Update # Modified Actor-Critic if self.critic_update == "sampled": next_action_batch = self.actor_network.sample_action(next_state_batch, True, is_single_sample=True) next_action_batch_reshaped = np.reshape(next_action_batch, (self.batch_size * 1, self.action_dim)) # batchsize * n target_q = self.critic_network.predict_q_target(next_state_batch, 
next_action_batch_reshaped, True) elif self.critic_update == "expected": next_action_batch = self.actor_network.sample_action(next_state_batch, True, is_single_sample=False) next_action_batch_reshaped = np.reshape(next_action_batch, (self.batch_size * self.num_samples, self.action_dim)) stacked_next_state_batch = np.repeat(next_state_batch, self.num_samples, axis=0) # batchsize * n target_q = self.critic_network.predict_q_target(stacked_next_state_batch, next_action_batch_reshaped, True) target_q = np.reshape(target_q, (self.batch_size, self.num_samples)) target_q = np.mean(target_q, axis=1, keepdims=True) # average across samples elif self.critic_update == "mean": # Use original Actor next_action_batch_final_target = self.actor_network.predict_action(next_state_batch, True) # batchsize * n target_q = self.critic_network.predict_q_target(next_state_batch, next_action_batch_final_target, True) else: raise ValueError("Invalid self.critic_update config") reward_batch = np.reshape(reward_batch, (self.batch_size, 1)) gamma_batch = np.reshape(gamma_batch, (self.batch_size, 1)) # compute target : y_i = r_{i+1} + \gamma * max Q'(s_{i+1}, a') y_i = reward_batch + gamma_batch * target_q predicted_q_val, _ = self.critic_network.train_critic(state_batch, action_batch, y_i) stacked_state_batch = np.repeat(state_batch, self.num_samples, axis=0) # Actor Update # LogLikelihood update if self.actor_update == "ll": # for each transition, sample again? 
# shape: (batchsize , n actions, action_dim) # batch_size x num_samples x action_dim action_batch_new = self.actor_network.sample_action(state_batch, True, is_single_sample=False) action_batch_new_picked = np.array([a[0] for a in action_batch_new]) # reshape (batchsize * n , action_dim) action_batch_new_reshaped = np.reshape(action_batch_new, (self.batch_size * self.num_samples, self.action_dim)) q_val_batch_reshaped = self.critic_network.predict_q(stacked_state_batch, action_batch_new_reshaped, True) q_val_batch = np.reshape(q_val_batch_reshaped, (self.batch_size, self.num_samples)) q_val_picked = np.array([[b[0]] for b in q_val_batch]) q_val_mean = np.mean(q_val_batch, axis=1, keepdims=True) self.actor_network.train_actor_ll(state_batch, action_batch_new_picked, q_val_picked - q_val_mean) # CEM update elif self.actor_update == "cem": action_batch_init = self.actor_network.sample_action(state_batch, True, is_single_sample=False) # reshape (batchsize * n , action_dim) action_batch_final = action_batch_init action_batch_final_reshaped = np.reshape(action_batch_final, (self.batch_size * self.num_samples, self.action_dim)) q_val = self.critic_network.predict_q(stacked_state_batch, action_batch_final_reshaped, True) q_val = np.reshape(q_val, (self.batch_size, self.num_samples)) # Find threshold : top (1-rho) percentile selected_idxs = list(map(lambda x: x.argsort()[::-1][:int(self.num_samples * self.rho)], q_val)) action_list = [actions[idxs] for actions, idxs in zip(action_batch_final, selected_idxs)] stacked_state_batch = np.repeat(state_batch, int(self.num_samples * self.rho), axis=0) action_list = np.reshape(action_list, (self.batch_size * int(self.num_samples * self.rho), self.action_dim)) self.actor_network.train_actor_cem(stacked_state_batch, action_list) else: raise ValueError("Invalid self.actor_update config") # Update target networks self.critic_network.update_target_network() self.actor_network.update_target_network() class ActorCritic_Separate(BaseAgent): 
def __init__(self, config): network_manager = ActorCritic_Separate_Network_Manager(config) super(ActorCritic_Separate, self).__init__(config, network_manager)
45.259615
127
0.656257
4a14e93bca5aa8ff42a39d2ef264a0dc702e332f
2,992
py
Python
hand_net/test.py
clearsky767/examples
d6c744061ba5ed56088af43edb171990c6942efd
[ "BSD-3-Clause" ]
null
null
null
hand_net/test.py
clearsky767/examples
d6c744061ba5ed56088af43edb171990c6942efd
[ "BSD-3-Clause" ]
null
null
null
hand_net/test.py
clearsky767/examples
d6c744061ba5ed56088af43edb171990c6942efd
[ "BSD-3-Clause" ]
null
null
null
import torch import torch.nn as nn import torchvision.transforms as transforms import torch.utils.data as data import torch.backends.cudnn as cudnn import os import json import time import cv2 import shutil import argparse import numpy as np from PIL import Image, ImageDraw from resnet import resnet18 from roi_pooling import roi_pooling,adaptive_max_pool def show(img,points): draw = ImageDraw.Draw(img) pts = [tuple(point )for point in points] draw.point(pts, fill = (255, 0, 0)) draw.text((100,100), "hand", fill=(0,255,0)) img.show() def show2(img,target_tensor,w,h): target = target_tensor.cpu().detach().numpy() target = target.tolist() target = target[0] points = [] for i in range(0,len(target),2): p_w = target[i]*w p_h = target[i+1]*h points.append([p_w,p_h]) print(points) show(img,points) normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],std=[0.229, 0.224, 0.225]) img_transforms = transforms.Compose([ transforms.Resize((320,320)), transforms.ToTensor(), normalize, ]) gpu = 0 checkpoint_path = "checkpoint_99.pth" torch.manual_seed(1) if gpu is not None: torch.cuda.manual_seed(1) model = resnet18() print("loaded model!") if gpu is not None: model = model.cuda(gpu) print("model to gpu") if os.path.isfile(checkpoint_path): checkpoint = torch.load(checkpoint_path) model.load_state_dict(checkpoint['state_dict']) print("loaded checkpoint {}".format(checkpoint_path)) class HandNet(nn.Module): def __init__(self): super(HandNet, self).__init__() self.main = nn.Sequential( nn.Conv2d(128, 128, 4, 2, 1, bias=False), nn.BatchNorm2d(128), nn.LeakyReLU(0.2, inplace=True), nn.Conv2d(128, 256, 4, 2, 1, bias=False), nn.BatchNorm2d(256), nn.LeakyReLU(0.2, inplace=True), nn.Conv2d(256, 512, 4, 2, 1, bias=False), ) self.fc = nn.Linear(512, 2) self.sig = nn.Sigmoid() def forward(self, input): output = self.main(input) output = output.view(-1, 512) output = self.fc(output) output = self.sig(output) return output model2 = HandNet() if gpu is not None: model2 = model2.cuda(gpu) 
def main(): print("model") img = "test/2.jpg" #img_path,img_path2 = generate_edges(img) print(model) img = Image.open(img).convert('RGB') img_tensor = img_transforms(img) input = torch.unsqueeze(img_tensor,0) if gpu is not None: input = input.cuda(gpu, non_blocking=True) x1,x2,x3,f_map = model(input) print(x1.shape) print(f_map.shape) output2 = model2(f_map) print(output2.shape) out = adaptive_max_pool(f_map,(4,4)) print(out.shape) #img = transforms.ToPILImage()(img_tensor) #w = 320 #h = 320 #img = img.resize((w, h),Image.ANTIALIAS) #show2(img,output,w,h) if __name__ == '__main__': main()
25.57265
86
0.637032
4a14e948d90ff87675251b73eb3be69ad3896515
1,030
py
Python
annotator/urls.py
acdh-oeaw/ner-annotator
ee8f72248669b848eb273644d80ad52dc495a07c
[ "MIT" ]
1
2019-01-02T15:05:30.000Z
2019-01-02T15:05:30.000Z
annotator/urls.py
acdh-oeaw/ner-annotator
ee8f72248669b848eb273644d80ad52dc495a07c
[ "MIT" ]
8
2020-02-11T23:02:04.000Z
2021-06-10T20:39:58.000Z
annotator/urls.py
acdh-oeaw/ner-annotator
ee8f72248669b848eb273644d80ad52dc495a07c
[ "MIT" ]
1
2019-01-02T15:05:31.000Z
2019-01-02T15:05:31.000Z
from django.conf.urls import url, include, handler404 from django.contrib import admin from django.conf import settings from rest_framework import routers from annotations.api_views import NerSampleViewSet, NerSampleViewSetToDo if 'bib' in settings.INSTALLED_APPS: from bib.api_views import ZotItemViewSet router = routers.DefaultRouter() router.register(r'nersample', NerSampleViewSet) router.register(r'nersampletodo', NerSampleViewSetToDo) if 'bib' in settings.INSTALLED_APPS: router.register(r'zotitems', ZotItemViewSet) urlpatterns = [ url(r'^api/', include(router.urls)), url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')), url(r'^admin/', admin.site.urls), url(r'^', include('webpage.urls', namespace='webpage')), url(r'^annotations/', include('annotations.urls', namespace='annotations')) ] if 'bib' in settings.INSTALLED_APPS: urlpatterns.append( url(r'^bib/', include('bib.urls', namespace='bib')), ) handler404 = 'webpage.views.handler404'
32.1875
83
0.741748
4a14e9b43adec8d5df64ed4ac8a7ff9e62ade359
1,261
py
Python
apps/util/speaker/__init__.py
donno2048/Rosehip-repl
b4f76a8cfc41a8b542b621a7d334ff643c716b79
[ "MIT" ]
null
null
null
apps/util/speaker/__init__.py
donno2048/Rosehip-repl
b4f76a8cfc41a8b542b621a7d334ff643c716b79
[ "MIT" ]
null
null
null
apps/util/speaker/__init__.py
donno2048/Rosehip-repl
b4f76a8cfc41a8b542b621a7d334ff643c716b79
[ "MIT" ]
null
null
null
import threading, pygame, pygame_gui;from pygame_gui.elements import UILabel;from pygame_gui.elements import UITextEntryLine;import pyttsx3 class Speaker(pygame_gui.elements.UIWindow): def __init__(self, pos, manager):super().__init__(pygame.Rect(pos, (400, 128)),manager=manager,window_display_title="speaker",object_id="#speaker",);self.label = UILabel(relative_rect=pygame.Rect(-20, 10, 400, 20),text="",manager=manager,container=self,);self.input = UITextEntryLine(relative_rect=pygame.Rect(0, 40, 368, 30), manager=manager, container=self);self.engine = pyttsx3.init();self.engine.setProperty("rate", 150);self.speakthrd = None def process_event(self, event): super().process_event(event) if event.type == pygame.USEREVENT and event.ui_element == self.input and event.user_type == pygame_gui.UI_TEXT_ENTRY_FINISHED and (self.speakthrd is None or not self.speakthrd.is_alive()) and self.input.get_text!="":self.engine.say(self.input.get_text);self.speakthrd = threading.Thread(target=self.engine.runAndWait, args=());self.speakthrd.start();self.label.set_text(self.input.get_text);self.input.set_text("") def load(manager, params):pos = params[0] if params is not None and len(params) > 0 else (100,100);Speaker(pos, manager)
157.625
452
0.76368
4a14e9d07ef1dd5462070749c40d6c1dbff3aa57
1,561
py
Python
pythia/pyre/db/Schemer.py
willic3/pythia
2657b95a0c07fd3c914ab6b5f7ec89a8edba004c
[ "BSD-3-Clause" ]
1
2015-11-30T08:01:39.000Z
2015-11-30T08:01:39.000Z
pythia/pyre/db/Schemer.py
willic3/pythia
2657b95a0c07fd3c914ab6b5f7ec89a8edba004c
[ "BSD-3-Clause" ]
27
2018-05-24T18:31:25.000Z
2021-10-16T03:57:52.000Z
pythia/pyre/db/Schemer.py
willic3/pythia
2657b95a0c07fd3c914ab6b5f7ec89a8edba004c
[ "BSD-3-Clause" ]
7
2019-07-19T02:30:56.000Z
2021-06-02T22:00:01.000Z
#!/usr/bin/env python # # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # # Michael A.G. Aivazis # California Institute of Technology # (C) 1998-2005 All Rights Reserved # # {LicenseText} # # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # from .Column import Column class Schemer(type): def __init__(cls, name, bases, dict): type.__init__(cls, name, bases, dict) writeable = [] columnRegistry = {} # register inherited columns bases = list(bases) bases.reverse() for base in bases: try: columnRegistry.update(base._columnRegistry) except AttributeError: pass try: writeable += base._writeable except AttributeError: pass # scan the class record for columns for name, item in cls.__dict__.items(): # disregard entries that do not derive from Column if not isinstance(item, Column): continue # register it columnRegistry[item.name] = item if not item.auto: writeable.append(item.name) # install the registries into the class record cls._writeable = writeable cls._columnRegistry = columnRegistry return # version __id__ = "$Id: Schemer.py,v 1.2 2005/04/07 22:16:36 aivazis Exp $" # End of file
24.015385
80
0.484305
4a14ea9b0cd4e8eadaa583e10b6a165100921d3e
237
py
Python
pyaz/dla/catalog/__init__.py
py-az-cli/py-az-cli
9a7dc44e360c096a5a2f15595353e9dad88a9792
[ "MIT" ]
null
null
null
pyaz/dla/catalog/__init__.py
py-az-cli/py-az-cli
9a7dc44e360c096a5a2f15595353e9dad88a9792
[ "MIT" ]
null
null
null
pyaz/dla/catalog/__init__.py
py-az-cli/py-az-cli
9a7dc44e360c096a5a2f15595353e9dad88a9792
[ "MIT" ]
1
2022-02-03T09:12:01.000Z
2022-02-03T09:12:01.000Z
''' Manage Data Lake Analytics catalogs. ''' from ... pyaz_utils import _call_az from . import assembly, credential, database, external_data_source, package, procedure, schema, table, table_partition, table_stats, table_type, tvf, view
33.857143
154
0.780591
4a14eb21ead249165aa1e3c6126f589c4cafbfb7
1,646
py
Python
app/core/tests/test_models.py
robnmrz/building-projects-api
67855e6d2e6339020fca1401551c4e67d40f83e9
[ "MIT" ]
null
null
null
app/core/tests/test_models.py
robnmrz/building-projects-api
67855e6d2e6339020fca1401551c4e67d40f83e9
[ "MIT" ]
4
2020-08-19T16:08:48.000Z
2020-08-19T17:12:39.000Z
app/core/tests/test_models.py
robnmrz/building-projects-api
67855e6d2e6339020fca1401551c4e67d40f83e9
[ "MIT" ]
null
null
null
from django.test import TestCase from django.contrib.auth import get_user_model class ModelTests(TestCase): def test_create_user_with_email_successfull(self): """ Test creating a new user with an email is successfull """ email = "robinmerz@test.de" password = "testpassword" # creating a test user user = get_user_model().objects.create_user( email = email, password = password ) self.assertEqual(user.email, email) self.assertTrue(user.check_password(password)) def test_new_user_email_normalized(self): """ Test the email for a new user is normalized """ email = "robinmerz@TEST.COM" password = "testpassword" user = get_user_model().objects.create_user( email = email, password = password ) self.assertEqual(user.email, email.lower()) def test_new_user_invalid_email(self): """ Test creating user with no email rases error """ password = "testpassword" with self.assertRaises(ValueError): get_user_model().objects.create_user( email = None, password = password ) def test_create_new_superuser_successfull(self): """ Test creating a new superuser is successfull """ email = "robinmerz@test.de" password = "testpassword" user = get_user_model().objects.create_superuser( email = email, password = password ) self.assertTrue(user.is_superuser) self.assertTrue(user.is_staff)
31.056604
69
0.608748
4a14eb66ceecf4234b339b9c2bcf67bcc8f4b592
1,633
py
Python
backend/pages/models/course.py
draihal/main-pr
81814c5370b592963e91ad0683caa560b0ea9579
[ "MIT" ]
2
2021-01-28T08:23:15.000Z
2021-03-09T06:06:58.000Z
backend/pages/models/course.py
draihal/main-pr
81814c5370b592963e91ad0683caa560b0ea9579
[ "MIT" ]
9
2020-01-02T15:31:04.000Z
2021-12-09T01:59:26.000Z
backend/pages/models/course.py
draihal/main-pr
81814c5370b592963e91ad0683caa560b0ea9579
[ "MIT" ]
1
2021-03-09T06:11:16.000Z
2021-03-09T06:11:16.000Z
from django.db import models from django.core.validators import validate_image_file_extension from rest_framework.reverse import reverse as api_reverse def upload_image_dir(instance, filename): return f'site/courses/{filename.lower()}' class Course(models.Model): name = models.CharField('Название курса', max_length=150, ) slug = models.SlugField('Slug для url', max_length=150, unique=True,) category = models.ForeignKey( 'pages.CourseCategory', on_delete=models.CASCADE, verbose_name='Категория курса') image = models.ImageField( 'Изображение для курса', upload_to=upload_image_dir, blank=True, validators=[validate_image_file_extension]) # TODO hash description = models.TextField('Что даст этот курс', ) necessary_knowledge = models.TextField('Необходимые знания', ) study_process = models.TextField('Процесс обучения', ) graduation_project = models.TextField('Выпускной проект', ) after_training = models.TextField('После обучения', ) certificate_sample = models.ImageField( 'Образец сертификата', upload_to=upload_image_dir, blank=True, validators=[validate_image_file_extension]) # TODO hash # program_details = # TODO updated_at = models.DateTimeField('Последнее обновление', auto_now=True) class Meta: ordering = ('name',) verbose_name = 'Курс' verbose_name_plural = 'Курсы' def __str__(self): return self.name def get_api_url(self, request=None): return api_reverse('pages:courses-detail', kwargs={'slug': self.slug}, request=request)
37.113636
95
0.706062
4a14ec2e8f97cd4723376b67945400ce5ab44ffd
532
py
Python
api/urls/spaces.py
PPinto22/watergenius-backend
5ee795485b2b4b4c2b334461c7ddb0ef2496096a
[ "Apache-2.0" ]
null
null
null
api/urls/spaces.py
PPinto22/watergenius-backend
5ee795485b2b4b4c2b334461c7ddb0ef2496096a
[ "Apache-2.0" ]
2
2021-03-19T23:01:20.000Z
2021-06-10T22:30:57.000Z
api/urls/spaces.py
PPinto22/watergenius-backend
5ee795485b2b4b4c2b334461c7ddb0ef2496096a
[ "Apache-2.0" ]
null
null
null
from django.urls import path from api.views.spaces import SpacesListView, SpaceDetailView, SpaceRestrictionsListView, \ SpaceRestrictionDetailView # /spaces urlpatterns = [ path('', SpacesListView.as_view(), name='spaces'), path('<int:spaceid>/', SpaceDetailView.as_view(), name='spaceDetail'), path('<int:spaceid>/restrictions/', SpaceRestrictionsListView.as_view(), name='spaceRestrictions'), path('<int:spaceid>/restrictions/<int:resid>/', SpaceRestrictionDetailView.as_view(), name='spaceRestriction'), ]
38
115
0.746241
4a14ec871fc443388cec773008de45acc2a26253
31,033
py
Python
trac/web/api.py
pkdevbox/trac
d044fc469e4dcbc5901c992b1b4160e9cbecee25
[ "BSD-3-Clause" ]
null
null
null
trac/web/api.py
pkdevbox/trac
d044fc469e4dcbc5901c992b1b4160e9cbecee25
[ "BSD-3-Clause" ]
null
null
null
trac/web/api.py
pkdevbox/trac
d044fc469e4dcbc5901c992b1b4160e9cbecee25
[ "BSD-3-Clause" ]
null
null
null
# -*- coding: utf-8 -*- # # Copyright (C) 2005-2009 Edgewall Software # Copyright (C) 2005-2006 Christopher Lenz <cmlenz@gmx.de> # All rights reserved. # # This software is licensed as described in the file COPYING, which # you should have received as part of this distribution. The terms # are also available at http://trac.edgewall.org/wiki/TracLicense. # # This software consists of voluntary contributions made by many # individuals. For the exact contribution history, see the revision # history and logs, available at http://trac.edgewall.org/log/. # # Author: Christopher Lenz <cmlenz@gmx.de> from abc import ABCMeta from BaseHTTPServer import BaseHTTPRequestHandler from Cookie import CookieError, BaseCookie, SimpleCookie import cgi from datetime import datetime import errno from hashlib import md5 import new import mimetypes import os import re import socket from StringIO import StringIO import sys import urlparse from genshi.builder import Fragment from trac.core import Interface, TracBaseError from trac.util import get_last_traceback, lazy, unquote from trac.util.datefmt import http_date, localtz from trac.util.text import empty, exception_to_unicode, to_unicode from trac.util.translation import _ from trac.web.href import Href from trac.web.wsgi import _FileWrapper class IAuthenticator(Interface): """Extension point interface for components that can provide the name of the remote user.""" def authenticate(req): """Return the name of the remote user, or `None` if the identity of the user is unknown.""" class IRequestHandler(Interface): """Decide which `trac.core.Component` handles which `Request`, and how. The boolean property `is_valid_default_handler` determines whether the `IRequestFilter` can be used as a `default_handler` and defaults to `True`. 
To be suitable as a `default_handler`, an `IRequestFilter` must return an HTML document and `data` dictionary for rendering the document, and must not require that `match_request` be called prior to `process_request`. The boolean property `jquery_noconflict` determines whether jQuery's `noConflict` mode will be activated by the handler, and defaults to `False`. """ def match_request(req): """Return whether the handler wants to process the given request.""" def process_request(req): """Process the request. Return a `(template_name, data, content_type)` tuple, where `data` is a dictionary of substitutions for the Genshi template. "text/html" is assumed if `content_type` is `None`. Note that if template processing should not occur, this method can simply send the response itself and not return anything. :Since 1.0: Clearsilver templates are no longer supported. :Since 1.1.2: the rendering `method` (xml, xhtml or text) may be returned as a fourth parameter in the tuple, but if not specified it will be inferred from the `content_type` when rendering the template. """ def is_valid_default_handler(handler): """Returns `True` if the `handler` is a valid default handler, as described in the `IRequestHandler` interface documentation. """ return handler and getattr(handler, 'is_valid_default_handler', True) class IRequestFilter(Interface): """Enable components to interfere with the processing done by the main handler, either before and/or after it enters in action. """ def pre_process_request(req, handler): """Called after initial handler selection, and can be used to change the selected handler or redirect request. Always returns the request handler, even if unchanged. """ def post_process_request(req, template, data, content_type, method=None): """Do any post-processing the request might need; typically adding values to the template `data` dictionary, or changing the Genshi template or mime type. `data` may be updated in place. 
Always returns a tuple of (template, data, content_type), even if unchanged. Note that `template`, `data`, `content_type` will be `None` if: - called when processing an error page - the default request handler did not return any result :Since 0.11: there's a `data` argument for supporting Genshi templates; this introduced a difference in arity which made it possible to distinguish between the IRequestFilter components still targeted at ClearSilver templates and the newer ones targeted at Genshi templates. :Since 1.0: Clearsilver templates are no longer supported. :Since 1.1.2: the rendering `method` will be passed if it is returned by the request handler, otherwise `method` will be `None`. For backward compatibility, the parameter is optional in the implementation's signature. """ class ITemplateStreamFilter(Interface): """Transform the generated content by filtering the Genshi event stream generated by the template, prior to its serialization. """ def filter_stream(req, method, filename, stream, data): """Return a filtered Genshi event stream, or the original unfiltered stream if no match. `req` is the current request object, `method` is the Genshi render method (xml, xhtml or text), `filename` is the filename of the template to be rendered, `stream` is the event stream and `data` is the data for the current template. See the Genshi_ documentation for more information. .. 
_Genshi: http://genshi.edgewall.org/wiki/Documentation/filters.html """ HTTP_STATUS = dict([(code, reason.title()) for code, (reason, description) in BaseHTTPRequestHandler.responses.items()]) class HTTPException(TracBaseError): __metaclass__ = ABCMeta def __init__(self, detail, *args): """Factory for HTTPException classes.""" if isinstance(detail, TracBaseError): self.detail = detail.message self.reason = detail.title else: self.detail = detail if args: self.detail = self.detail % args super(HTTPException, self).__init__('%s %s (%s)' % (self.code, self.reason, self.detail)) @property def message(self): # The message is based on the e.detail, which can be an Exception # object, but not a TracError one: when creating HTTPException, # a TracError.message is directly assigned to e.detail if isinstance(self.detail, Exception): # not a TracBaseError message = exception_to_unicode(self.detail) elif isinstance(self.detail, Fragment): # TracBaseError markup message = self.detail else: message = to_unicode(self.detail) return message @property def title(self): try: # We first try to get localized error messages here, but we # should ignore secondary errors if the main error was also # due to i18n issues title = _("Error") if self.reason: if title.lower() in self.reason.lower(): title = self.reason else: title = _("Error: %(message)s", message=self.reason) except Exception: title = "Error" return title @classmethod def subclass(cls, name, code): """Create a new Exception class representing a HTTP status code.""" reason = HTTP_STATUS.get(code, 'Unknown') new_class = new.classobj(name, (HTTPException,), { '__doc__': 'Exception for HTTP %d %s' % (code, reason) }) new_class.code = code new_class.reason = reason return new_class _HTTPException_subclass_names = [] for code in [code for code in HTTP_STATUS if code >= 400]: exc_name = HTTP_STATUS[code].replace(' ', '').replace('-', '') # 2.5 compatibility hack: if exc_name == 'InternalServerError': exc_name = 'InternalError' if 
exc_name.lower().startswith('http'): exc_name = exc_name[4:] exc_name = 'HTTP' + exc_name setattr(sys.modules[__name__], exc_name, HTTPException.subclass(exc_name, code)) _HTTPException_subclass_names.append(exc_name) del code, exc_name class _FieldStorage(cgi.FieldStorage): """Our own version of cgi.FieldStorage, with tweaks.""" def read_multi(self, *args, **kwargs): try: cgi.FieldStorage.read_multi(self, *args, **kwargs) except ValueError: # Most likely "Invalid boundary in multipart form", # possibly an upload of a .mht file? See #9880. self.read_single() class _RequestArgs(dict): """Dictionary subclass that provides convenient access to request parameters that may contain multiple values.""" def getfirst(self, name, default=None): """Return the first value for the specified parameter, or `default` if the parameter was not provided. """ if name not in self: return default val = self[name] if isinstance(val, list): val = val[0] return val def getlist(self, name): """Return a list of values for the specified parameter, even if only one value was provided. """ if name not in self: return [] val = self[name] if not isinstance(val, list): val = [val] return val def parse_arg_list(query_string): """Parse a query string into a list of `(name, value)` tuples. 
:Since 1.1.2: a leading `?` is stripped from `query_string`.""" args = [] if not query_string: return args query_string = query_string.lstrip('?') for arg in query_string.split('&'): nv = arg.split('=', 1) if len(nv) == 2: (name, value) = nv else: (name, value) = (nv[0], empty) name = unquote(name.replace('+', ' ')) if isinstance(name, str): name = unicode(name, 'utf-8') value = unquote(value.replace('+', ' ')) if isinstance(value, str): value = unicode(value, 'utf-8') args.append((name, value)) return args def arg_list_to_args(arg_list): """Convert a list of `(name, value)` tuples into into a `_RequestArgs`.""" args = _RequestArgs() for name, value in arg_list: if name in args: if isinstance(args[name], list): args[name].append(value) else: args[name] = [args[name], value] else: args[name] = value return args class RequestDone(TracBaseError): """Marker exception that indicates whether request processing has completed and a response was sent. """ iterable = None def __init__(self, iterable=None): self.iterable = iterable class Cookie(SimpleCookie): def load(self, rawdata, ignore_parse_errors=False): if ignore_parse_errors: self.bad_cookies = [] self._BaseCookie__set = self._loose_set SimpleCookie.load(self, rawdata) if ignore_parse_errors: self._BaseCookie__set = self._strict_set for key in self.bad_cookies: del self[key] _strict_set = BaseCookie._BaseCookie__set def _loose_set(self, key, real_value, coded_value): # If a key appears multiple times, the first occurrence has the # narrowest scope, keep that if key in self: return try: self._strict_set(key, real_value, coded_value) except CookieError: self.bad_cookies.append(key) dict.__setitem__(self, key, None) class Request(object): """Represents a HTTP request/response pair. This class provides a convenience API over WSGI. """ def __init__(self, environ, start_response): """Create the request wrapper. 
:param environ: The WSGI environment dict :param start_response: The WSGI callback for starting the response :param callbacks: A dictionary of functions that are used to lazily evaluate attribute lookups """ self.environ = environ self._start_response = start_response self._write = None self._status = '200 OK' self._response = None self._outheaders = [] self._outcharset = None self.outcookie = Cookie() self.callbacks = { 'arg_list': Request._parse_arg_list, 'args': lambda req: arg_list_to_args(req.arg_list), 'languages': Request._parse_languages, 'incookie': Request._parse_cookies, '_inheaders': Request._parse_headers } self.redirect_listeners = [] self.base_url = self.environ.get('trac.base_url') if not self.base_url: self.base_url = self._reconstruct_url() self.href = Href(self.base_path) self.abs_href = Href(self.base_url) def __getattr__(self, name): """Performs lazy attribute lookup by delegating to the functions in the callbacks dictionary.""" if name in self.callbacks: value = self.callbacks[name](self) setattr(self, name, value) return value raise AttributeError(name) def __repr__(self): uri = self.environ.get('PATH_INFO', '') qs = self.query_string if qs: uri += '?' + qs return '<%s "%s %r">' % (self.__class__.__name__, self.method, uri) # Public API @lazy def is_xhr(self): """Returns `True` if the request is an `XMLHttpRequest`. 
:since: 1.1.6 """ return self.get_header('X-Requested-With') == 'XMLHttpRequest' @property def method(self): """The HTTP method of the request""" return self.environ['REQUEST_METHOD'] @property def path_info(self): """Path inside the application""" path_info = self.environ.get('PATH_INFO', '') try: return unicode(path_info, 'utf-8') except UnicodeDecodeError: raise HTTPNotFound(_("Invalid URL encoding (was %(path_info)r)", path_info=path_info)) @property def query_string(self): """Query part of the request""" return self.environ.get('QUERY_STRING', '') @property def remote_addr(self): """IP address of the remote user""" return self.environ.get('REMOTE_ADDR') @property def remote_user(self): """ Name of the remote user. Will be `None` if the user has not logged in using HTTP authentication. """ user = self.environ.get('REMOTE_USER') if user is not None: return to_unicode(user) @property def scheme(self): """The scheme of the request URL""" return self.environ['wsgi.url_scheme'] @property def base_path(self): """The root path of the application""" return self.environ.get('SCRIPT_NAME', '') @property def server_name(self): """Name of the server""" return self.environ['SERVER_NAME'] @property def server_port(self): """Port number the server is bound to""" return int(self.environ['SERVER_PORT']) def add_redirect_listener(self, listener): """Add a callable to be called prior to executing a redirect. The callable is passed the arguments to the `redirect()` call. """ self.redirect_listeners.append(listener) def get_header(self, name): """Return the value of the specified HTTP header, or `None` if there's no such header in the request. """ name = name.lower() for key, value in self._inheaders: if key == name: return value return None def send_response(self, code=200): """Set the status code of the response.""" self._status = '%s %s' % (code, HTTP_STATUS.get(code, 'Unknown')) def send_header(self, name, value): """Send the response header with the specified name and value. 
`value` must either be an `unicode` string or can be converted to one (e.g. numbers, ...) """ lower_name = name.lower() if lower_name == 'content-type': ctpos = value.find('charset=') if ctpos >= 0: self._outcharset = value[ctpos + 8:].strip() elif lower_name == 'content-length': self._content_length = int(value) self._outheaders.append((name, unicode(value).encode('utf-8'))) def end_headers(self): """Must be called after all headers have been sent and before the actual content is written. """ self._send_cookie_headers() self._write = self._start_response(self._status, self._outheaders) def check_modified(self, datetime, extra=''): """Check the request "If-None-Match" header against an entity tag. The entity tag is generated from the specified last modified time (`datetime`), optionally appending an `extra` string to indicate variants of the requested resource. That `extra` parameter can also be a list, in which case the MD5 sum of the list content will be used. If the generated tag matches the "If-None-Match" header of the request, this method sends a "304 Not Modified" response to the client. Otherwise, it adds the entity tag as an "ETag" header to the response so that consecutive requests can be cached. """ if isinstance(extra, list): m = md5() for elt in extra: m.update(repr(elt)) extra = m.hexdigest() etag = 'W/"%s/%s/%s"' % (self.authname, http_date(datetime), extra) inm = self.get_header('If-None-Match') if not inm or inm != etag: self.send_header('ETag', etag) else: self.send_response(304) self.send_header('Content-Length', 0) self.end_headers() raise RequestDone _trident_re = re.compile(r' Trident/([0-9]+)') def redirect(self, url, permanent=False): """Send a redirect to the client, forwarding to the specified URL. The `url` may be relative or absolute, relative URLs will be translated appropriately. 
""" for listener in self.redirect_listeners: listener(self, url, permanent) if permanent: status = 301 # 'Moved Permanently' elif self.method == 'POST': status = 303 # 'See Other' -- safe to use in response to a POST else: status = 302 # 'Found' -- normal temporary redirect self.send_response(status) if not url.startswith(('http://', 'https://')): # Make sure the URL is absolute scheme, host = urlparse.urlparse(self.base_url)[:2] url = urlparse.urlunparse((scheme, host, url, None, None, None)) # Workaround #10382, IE6-IE9 bug when post and redirect with hash if status == 303 and '#' in url: user_agent = self.environ.get('HTTP_USER_AGENT', '') match_trident = self._trident_re.search(user_agent) if ' MSIE ' in user_agent and \ (not match_trident or int(match_trident.group(1)) < 6): url = url.replace('#', '#__msie303:') self.send_header('Location', url) self.send_header('Content-Type', 'text/plain') self.send_header('Content-Length', 0) self.send_header('Pragma', 'no-cache') self.send_header('Cache-Control', 'no-cache') self.send_header('Expires', 'Fri, 01 Jan 1999 00:00:00 GMT') self.end_headers() raise RequestDone def send(self, content, content_type='text/html', status=200): self.send_response(status) self.send_header('Cache-Control', 'must-revalidate') self.send_header('Expires', 'Fri, 01 Jan 1999 00:00:00 GMT') self.send_header('Content-Type', content_type + ';charset=utf-8') if isinstance(content, basestring): self.send_header('Content-Length', len(content)) self.end_headers() if self.method != 'HEAD': self.write(content) raise RequestDone def send_error(self, exc_info, template='error.html', content_type='text/html', status=500, env=None, data={}): try: if template.endswith('.html'): if env: from trac.web.chrome import Chrome, add_stylesheet add_stylesheet(self, 'common/css/code.css') try: data = Chrome(env).render_template(self, template, data, 'text/html') except Exception: # second chance rendering, in "safe" mode data['trac_error_rendering'] = True data = 
Chrome(env).render_template(self, template, data, 'text/html') else: content_type = 'text/plain' data = '%s\n\n%s: %s' % (data.get('title'), data.get('type'), data.get('message')) except Exception: # failed to render data = get_last_traceback() content_type = 'text/plain' if isinstance(data, unicode): data = data.encode('utf-8') self.send_response(status) self._outheaders = [] self.send_header('Cache-Control', 'must-revalidate') self.send_header('Expires', 'Fri, 01 Jan 1999 00:00:00 GMT') self.send_header('Content-Type', content_type + ';charset=utf-8') self.send_header('Content-Length', len(data)) self._send_cookie_headers() self._write = self._start_response(self._status, self._outheaders, exc_info) if self.method != 'HEAD': self.write(data) raise RequestDone def send_no_content(self): self.send_response(204) self.send_header('Content-Length', 0) self.send_header('Content-Type', 'text/plain') self.end_headers() raise RequestDone def send_file(self, path, mimetype=None): """Send a local file to the browser. This method includes the "Last-Modified", "Content-Type" and "Content-Length" headers in the response, corresponding to the file attributes. It also checks the last modification time of the local file against the "If-Modified-Since" provided by the user agent, and sends a "304 Not Modified" response if it matches. 
""" if not os.path.isfile(path): raise HTTPNotFound(_("File %(path)s not found", path=path)) stat = os.stat(path) mtime = datetime.fromtimestamp(stat.st_mtime, localtz) last_modified = http_date(mtime) if last_modified == self.get_header('If-Modified-Since'): self.send_response(304) self.send_header('Content-Length', 0) self.end_headers() raise RequestDone if not mimetype: mimetype = mimetypes.guess_type(path)[0] or \ 'application/octet-stream' self.send_response(200) self.send_header('Content-Type', mimetype) self.send_header('Content-Length', stat.st_size) self.send_header('Last-Modified', last_modified) use_xsendfile = getattr(self, 'use_xsendfile', False) if use_xsendfile: xsendfile_header = getattr(self, 'xsendfile_header', None) if xsendfile_header: self.send_header(xsendfile_header, os.path.abspath(path)) else: use_xsendfile = False self.end_headers() if not use_xsendfile and self.method != 'HEAD': fileobj = open(path, 'rb') file_wrapper = self.environ.get('wsgi.file_wrapper', _FileWrapper) self._response = file_wrapper(fileobj, 4096) raise RequestDone def read(self, size=None): """Read the specified number of bytes from the request body.""" fileobj = self.environ['wsgi.input'] if size is None: size = self.get_header('Content-Length') if size is None: size = -1 else: size = int(size) data = fileobj.read(size) return data CHUNK_SIZE = 4096 def write(self, data): """Write the given data to the response body. *data* **must** be a `str` string or an iterable instance which iterates `str` strings, encoded with the charset which has been specified in the ``'Content-Type'`` header or UTF-8 otherwise. Note that when the ``'Content-Length'`` header is specified, its value either corresponds to the length of *data*, or, if there are multiple calls to `write`, to the cumulative length of the *data* arguments. 
""" if not self._write: self.end_headers() try: chunk_size = self.CHUNK_SIZE bufsize = 0 buf = [] buf_append = buf.append if isinstance(data, basestring): data = [data] for chunk in data: if isinstance(chunk, unicode): raise ValueError("Can't send unicode content") if not chunk: continue bufsize += len(chunk) buf_append(chunk) if bufsize >= chunk_size: self._write(''.join(buf)) bufsize = 0 buf[:] = () if bufsize > 0: self._write(''.join(buf)) except (IOError, socket.error) as e: if e.args[0] in (errno.EPIPE, errno.ECONNRESET, 10053, 10054): raise RequestDone # Note that mod_wsgi raises an IOError with only a message # if the client disconnects if 'mod_wsgi.version' in self.environ and \ e.args[0] in ('failed to write data', 'client connection closed'): raise RequestDone raise # Internal methods def _parse_arg_list(self): """Parse the supplied request parameters into a list of `(name, value)` tuples. """ fp = self.environ['wsgi.input'] # Avoid letting cgi.FieldStorage consume the input stream when the # request does not contain form data ctype = self.get_header('Content-Type') if ctype: ctype, options = cgi.parse_header(ctype) if ctype not in ('application/x-www-form-urlencoded', 'multipart/form-data'): fp = StringIO('') # Python 2.6 introduced a backwards incompatible change for # FieldStorage where QUERY_STRING is no longer ignored for POST # requests. We'll keep the pre 2.6 behaviour for now... 
if self.method == 'POST': qs_on_post = self.environ.pop('QUERY_STRING', '') fs = _FieldStorage(fp, environ=self.environ, keep_blank_values=True) if self.method == 'POST': self.environ['QUERY_STRING'] = qs_on_post args = [] for value in fs.list or (): try: name = unicode(value.name, 'utf-8') if not value.filename: value = unicode(value.value, 'utf-8') except UnicodeDecodeError as e: raise HTTPBadRequest( _("Invalid encoding in form data: %(msg)s", msg=exception_to_unicode(e))) args.append((name, value)) return args def _parse_cookies(self): cookies = Cookie() header = self.get_header('Cookie') if header: cookies.load(header, ignore_parse_errors=True) return cookies def _parse_headers(self): headers = [(name[5:].replace('_', '-').lower(), value) for name, value in self.environ.items() if name.startswith('HTTP_')] if 'CONTENT_LENGTH' in self.environ: headers.append(('content-length', self.environ['CONTENT_LENGTH'])) if 'CONTENT_TYPE' in self.environ: headers.append(('content-type', self.environ['CONTENT_TYPE'])) return headers def _parse_languages(self): """The list of languages preferred by the remote user, taken from the ``Accept-Language`` header. 
""" header = self.get_header('Accept-Language') or 'en-us' langs = [] for i, lang in enumerate(header.split(',')): code, params = cgi.parse_header(lang) q = 1 if 'q' in params: try: q = float(params['q']) except ValueError: q = 0 langs.append((-q, i, code)) langs.sort() return [code for q, i, code in langs] def _reconstruct_url(self): """Reconstruct the absolute base URL of the application.""" host = self.get_header('Host') if not host: # Missing host header, so reconstruct the host from the # server name and port default_port = {'http': 80, 'https': 443} if self.server_port and self.server_port != \ default_port[self.scheme]: host = '%s:%d' % (self.server_name, self.server_port) else: host = self.server_name return urlparse.urlunparse((self.scheme, host, self.base_path, None, None, None)) def _send_cookie_headers(self): for name in self.outcookie.keys(): path = self.outcookie[name].get('path') if path: path = path.replace(' ', '%20') \ .replace(';', '%3B') \ .replace(',', '%3C') self.outcookie[name]['path'] = path cookies = to_unicode(self.outcookie.output(header='')).encode('utf-8') for cookie in cookies.splitlines(): self._outheaders.append(('Set-Cookie', cookie.strip())) __no_apidoc__ = _HTTPException_subclass_names
36.552415
79
0.595978
4a14ee891d9e87c09caaf21d1806b97bd6a6628c
9,455
py
Python
service/mock.py
profesormig/quimica3a
a453f0d7485ebc4b2d7b06a72b44c6c179a3bbd4
[ "BSD-3-Clause" ]
null
null
null
service/mock.py
profesormig/quimica3a
a453f0d7485ebc4b2d7b06a72b44c6c179a3bbd4
[ "BSD-3-Clause" ]
null
null
null
service/mock.py
profesormig/quimica3a
a453f0d7485ebc4b2d7b06a72b44c6c179a3bbd4
[ "BSD-3-Clause" ]
null
null
null
""" Driver factory (non-standard mock but useful for testing) """ import uuid from threepio import logger from rtwo.models.instance import Instance from rtwo.driver import MockDriver from rtwo.drivers.openstack_network import NetworkManager from rtwo.drivers.common import _connect_to_keystone_v3, _token_to_keystone_scoped_project # Globals.. ALL_VOLUMES = [] ALL_INSTANCES = [] ALL_MACHINES = [] ALL_SIZES = [] ALL_NETWORKS = [] ALL_SUBNETS = [] ALL_ROUTERS = [] ALL_PORTS = [] ALL_IPS = [] class AtmosphereMockNetworkManager(NetworkManager): """ NOTE: Mock manager is likely more-than-feature-complete Once we are sure that no other overrides are necessary, we can cull the extra methods. """ def __init__(self, *args, **kwargs): self.neutron = None self.default_router = None self.all_networks = ALL_NETWORKS self.all_subnets = ALL_SUBNETS self.all_routers = ALL_ROUTERS self.all_ports = ALL_PORTS @staticmethod def create_manager(core_identity): return AtmosphereMockNetworkManager( core_identity) def tenant_networks(self, tenant_id=None): return [] def get_tenant_id(self): return 1 def get_credentials(self): """ Return the user_id and tenant_id of the network manager """ return { 'user_id':1, 'tenant_id':1 } def disassociate_floating_ip(self, server_id): return '0.0.0.0' def associate_floating_ip(self, server_id): return '0.0.0.0' def create_port(self, server_id, network_id, **kwargs): port = kwargs self.all_ports.append(port) return port def find_server_ports(self, server_id): return self.all_ports def list_floating_ips(self): return ['0.0.0.0'] def rename_security_group(self, project, security_group_name=None): return True def lc_list_networks(self, *args, **kwargs): """ Call neutron list networks and convert to libcloud objects """ return [] def get_network(self, network_id): for net in self.all_networks: if network_id == net['id']: return net return None def get_subnet(self, subnet_id): for subnet in self.all_subnets: if subnet_id == subnet['id']: return subnet return 
None def get_port(self, port_id): ports = self.all_ports if not ports: return [] for port in ports: if port['id'] == port_id: return port return None def list_networks(self, *args, **kwargs): """ NOTE: kwargs can be: tenant_id=, or any other attr listed in the details of a network. """ return self.all_networks def list_subnets(self): return self.all_subnets def list_routers(self): return self.all_routers def list_ports(self, **kwargs): """ Options: subnet_id=subnet.id device_id=device.id ip_address=111.222.333.444 """ return self.all_ports def create_network(self, neutron, network_name): network = {'name': network_name, 'admin_state_up': True} self.all_networks.append(network) return network def validate_cidr(self, cidr): return True def create_subnet(self, neutron, subnet_name, network_id, ip_version=4, cidr=None, dns_nameservers=[], subnet_pool_id=None): subnet = { 'name': subnet_name, 'network_id': network_id, 'ip_version': ip_version, } if subnet_pool_id: subnet['subnetpool_id'] = subnet_pool_id else: if not dns_nameservers: dns_nameservers = ['8.8.8.8', '8.8.4.4'] subnet['dns_nameservers'] = dns_nameservers subnet['cidr'] = cidr logger.debug("Creating subnet - %s" % subnet) self.all_subnets.append(subnet) return subnet def create_router(self, neutron, router_name): existing_routers = self.find_router(router_name) if existing_routers: logger.info('Router %s already exists' % router_name) return existing_routers[0] router = {'name': router_name, 'admin_state_up': True} self.all_routers.append(router) return router def add_router_interface(self, router, subnet, interface_name=None): interface_obj = {"name":interface_name} return interface_obj def set_router_gateway(self, neutron, router_name, external_network_name='ext_net'): """ Must be run as admin """ body = {'router_id': router_name, 'network_id': external_network_name} return body def remove_router_gateway(self, router_name): return def remove_router_interface(self, neutron, router_name, subnet_name): return 
def delete_router(self, neutron, router_name): return def delete_subnet(self, neutron, subnet_name): return def delete_network(self, neutron, network_name): return def delete_port(self, port): return class MockInstance(Instance): def __init__(self, id=None, provider=None, source=None, ip=None, extra={}, *args, **kwargs): identifier = id if not identifier: identifier = kwargs.get('uuid', uuid.uuid4()) if not ip: ip = '0.0.0.0' self.id = identifier self.alias = identifier self.provider = provider self.name = kwargs.get('name', "Mock instance %s" % identifier) self.source = source self.ip = ip self._node = None self.extra = extra def json(self): return self.__dict__ class AtmosphereMockDriver(MockDriver): all_volumes = ALL_VOLUMES all_instances = ALL_INSTANCES all_machines = ALL_MACHINES all_sizes = ALL_SIZES def is_valid(self): """ Performs validation on the driver -- for most drivers, this will mean you actually have to _call something_ on the API. if it succeeds, the driver is valid. """ return True def list_all_volumes(self, *args, **kwargs): """ Return the InstanceClass representation of a libcloud node """ return self.all_volumes def list_all_instances(self, **kwargs): """ Return the InstanceClass representation of a libcloud node """ return self.all_instances def get_instance(self, instance_id, *args, **kwargs): """ Return the InstanceClass representation of a libcloud node """ instances = self.list_all_instances() instance = [inst for inst in instances if inst.id == instance_id] if not instance: return None return instance[0] def add_core_instance(self, core_instance): extra = {} extra['metadata'] = {'iplant_suspend_fix': False} return self.create_instance( id=str(core_instance.provider_alias), ip=core_instance.ip_address, name=core_instance.name, extra=extra) def list_instances(self, **kwargs): """ Return the InstanceClass representation of a libcloud node """ return self.all_instances def list_machines(self, *args, **kwargs): """ Return the MachineClass 
representation of a libcloud NodeImage """ return self.all_machines def list_sizes(self, *args, **kwargs): """ Return the SizeClass representation of a libcloud NodeSize """ return self.all_sizes def list_locations(self, *args, **kwargs): return [] def create_instance(self, *args, **kwargs): """ Return the InstanceClass representation of a libcloud node """ new_instance = MockInstance(*args, **kwargs) self.all_instances.append(new_instance) return new_instance def deploy_instance(self, *args, **kwargs): return True def reset_network(self, *args, **kwargs): return True def reboot_instance(self, *args, **kwargs): return True def start_instance(self, *args, **kwargs): return True def stop_instance(self, *args, **kwargs): return True def resume_instance(self, *args, **kwargs): return True def confirm_resize(self, *args, **kwargs): return True def resize_instance(self, *args, **kwargs): return True def suspend_instance(self, *args, **kwargs): return True def destroy_instance(self, new_instance, *args, **kwargs): index = self.all_instances.index(new_instance) return self.all_instances.pop(index) def boot_volume(self, *args, **kwargs): raise NotImplementedError() def list_volumes(self, *args, **kwargs): return self.all_volumes def create_volume(self, *args, **kwargs): raise NotImplementedError() def destroy_volume(self, *args, **kwargs): raise NotImplementedError() def attach_volume(self, *args, **kwargs): raise NotImplementedError() def detach_volume(self, *args, **kwargs): raise NotImplementedError()
27.973373
164
0.615865
4a14eedbfb5699fce1da8c8944b4305da90742f8
395,138
py
Python
vscode/extensions/ms-python.python-2019.2.5558/pythonFiles/lib/python/ptvsd/_vendored/pydevd/_pydevd_bundle/_debug_adapter/pydevd_schema.py
L2D22/dotfiles
0a01648e338159ef0cd998f6e0265beb7dc06fbf
[ "MIT" ]
null
null
null
vscode/extensions/ms-python.python-2019.2.5558/pythonFiles/lib/python/ptvsd/_vendored/pydevd/_pydevd_bundle/_debug_adapter/pydevd_schema.py
L2D22/dotfiles
0a01648e338159ef0cd998f6e0265beb7dc06fbf
[ "MIT" ]
null
null
null
vscode/extensions/ms-python.python-2019.2.5558/pythonFiles/lib/python/ptvsd/_vendored/pydevd/_pydevd_bundle/_debug_adapter/pydevd_schema.py
L2D22/dotfiles
0a01648e338159ef0cd998f6e0265beb7dc06fbf
[ "MIT" ]
null
null
null
# Automatically generated code. # Do not edit manually. # Generated by running: __main__pydevd_gen_debug_adapter_protocol.py from .pydevd_base_schema import BaseSchema, register, register_request, register_response, register_event @register class ProtocolMessage(BaseSchema): """ Base class of requests, responses, and events. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "description": "Message type.", "_enum": [ "request", "response", "event" ] } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, type, seq=-1, **kwargs): """ :param string type: Message type. :param integer seq: Sequence number. """ self.type = type self.seq = seq self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'seq': self.seq, } dct.update(self.kwargs) return dct @register class Request(BaseSchema): """ A client or debug adapter initiated request. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "request" ] }, "command": { "type": "string", "description": "The command to execute." }, "arguments": { "type": [ "array", "boolean", "integer", "null", "number", "object", "string" ], "description": "Object containing arguments for the command." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, command, seq=-1, arguments=None, **kwargs): """ :param string type: :param string command: The command to execute. :param integer seq: Sequence number. :param ['array', 'boolean', 'integer', 'null', 'number', 'object', 'string'] arguments: Object containing arguments for the command. 
""" self.type = 'request' self.command = command self.seq = seq self.arguments = arguments self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'command': self.command, 'seq': self.seq, } if self.arguments is not None: dct['arguments'] = self.arguments dct.update(self.kwargs) return dct @register class Event(BaseSchema): """ A debug adapter initiated event. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "event" ] }, "event": { "type": "string", "description": "Type of event." }, "body": { "type": [ "array", "boolean", "integer", "null", "number", "object", "string" ], "description": "Event-specific information." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, event, seq=-1, body=None, **kwargs): """ :param string type: :param string event: Type of event. :param integer seq: Sequence number. :param ['array', 'boolean', 'integer', 'null', 'number', 'object', 'string'] body: Event-specific information. """ self.type = 'event' self.event = event self.seq = seq self.body = body self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'event': self.event, 'seq': self.seq, } if self.body is not None: dct['body'] = self.body dct.update(self.kwargs) return dct @register class Response(BaseSchema): """ Response for a request. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "response" ] }, "request_seq": { "type": "integer", "description": "Sequence number of the corresponding request." }, "success": { "type": "boolean", "description": "Outcome of the request." }, "command": { "type": "string", "description": "The command requested." }, "message": { "type": "string", "description": "Contains error message if success == false." 
}, "body": { "type": [ "array", "boolean", "integer", "null", "number", "object", "string" ], "description": "Contains request result if success is true and optional error details if success is false." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, request_seq, success, command, seq=-1, message=None, body=None, **kwargs): """ :param string type: :param integer request_seq: Sequence number of the corresponding request. :param boolean success: Outcome of the request. :param string command: The command requested. :param integer seq: Sequence number. :param string message: Contains error message if success == false. :param ['array', 'boolean', 'integer', 'null', 'number', 'object', 'string'] body: Contains request result if success is true and optional error details if success is false. """ self.type = 'response' self.request_seq = request_seq self.success = success self.command = command self.seq = seq self.message = message self.body = body self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'request_seq': self.request_seq, 'success': self.success, 'command': self.command, 'seq': self.seq, } if self.message is not None: dct['message'] = self.message if self.body is not None: dct['body'] = self.body dct.update(self.kwargs) return dct @register_response('error') @register class ErrorResponse(BaseSchema): """ On error (whenever 'success' is false), the body can provide more details. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "response" ] }, "request_seq": { "type": "integer", "description": "Sequence number of the corresponding request." }, "success": { "type": "boolean", "description": "Outcome of the request." }, "command": { "type": "string", "description": "The command requested." }, "message": { "type": "string", "description": "Contains error message if success == false." 
}, "body": { "type": "object", "properties": { "error": { "$ref": "#/definitions/Message", "description": "An optional, structured error message." } } } } __refs__ = set(['body']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, request_seq, success, command, body, seq=-1, message=None, **kwargs): """ :param string type: :param integer request_seq: Sequence number of the corresponding request. :param boolean success: Outcome of the request. :param string command: The command requested. :param ErrorResponseBody body: :param integer seq: Sequence number. :param string message: Contains error message if success == false. """ self.type = 'response' self.request_seq = request_seq self.success = success self.command = command if body is None: self.body = ErrorResponseBody() else: self.body = ErrorResponseBody(**body) if body.__class__ != ErrorResponseBody else body self.seq = seq self.message = message self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'request_seq': self.request_seq, 'success': self.success, 'command': self.command, 'body': self.body.to_dict(), 'seq': self.seq, } if self.message is not None: dct['message'] = self.message dct.update(self.kwargs) return dct @register_event('initialized') @register class InitializedEvent(BaseSchema): """ This event indicates that the debug adapter is ready to accept configuration requests (e.g. SetBreakpointsRequest, SetExceptionBreakpointsRequest). A debug adapter is expected to send this event when it is ready to accept configuration requests (but not before the 'initialize' request has finished). 
The sequence of events/requests is as follows: - adapters sends 'initialized' event (after the 'initialize' request has returned) - frontend sends zero or more 'setBreakpoints' requests - frontend sends one 'setFunctionBreakpoints' request - frontend sends a 'setExceptionBreakpoints' request if one or more 'exceptionBreakpointFilters' have been defined (or if 'supportsConfigurationDoneRequest' is not defined or false) - frontend sends other future configuration requests - frontend sends one 'configurationDone' request to indicate the end of the configuration. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "event" ] }, "event": { "type": "string", "enum": [ "initialized" ] }, "body": { "type": [ "array", "boolean", "integer", "null", "number", "object", "string" ], "description": "Event-specific information." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, seq=-1, body=None, **kwargs): """ :param string type: :param string event: :param integer seq: Sequence number. :param ['array', 'boolean', 'integer', 'null', 'number', 'object', 'string'] body: Event-specific information. """ self.type = 'event' self.event = 'initialized' self.seq = seq self.body = body self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'event': self.event, 'seq': self.seq, } if self.body is not None: dct['body'] = self.body dct.update(self.kwargs) return dct @register_event('stopped') @register class StoppedEvent(BaseSchema): """ The event indicates that the execution of the debuggee has stopped due to some condition. This can be caused by a break point previously set, a stepping action has completed, by executing a debugger statement etc. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." 
}, "type": { "type": "string", "enum": [ "event" ] }, "event": { "type": "string", "enum": [ "stopped" ] }, "body": { "type": "object", "properties": { "reason": { "type": "string", "description": "The reason for the event.\nFor backward compatibility this string is shown in the UI if the 'description' attribute is missing (but it must not be translated).", "_enum": [ "step", "breakpoint", "exception", "pause", "entry", "goto" ] }, "description": { "type": "string", "description": "The full reason for the event, e.g. 'Paused on exception'. This string is shown in the UI as is and must be translated." }, "threadId": { "type": "integer", "description": "The thread which was stopped." }, "preserveFocusHint": { "type": "boolean", "description": "A value of true hints to the frontend that this event should not change the focus." }, "text": { "type": "string", "description": "Additional information. E.g. if reason is 'exception', text contains the exception name. This string is shown in the UI." }, "allThreadsStopped": { "type": "boolean", "description": "If 'allThreadsStopped' is true, a debug adapter can announce that all threads have stopped.\n- The client should use this information to enable that all threads can be expanded to access their stacktraces.\n- If the attribute is missing or false, only the thread with the given threadId can be expanded." } }, "required": [ "reason" ] } } __refs__ = set(['body']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, body, seq=-1, **kwargs): """ :param string type: :param string event: :param StoppedEventBody body: :param integer seq: Sequence number. 
""" self.type = 'event' self.event = 'stopped' if body is None: self.body = StoppedEventBody() else: self.body = StoppedEventBody(**body) if body.__class__ != StoppedEventBody else body self.seq = seq self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'event': self.event, 'body': self.body.to_dict(), 'seq': self.seq, } dct.update(self.kwargs) return dct @register_event('continued') @register class ContinuedEvent(BaseSchema): """ The event indicates that the execution of the debuggee has continued. Please note: a debug adapter is not expected to send this event in response to a request that implies that execution continues, e.g. 'launch' or 'continue'. It is only necessary to send a 'continued' event if there was no previous request that implied this. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "event" ] }, "event": { "type": "string", "enum": [ "continued" ] }, "body": { "type": "object", "properties": { "threadId": { "type": "integer", "description": "The thread which was continued." }, "allThreadsContinued": { "type": "boolean", "description": "If 'allThreadsContinued' is true, a debug adapter can announce that all threads have continued." } }, "required": [ "threadId" ] } } __refs__ = set(['body']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, body, seq=-1, **kwargs): """ :param string type: :param string event: :param ContinuedEventBody body: :param integer seq: Sequence number. 
""" self.type = 'event' self.event = 'continued' if body is None: self.body = ContinuedEventBody() else: self.body = ContinuedEventBody(**body) if body.__class__ != ContinuedEventBody else body self.seq = seq self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'event': self.event, 'body': self.body.to_dict(), 'seq': self.seq, } dct.update(self.kwargs) return dct @register_event('exited') @register class ExitedEvent(BaseSchema): """ The event indicates that the debuggee has exited and returns its exit code. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "event" ] }, "event": { "type": "string", "enum": [ "exited" ] }, "body": { "type": "object", "properties": { "exitCode": { "type": "integer", "description": "The exit code returned from the debuggee." } }, "required": [ "exitCode" ] } } __refs__ = set(['body']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, body, seq=-1, **kwargs): """ :param string type: :param string event: :param ExitedEventBody body: :param integer seq: Sequence number. """ self.type = 'event' self.event = 'exited' if body is None: self.body = ExitedEventBody() else: self.body = ExitedEventBody(**body) if body.__class__ != ExitedEventBody else body self.seq = seq self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'event': self.event, 'body': self.body.to_dict(), 'seq': self.seq, } dct.update(self.kwargs) return dct @register_event('terminated') @register class TerminatedEvent(BaseSchema): """ The event indicates that debugging of the debuggee has terminated. This does **not** mean that the debuggee itself has exited. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." 
}, "type": { "type": "string", "enum": [ "event" ] }, "event": { "type": "string", "enum": [ "terminated" ] }, "body": { "type": "object", "properties": { "restart": { "type": [ "array", "boolean", "integer", "null", "number", "object", "string" ], "description": "A debug adapter may set 'restart' to true (or to an arbitrary object) to request that the front end restarts the session.\nThe value is not interpreted by the client and passed unmodified as an attribute '__restart' to the 'launch' and 'attach' requests." } } } } __refs__ = set(['body']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, seq=-1, body=None, **kwargs): """ :param string type: :param string event: :param integer seq: Sequence number. :param TerminatedEventBody body: """ self.type = 'event' self.event = 'terminated' self.seq = seq if body is None: self.body = TerminatedEventBody() else: self.body = TerminatedEventBody(**body) if body.__class__ != TerminatedEventBody else body self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'event': self.event, 'seq': self.seq, } if self.body is not None: dct['body'] = self.body.to_dict() dct.update(self.kwargs) return dct @register_event('thread') @register class ThreadEvent(BaseSchema): """ The event indicates that a thread has started or exited. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "event" ] }, "event": { "type": "string", "enum": [ "thread" ] }, "body": { "type": "object", "properties": { "reason": { "type": "string", "description": "The reason for the event.", "_enum": [ "started", "exited" ] }, "threadId": { "type": "integer", "description": "The identifier of the thread." 
} }, "required": [ "reason", "threadId" ] } } __refs__ = set(['body']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, body, seq=-1, **kwargs): """ :param string type: :param string event: :param ThreadEventBody body: :param integer seq: Sequence number. """ self.type = 'event' self.event = 'thread' if body is None: self.body = ThreadEventBody() else: self.body = ThreadEventBody(**body) if body.__class__ != ThreadEventBody else body self.seq = seq self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'event': self.event, 'body': self.body.to_dict(), 'seq': self.seq, } dct.update(self.kwargs) return dct @register_event('output') @register class OutputEvent(BaseSchema): """ The event indicates that the target has produced some output. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "event" ] }, "event": { "type": "string", "enum": [ "output" ] }, "body": { "type": "object", "properties": { "category": { "type": "string", "description": "The output category. If not specified, 'console' is assumed.", "_enum": [ "console", "stdout", "stderr", "telemetry" ] }, "output": { "type": "string", "description": "The output to report." }, "variablesReference": { "type": "number", "description": "If an attribute 'variablesReference' exists and its value is > 0, the output contains objects which can be retrieved by passing 'variablesReference' to the 'variables' request." }, "source": { "$ref": "#/definitions/Source", "description": "An optional source location where the output was produced." }, "line": { "type": "integer", "description": "An optional source location line where the output was produced." }, "column": { "type": "integer", "description": "An optional source location column where the output was produced." 
}, "data": { "type": [ "array", "boolean", "integer", "null", "number", "object", "string" ], "description": "Optional data to report. For the 'telemetry' category the data will be sent to telemetry, for the other categories the data is shown in JSON format." } }, "required": [ "output" ] } } __refs__ = set(['body']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, body, seq=-1, **kwargs): """ :param string type: :param string event: :param OutputEventBody body: :param integer seq: Sequence number. """ self.type = 'event' self.event = 'output' if body is None: self.body = OutputEventBody() else: self.body = OutputEventBody(**body) if body.__class__ != OutputEventBody else body self.seq = seq self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'event': self.event, 'body': self.body.to_dict(), 'seq': self.seq, } dct.update(self.kwargs) return dct @register_event('breakpoint') @register class BreakpointEvent(BaseSchema): """ The event indicates that some information about a breakpoint has changed. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "event" ] }, "event": { "type": "string", "enum": [ "breakpoint" ] }, "body": { "type": "object", "properties": { "reason": { "type": "string", "description": "The reason for the event.", "_enum": [ "changed", "new", "removed" ] }, "breakpoint": { "$ref": "#/definitions/Breakpoint", "description": "The 'id' attribute is used to find the target breakpoint and the other attributes are used as the new values." } }, "required": [ "reason", "breakpoint" ] } } __refs__ = set(['body']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, body, seq=-1, **kwargs): """ :param string type: :param string event: :param BreakpointEventBody body: :param integer seq: Sequence number. 
""" self.type = 'event' self.event = 'breakpoint' if body is None: self.body = BreakpointEventBody() else: self.body = BreakpointEventBody(**body) if body.__class__ != BreakpointEventBody else body self.seq = seq self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'event': self.event, 'body': self.body.to_dict(), 'seq': self.seq, } dct.update(self.kwargs) return dct @register_event('module') @register class ModuleEvent(BaseSchema): """ The event indicates that some information about a module has changed. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "event" ] }, "event": { "type": "string", "enum": [ "module" ] }, "body": { "type": "object", "properties": { "reason": { "type": "string", "description": "The reason for the event.", "enum": [ "new", "changed", "removed" ] }, "module": { "$ref": "#/definitions/Module", "description": "The new, changed, or removed module. In case of 'removed' only the module id is used." } }, "required": [ "reason", "module" ] } } __refs__ = set(['body']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, body, seq=-1, **kwargs): """ :param string type: :param string event: :param ModuleEventBody body: :param integer seq: Sequence number. """ self.type = 'event' self.event = 'module' if body is None: self.body = ModuleEventBody() else: self.body = ModuleEventBody(**body) if body.__class__ != ModuleEventBody else body self.seq = seq self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'event': self.event, 'body': self.body.to_dict(), 'seq': self.seq, } dct.update(self.kwargs) return dct @register_event('loadedSource') @register class LoadedSourceEvent(BaseSchema): """ The event indicates that some source has been added, changed, or removed from the set of all loaded sources. Note: automatically generated code. Do not edit manually. 
""" __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "event" ] }, "event": { "type": "string", "enum": [ "loadedSource" ] }, "body": { "type": "object", "properties": { "reason": { "type": "string", "description": "The reason for the event.", "enum": [ "new", "changed", "removed" ] }, "source": { "$ref": "#/definitions/Source", "description": "The new, changed, or removed source." } }, "required": [ "reason", "source" ] } } __refs__ = set(['body']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, body, seq=-1, **kwargs): """ :param string type: :param string event: :param LoadedSourceEventBody body: :param integer seq: Sequence number. """ self.type = 'event' self.event = 'loadedSource' if body is None: self.body = LoadedSourceEventBody() else: self.body = LoadedSourceEventBody(**body) if body.__class__ != LoadedSourceEventBody else body self.seq = seq self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'event': self.event, 'body': self.body.to_dict(), 'seq': self.seq, } dct.update(self.kwargs) return dct @register_event('process') @register class ProcessEvent(BaseSchema): """ The event indicates that the debugger has begun debugging a new process. Either one that it has launched, or one that it has attached to. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "event" ] }, "event": { "type": "string", "enum": [ "process" ] }, "body": { "type": "object", "properties": { "name": { "type": "string", "description": "The logical name of the process. This is usually the full path to process's executable file. Example: /home/example/myproj/program.js." }, "systemProcessId": { "type": "integer", "description": "The system process id of the debugged process. This property will be missing for non-system processes." 
}, "isLocalProcess": { "type": "boolean", "description": "If true, the process is running on the same computer as the debug adapter." }, "startMethod": { "type": "string", "enum": [ "launch", "attach", "attachForSuspendedLaunch" ], "description": "Describes how the debug engine started debugging this process.", "enumDescriptions": [ "Process was launched under the debugger.", "Debugger attached to an existing process.", "A project launcher component has launched a new process in a suspended state and then asked the debugger to attach." ] } }, "required": [ "name" ] } } __refs__ = set(['body']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, body, seq=-1, **kwargs): """ :param string type: :param string event: :param ProcessEventBody body: :param integer seq: Sequence number. """ self.type = 'event' self.event = 'process' if body is None: self.body = ProcessEventBody() else: self.body = ProcessEventBody(**body) if body.__class__ != ProcessEventBody else body self.seq = seq self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'event': self.event, 'body': self.body.to_dict(), 'seq': self.seq, } dct.update(self.kwargs) return dct @register_event('capabilities') @register class CapabilitiesEvent(BaseSchema): """ The event indicates that one or more capabilities have changed. Since the capabilities are dependent on the frontend and its UI, it might not be possible to change that at random times (or too late). Consequently this event has a hint characteristic: a frontend can only be expected to make a 'best effort' in honouring individual capabilities but there are no guarantees. Only changed capabilities need to be included, all other capabilities keep their values. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." 
}, "type": { "type": "string", "enum": [ "event" ] }, "event": { "type": "string", "enum": [ "capabilities" ] }, "body": { "type": "object", "properties": { "capabilities": { "$ref": "#/definitions/Capabilities", "description": "The set of updated capabilities." } }, "required": [ "capabilities" ] } } __refs__ = set(['body']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, body, seq=-1, **kwargs): """ :param string type: :param string event: :param CapabilitiesEventBody body: :param integer seq: Sequence number. """ self.type = 'event' self.event = 'capabilities' if body is None: self.body = CapabilitiesEventBody() else: self.body = CapabilitiesEventBody(**body) if body.__class__ != CapabilitiesEventBody else body self.seq = seq self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'event': self.event, 'body': self.body.to_dict(), 'seq': self.seq, } dct.update(self.kwargs) return dct @register_request('runInTerminal') @register class RunInTerminalRequest(BaseSchema): """ This request is sent from the debug adapter to the client to run a command in a terminal. This is typically used to launch the debuggee in a terminal provided by the client. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "request" ] }, "command": { "type": "string", "enum": [ "runInTerminal" ] }, "arguments": { "type": "RunInTerminalRequestArguments" } } __refs__ = set(['arguments']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, arguments, seq=-1, **kwargs): """ :param string type: :param string command: :param RunInTerminalRequestArguments arguments: :param integer seq: Sequence number. 
""" self.type = 'request' self.command = 'runInTerminal' if arguments is None: self.arguments = RunInTerminalRequestArguments() else: self.arguments = RunInTerminalRequestArguments(**arguments) if arguments.__class__ != RunInTerminalRequestArguments else arguments self.seq = seq self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'command': self.command, 'arguments': self.arguments.to_dict(), 'seq': self.seq, } dct.update(self.kwargs) return dct @register class RunInTerminalRequestArguments(BaseSchema): """ Arguments for 'runInTerminal' request. Note: automatically generated code. Do not edit manually. """ __props__ = { "kind": { "type": "string", "enum": [ "integrated", "external" ], "description": "What kind of terminal to launch." }, "title": { "type": "string", "description": "Optional title of the terminal." }, "cwd": { "type": "string", "description": "Working directory of the command." }, "args": { "type": "array", "items": { "type": "string" }, "description": "List of arguments. The first argument is the command to run." }, "env": { "type": "object", "description": "Environment key-value pairs that are added to or removed from the default environment.", "additionalProperties": { "type": [ "string", "null" ], "description": "Proper values must be strings. A value of 'null' removes the variable from the environment." } } } __refs__ = set(['env']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, cwd, args, kind=None, title=None, env=None, **kwargs): """ :param string cwd: Working directory of the command. :param array args: List of arguments. The first argument is the command to run. :param string kind: What kind of terminal to launch. :param string title: Optional title of the terminal. :param RunInTerminalRequestArgumentsEnv env: Environment key-value pairs that are added to or removed from the default environment. 
@register_response('runInTerminal')
@register
class RunInTerminalResponse(BaseSchema):
    """
    Response to 'runInTerminal' request.

    Note: automatically generated code. Do not edit manually.
    """

    __props__ = {
        "seq": {
            "type": "integer",
            "description": "Sequence number."
        },
        "type": {
            "type": "string",
            "enum": [
                "response"
            ]
        },
        "request_seq": {
            "type": "integer",
            "description": "Sequence number of the corresponding request."
        },
        "success": {
            "type": "boolean",
            "description": "Outcome of the request."
        },
        "command": {
            "type": "string",
            "description": "The command requested."
        },
        "message": {
            "type": "string",
            "description": "Contains error message if success == false."
        },
        "body": {
            "type": "object",
            "properties": {
                "processId": {
                    "type": "number",
                    "description": "The process ID."
                },
                "shellProcessId": {
                    "type": "number",
                    "description": "The process ID of the terminal shell."
                }
            }
        }
    }
    __refs__ = set(['body'])

    __slots__ = list(__props__.keys()) + ['kwargs']

    def __init__(self, request_seq, success, command, body, seq=-1, message=None, **kwargs):
        """
        :param string type:
        :param integer request_seq: Sequence number of the corresponding request.
        :param boolean success: Outcome of the request.
        :param string command: The command requested.
        :param RunInTerminalResponseBody body:
        :param integer seq: Sequence number.
        :param string message: Contains error message if success == false.
        """
        self.type = 'response'
        self.request_seq = request_seq
        self.success = success
        self.command = command
        # Coerce a plain dict into the typed body wrapper; pass instances
        # through unchanged.
        if body is None:
            self.body = RunInTerminalResponseBody()
        else:
            self.body = RunInTerminalResponseBody(**body) if body.__class__ != RunInTerminalResponseBody else body
        self.seq = seq
        self.message = message
        self.kwargs = kwargs

    def to_dict(self):
        dct = {
            'type': self.type,
            'request_seq': self.request_seq,
            'success': self.success,
            'command': self.command,
            'body': self.body.to_dict(),
            'seq': self.seq,
        }
        if self.message is not None:
            dct['message'] = self.message
        dct.update(self.kwargs)
        return dct


@register_request('initialize')
@register
class InitializeRequest(BaseSchema):
    """
    The 'initialize' request is sent as the first request from the client to the debug adapter
    in order to configure it with client capabilities and to retrieve capabilities from the
    debug adapter.
    Until the debug adapter has responded to with an 'initialize' response, the client must not
    send any additional requests or events to the debug adapter. In addition the debug adapter
    is not allowed to send any requests or events to the client until it has responded with an
    'initialize' response.
    The 'initialize' request may only be sent once.

    Note: automatically generated code. Do not edit manually.
    """

    __props__ = {
        "seq": {
            "type": "integer",
            "description": "Sequence number."
        },
        "type": {
            "type": "string",
            "enum": [
                "request"
            ]
        },
        "command": {
            "type": "string",
            "enum": [
                "initialize"
            ]
        },
        "arguments": {
            "type": "InitializeRequestArguments"
        }
    }
    __refs__ = set(['arguments'])

    __slots__ = list(__props__.keys()) + ['kwargs']

    def __init__(self, arguments, seq=-1, **kwargs):
        """
        :param string type:
        :param string command:
        :param InitializeRequestArguments arguments:
        :param integer seq: Sequence number.
        """
        self.type = 'request'
        self.command = 'initialize'
        if arguments is None:
            self.arguments = InitializeRequestArguments()
        else:
            self.arguments = InitializeRequestArguments(**arguments) if arguments.__class__ != InitializeRequestArguments else arguments
        self.seq = seq
        self.kwargs = kwargs

    def to_dict(self):
        dct = {
            'type': self.type,
            'command': self.command,
            'arguments': self.arguments.to_dict(),
            'seq': self.seq,
        }
        dct.update(self.kwargs)
        return dct
@register
class InitializeRequestArguments(BaseSchema):
    """
    Arguments for 'initialize' request.

    Note: automatically generated code. Do not edit manually.
    """

    __props__ = {
        "clientID": {
            "type": "string",
            "description": "The ID of the (frontend) client using this adapter."
        },
        "clientName": {
            "type": "string",
            "description": "The human readable name of the (frontend) client using this adapter."
        },
        "adapterID": {
            "type": "string",
            "description": "The ID of the debug adapter."
        },
        "locale": {
            "type": "string",
            "description": "The ISO-639 locale of the (frontend) client using this adapter, e.g. en-US or de-CH."
        },
        "linesStartAt1": {
            "type": "boolean",
            "description": "If true all line numbers are 1-based (default)."
        },
        "columnsStartAt1": {
            "type": "boolean",
            "description": "If true all column numbers are 1-based (default)."
        },
        "pathFormat": {
            "type": "string",
            "_enum": [
                "path",
                "uri"
            ],
            "description": "Determines in what format paths are specified. The default is 'path', which is the native format."
        },
        "supportsVariableType": {
            "type": "boolean",
            "description": "Client supports the optional type attribute for variables."
        },
        "supportsVariablePaging": {
            "type": "boolean",
            "description": "Client supports the paging of variables."
        },
        "supportsRunInTerminalRequest": {
            "type": "boolean",
            "description": "Client supports the runInTerminal request."
        }
    }
    __refs__ = set()

    __slots__ = list(__props__.keys()) + ['kwargs']

    def __init__(self, adapterID, clientID=None, clientName=None, locale=None, linesStartAt1=None, columnsStartAt1=None, pathFormat=None, supportsVariableType=None, supportsVariablePaging=None, supportsRunInTerminalRequest=None, **kwargs):
        """
        :param string adapterID: The ID of the debug adapter.
        :param string clientID: The ID of the (frontend) client using this adapter.
        :param string clientName: The human readable name of the (frontend) client using this adapter.
        :param string locale: The ISO-639 locale of the (frontend) client using this adapter, e.g. en-US or de-CH.
        :param boolean linesStartAt1: If true all line numbers are 1-based (default).
        :param boolean columnsStartAt1: If true all column numbers are 1-based (default).
        :param string pathFormat: Determines in what format paths are specified. The default is 'path', which is the native format.
        :param boolean supportsVariableType: Client supports the optional type attribute for variables.
        :param boolean supportsVariablePaging: Client supports the paging of variables.
        :param boolean supportsRunInTerminalRequest: Client supports the runInTerminal request.
        """
        self.adapterID = adapterID
        self.clientID = clientID
        self.clientName = clientName
        self.locale = locale
        self.linesStartAt1 = linesStartAt1
        self.columnsStartAt1 = columnsStartAt1
        self.pathFormat = pathFormat
        self.supportsVariableType = supportsVariableType
        self.supportsVariablePaging = supportsVariablePaging
        self.supportsRunInTerminalRequest = supportsRunInTerminalRequest
        self.kwargs = kwargs

    def to_dict(self):
        # adapterID is the only required property; all capability flags are
        # emitted only when explicitly set.
        dct = {
            'adapterID': self.adapterID,
        }
        if self.clientID is not None:
            dct['clientID'] = self.clientID
        if self.clientName is not None:
            dct['clientName'] = self.clientName
        if self.locale is not None:
            dct['locale'] = self.locale
        if self.linesStartAt1 is not None:
            dct['linesStartAt1'] = self.linesStartAt1
        if self.columnsStartAt1 is not None:
            dct['columnsStartAt1'] = self.columnsStartAt1
        if self.pathFormat is not None:
            dct['pathFormat'] = self.pathFormat
        if self.supportsVariableType is not None:
            dct['supportsVariableType'] = self.supportsVariableType
        if self.supportsVariablePaging is not None:
            dct['supportsVariablePaging'] = self.supportsVariablePaging
        if self.supportsRunInTerminalRequest is not None:
            dct['supportsRunInTerminalRequest'] = self.supportsRunInTerminalRequest
        dct.update(self.kwargs)
        return dct
@register_response('initialize')
@register
class InitializeResponse(BaseSchema):
    """
    Response to 'initialize' request.

    Note: automatically generated code. Do not edit manually.
    """

    __props__ = {
        "seq": {
            "type": "integer",
            "description": "Sequence number."
        },
        "type": {
            "type": "string",
            "enum": [
                "response"
            ]
        },
        "request_seq": {
            "type": "integer",
            "description": "Sequence number of the corresponding request."
        },
        "success": {
            "type": "boolean",
            "description": "Outcome of the request."
        },
        "command": {
            "type": "string",
            "description": "The command requested."
        },
        "message": {
            "type": "string",
            "description": "Contains error message if success == false."
        },
        "body": {
            "description": "The capabilities of this debug adapter.",
            "type": "Capabilities"
        }
    }
    __refs__ = set(['body'])

    __slots__ = list(__props__.keys()) + ['kwargs']

    def __init__(self, request_seq, success, command, seq=-1, message=None, body=None, **kwargs):
        """
        :param string type:
        :param integer request_seq: Sequence number of the corresponding request.
        :param boolean success: Outcome of the request.
        :param string command: The command requested.
        :param integer seq: Sequence number.
        :param string message: Contains error message if success == false.
        :param Capabilities body: The capabilities of this debug adapter.
        """
        self.type = 'response'
        self.request_seq = request_seq
        self.success = success
        self.command = command
        self.seq = seq
        self.message = message
        # Coerce a plain dict into a Capabilities instance; pass instances
        # through unchanged.
        if body is None:
            self.body = Capabilities()
        else:
            self.body = Capabilities(**body) if body.__class__ != Capabilities else body
        self.kwargs = kwargs

    def to_dict(self):
        dct = {
            'type': self.type,
            'request_seq': self.request_seq,
            'success': self.success,
            'command': self.command,
            'seq': self.seq,
        }
        if self.message is not None:
            dct['message'] = self.message
        if self.body is not None:
            dct['body'] = self.body.to_dict()
        dct.update(self.kwargs)
        return dct


@register_request('configurationDone')
@register
class ConfigurationDoneRequest(BaseSchema):
    """
    The client of the debug protocol must send this request at the end of the sequence of
    configuration requests (which was started by the 'initialized' event).

    Note: automatically generated code. Do not edit manually.
    """

    __props__ = {
        "seq": {
            "type": "integer",
            "description": "Sequence number."
        },
        "type": {
            "type": "string",
            "enum": [
                "request"
            ]
        },
        "command": {
            "type": "string",
            "enum": [
                "configurationDone"
            ]
        },
        "arguments": {
            "type": "ConfigurationDoneArguments"
        }
    }
    __refs__ = set(['arguments'])

    __slots__ = list(__props__.keys()) + ['kwargs']

    def __init__(self, seq=-1, arguments=None, **kwargs):
        """
        :param string type:
        :param string command:
        :param integer seq: Sequence number.
        :param ConfigurationDoneArguments arguments:
        """
        self.type = 'request'
        self.command = 'configurationDone'
        self.seq = seq
        if arguments is None:
            self.arguments = ConfigurationDoneArguments()
        else:
            self.arguments = ConfigurationDoneArguments(**arguments) if arguments.__class__ != ConfigurationDoneArguments else arguments
        self.kwargs = kwargs

    def to_dict(self):
        dct = {
            'type': self.type,
            'command': self.command,
            'seq': self.seq,
        }
        if self.arguments is not None:
            dct['arguments'] = self.arguments.to_dict()
        dct.update(self.kwargs)
        return dct
@register
class ConfigurationDoneArguments(BaseSchema):
    """
    Arguments for 'configurationDone' request.

    Note: automatically generated code. Do not edit manually.
    """

    # Deliberately empty: 'configurationDone' takes no declared arguments;
    # anything passed still round-trips through kwargs.
    __props__ = {}
    __refs__ = set()

    __slots__ = list(__props__.keys()) + ['kwargs']

    def __init__(self, **kwargs):
        """

        """
        self.kwargs = kwargs

    def to_dict(self):
        dct = {
        }
        dct.update(self.kwargs)
        return dct


@register_response('configurationDone')
@register
class ConfigurationDoneResponse(BaseSchema):
    """
    Response to 'configurationDone' request. This is just an acknowledgement, so no body field is
    required.

    Note: automatically generated code. Do not edit manually.
    """

    __props__ = {
        "seq": {
            "type": "integer",
            "description": "Sequence number."
        },
        "type": {
            "type": "string",
            "enum": [
                "response"
            ]
        },
        "request_seq": {
            "type": "integer",
            "description": "Sequence number of the corresponding request."
        },
        "success": {
            "type": "boolean",
            "description": "Outcome of the request."
        },
        "command": {
            "type": "string",
            "description": "The command requested."
        },
        "message": {
            "type": "string",
            "description": "Contains error message if success == false."
        },
        "body": {
            "type": [
                "array",
                "boolean",
                "integer",
                "null",
                "number",
                "object",
                "string"
            ],
            "description": "Contains request result if success is true and optional error details if success is false."
        }
    }
    __refs__ = set()

    __slots__ = list(__props__.keys()) + ['kwargs']

    def __init__(self, request_seq, success, command, seq=-1, message=None, body=None, **kwargs):
        """
        :param string type:
        :param integer request_seq: Sequence number of the corresponding request.
        :param boolean success: Outcome of the request.
        :param string command: The command requested.
        :param integer seq: Sequence number.
        :param string message: Contains error message if success == false.
        :param ['array', 'boolean', 'integer', 'null', 'number', 'object', 'string'] body: Contains request result if success is true and optional error details if success is false.
        """
        self.type = 'response'
        self.request_seq = request_seq
        self.success = success
        self.command = command
        self.seq = seq
        self.message = message
        # body is an untyped JSON value here (no schema class), so it is
        # stored and emitted as-is.
        self.body = body
        self.kwargs = kwargs

    def to_dict(self):
        dct = {
            'type': self.type,
            'request_seq': self.request_seq,
            'success': self.success,
            'command': self.command,
            'seq': self.seq,
        }
        if self.message is not None:
            dct['message'] = self.message
        if self.body is not None:
            dct['body'] = self.body
        dct.update(self.kwargs)
        return dct


@register_request('launch')
@register
class LaunchRequest(BaseSchema):
    """
    The launch request is sent from the client to the debug adapter to start the debuggee with or
    without debugging (if 'noDebug' is true). Since launching is debugger/runtime specific, the
    arguments for this request are not part of this specification.

    Note: automatically generated code. Do not edit manually.
    """

    __props__ = {
        "seq": {
            "type": "integer",
            "description": "Sequence number."
        },
        "type": {
            "type": "string",
            "enum": [
                "request"
            ]
        },
        "command": {
            "type": "string",
            "enum": [
                "launch"
            ]
        },
        "arguments": {
            "type": "LaunchRequestArguments"
        }
    }
    __refs__ = set(['arguments'])

    __slots__ = list(__props__.keys()) + ['kwargs']

    def __init__(self, arguments, seq=-1, **kwargs):
        """
        :param string type:
        :param string command:
        :param LaunchRequestArguments arguments:
        :param integer seq: Sequence number.
        """
        self.type = 'request'
        self.command = 'launch'
        if arguments is None:
            self.arguments = LaunchRequestArguments()
        else:
            self.arguments = LaunchRequestArguments(**arguments) if arguments.__class__ != LaunchRequestArguments else arguments
        self.seq = seq
        self.kwargs = kwargs

    def to_dict(self):
        dct = {
            'type': self.type,
            'command': self.command,
            'arguments': self.arguments.to_dict(),
            'seq': self.seq,
        }
        dct.update(self.kwargs)
        return dct
@register
class LaunchRequestArguments(BaseSchema):
    """
    Arguments for 'launch' request. Additional attributes are implementation specific.

    Note: automatically generated code. Do not edit manually.
    """

    __props__ = {
        "noDebug": {
            "type": "boolean",
            "description": "If noDebug is true the launch request should launch the program without enabling debugging."
        },
        "__restart": {
            "type": [
                "array",
                "boolean",
                "integer",
                "null",
                "number",
                "object",
                "string"
            ],
            "description": "Optional data from the previous, restarted session.\nThe data is sent as the 'restart' attribute of the 'terminated' event.\nThe client should leave the data intact."
        }
    }
    __refs__ = set()

    __slots__ = list(__props__.keys()) + ['kwargs']

    # NOTE(review): inside a class body, identifiers with two leading
    # underscores are name-mangled, so the '__restart' parameter/attribute
    # actually compiles as '_LaunchRequestArguments__restart'. A caller
    # passing the wire key '__restart' as a keyword therefore lands in
    # **kwargs instead; serialization still works because to_dict() merges
    # kwargs back in -- TODO confirm against the schema generator.
    def __init__(self, noDebug=None, __restart=None, **kwargs):
        """
        :param boolean noDebug: If noDebug is true the launch request should launch the program without enabling debugging.
        :param ['array', 'boolean', 'integer', 'null', 'number', 'object', 'string'] __restart: Optional data from the previous, restarted session.
        The data is sent as the 'restart' attribute of the 'terminated' event.
        The client should leave the data intact.
        """
        self.noDebug = noDebug
        self.__restart = __restart
        self.kwargs = kwargs

    def to_dict(self):
        dct = {
        }
        if self.noDebug is not None:
            dct['noDebug'] = self.noDebug
        if self.__restart is not None:
            dct['__restart'] = self.__restart
        dct.update(self.kwargs)
        return dct


@register_response('launch')
@register
class LaunchResponse(BaseSchema):
    """
    Response to 'launch' request. This is just an acknowledgement, so no body field is required.

    Note: automatically generated code. Do not edit manually.
    """

    __props__ = {
        "seq": {
            "type": "integer",
            "description": "Sequence number."
        },
        "type": {
            "type": "string",
            "enum": [
                "response"
            ]
        },
        "request_seq": {
            "type": "integer",
            "description": "Sequence number of the corresponding request."
        },
        "success": {
            "type": "boolean",
            "description": "Outcome of the request."
        },
        "command": {
            "type": "string",
            "description": "The command requested."
        },
        "message": {
            "type": "string",
            "description": "Contains error message if success == false."
        },
        "body": {
            "type": [
                "array",
                "boolean",
                "integer",
                "null",
                "number",
                "object",
                "string"
            ],
            "description": "Contains request result if success is true and optional error details if success is false."
        }
    }
    __refs__ = set()

    __slots__ = list(__props__.keys()) + ['kwargs']

    def __init__(self, request_seq, success, command, seq=-1, message=None, body=None, **kwargs):
        """
        :param string type:
        :param integer request_seq: Sequence number of the corresponding request.
        :param boolean success: Outcome of the request.
        :param string command: The command requested.
        :param integer seq: Sequence number.
        :param string message: Contains error message if success == false.
        :param ['array', 'boolean', 'integer', 'null', 'number', 'object', 'string'] body: Contains request result if success is true and optional error details if success is false.
        """
        self.type = 'response'
        self.request_seq = request_seq
        self.success = success
        self.command = command
        self.seq = seq
        self.message = message
        self.body = body
        self.kwargs = kwargs

    def to_dict(self):
        dct = {
            'type': self.type,
            'request_seq': self.request_seq,
            'success': self.success,
            'command': self.command,
            'seq': self.seq,
        }
        if self.message is not None:
            dct['message'] = self.message
        if self.body is not None:
            dct['body'] = self.body
        dct.update(self.kwargs)
        return dct
@register_request('attach')
@register
class AttachRequest(BaseSchema):
    """
    The attach request is sent from the client to the debug adapter to attach to a debuggee that
    is already running. Since attaching is debugger/runtime specific, the arguments for this
    request are not part of this specification.

    Note: automatically generated code. Do not edit manually.
    """

    __props__ = {
        "seq": {
            "type": "integer",
            "description": "Sequence number."
        },
        "type": {
            "type": "string",
            "enum": [
                "request"
            ]
        },
        "command": {
            "type": "string",
            "enum": [
                "attach"
            ]
        },
        "arguments": {
            "type": "AttachRequestArguments"
        }
    }
    __refs__ = set(['arguments'])

    __slots__ = list(__props__.keys()) + ['kwargs']

    def __init__(self, arguments, seq=-1, **kwargs):
        """
        :param string type:
        :param string command:
        :param AttachRequestArguments arguments:
        :param integer seq: Sequence number.
        """
        self.type = 'request'
        self.command = 'attach'
        if arguments is None:
            self.arguments = AttachRequestArguments()
        else:
            self.arguments = AttachRequestArguments(**arguments) if arguments.__class__ != AttachRequestArguments else arguments
        self.seq = seq
        self.kwargs = kwargs

    def to_dict(self):
        dct = {
            'type': self.type,
            'command': self.command,
            'arguments': self.arguments.to_dict(),
            'seq': self.seq,
        }
        dct.update(self.kwargs)
        return dct


@register
class AttachRequestArguments(BaseSchema):
    """
    Arguments for 'attach' request. Additional attributes are implementation specific.

    Note: automatically generated code. Do not edit manually.
    """

    __props__ = {
        "__restart": {
            "type": [
                "array",
                "boolean",
                "integer",
                "null",
                "number",
                "object",
                "string"
            ],
            "description": "Optional data from the previous, restarted session.\nThe data is sent as the 'restart' attribute of the 'terminated' event.\nThe client should leave the data intact."
        }
    }
    __refs__ = set()

    __slots__ = list(__props__.keys()) + ['kwargs']

    # NOTE(review): '__restart' is name-mangled to
    # '_AttachRequestArguments__restart' inside this class body (same caveat
    # as LaunchRequestArguments); a keyword '__restart' from a caller lands
    # in **kwargs and is still serialized via the kwargs passthrough.
    def __init__(self, __restart=None, **kwargs):
        """
        :param ['array', 'boolean', 'integer', 'null', 'number', 'object', 'string'] __restart: Optional data from the previous, restarted session.
        The data is sent as the 'restart' attribute of the 'terminated' event.
        The client should leave the data intact.
        """
        self.__restart = __restart
        self.kwargs = kwargs

    def to_dict(self):
        dct = {
        }
        if self.__restart is not None:
            dct['__restart'] = self.__restart
        dct.update(self.kwargs)
        return dct


@register_response('attach')
@register
class AttachResponse(BaseSchema):
    """
    Response to 'attach' request. This is just an acknowledgement, so no body field is required.

    Note: automatically generated code. Do not edit manually.
    """

    __props__ = {
        "seq": {
            "type": "integer",
            "description": "Sequence number."
        },
        "type": {
            "type": "string",
            "enum": [
                "response"
            ]
        },
        "request_seq": {
            "type": "integer",
            "description": "Sequence number of the corresponding request."
        },
        "success": {
            "type": "boolean",
            "description": "Outcome of the request."
        },
        "command": {
            "type": "string",
            "description": "The command requested."
        },
        "message": {
            "type": "string",
            "description": "Contains error message if success == false."
        },
        "body": {
            "type": [
                "array",
                "boolean",
                "integer",
                "null",
                "number",
                "object",
                "string"
            ],
            "description": "Contains request result if success is true and optional error details if success is false."
        }
    }
    __refs__ = set()

    __slots__ = list(__props__.keys()) + ['kwargs']

    def __init__(self, request_seq, success, command, seq=-1, message=None, body=None, **kwargs):
        """
        :param string type:
        :param integer request_seq: Sequence number of the corresponding request.
        :param boolean success: Outcome of the request.
        :param string command: The command requested.
        :param integer seq: Sequence number.
        :param string message: Contains error message if success == false.
        :param ['array', 'boolean', 'integer', 'null', 'number', 'object', 'string'] body: Contains request result if success is true and optional error details if success is false.
        """
        self.type = 'response'
        self.request_seq = request_seq
        self.success = success
        self.command = command
        self.seq = seq
        self.message = message
        self.body = body
        self.kwargs = kwargs

    def to_dict(self):
        dct = {
            'type': self.type,
            'request_seq': self.request_seq,
            'success': self.success,
            'command': self.command,
            'seq': self.seq,
        }
        if self.message is not None:
            dct['message'] = self.message
        if self.body is not None:
            dct['body'] = self.body
        dct.update(self.kwargs)
        return dct
@register_request('restart')
@register
class RestartRequest(BaseSchema):
    """
    Restarts a debug session. If the capability 'supportsRestartRequest' is missing or has the
    value false, the client will implement 'restart' by terminating the debug adapter first and
    then launching it anew.
    A debug adapter can override this default behaviour by implementing a restart request and
    setting the capability 'supportsRestartRequest' to true.

    Note: automatically generated code. Do not edit manually.
    """

    __props__ = {
        "seq": {
            "type": "integer",
            "description": "Sequence number."
        },
        "type": {
            "type": "string",
            "enum": [
                "request"
            ]
        },
        "command": {
            "type": "string",
            "enum": [
                "restart"
            ]
        },
        "arguments": {
            "type": "RestartArguments"
        }
    }
    __refs__ = set(['arguments'])

    __slots__ = list(__props__.keys()) + ['kwargs']

    def __init__(self, seq=-1, arguments=None, **kwargs):
        """
        :param string type:
        :param string command:
        :param integer seq: Sequence number.
        :param RestartArguments arguments:
        """
        self.type = 'request'
        self.command = 'restart'
        self.seq = seq
        if arguments is None:
            self.arguments = RestartArguments()
        else:
            self.arguments = RestartArguments(**arguments) if arguments.__class__ != RestartArguments else arguments
        self.kwargs = kwargs

    def to_dict(self):
        dct = {
            'type': self.type,
            'command': self.command,
            'seq': self.seq,
        }
        if self.arguments is not None:
            dct['arguments'] = self.arguments.to_dict()
        dct.update(self.kwargs)
        return dct


@register
class RestartArguments(BaseSchema):
    """
    Arguments for 'restart' request.

    Note: automatically generated code. Do not edit manually.
    """

    # Deliberately empty: 'restart' takes no declared arguments.
    __props__ = {}
    __refs__ = set()

    __slots__ = list(__props__.keys()) + ['kwargs']

    def __init__(self, **kwargs):
        """

        """
        self.kwargs = kwargs

    def to_dict(self):
        dct = {
        }
        dct.update(self.kwargs)
        return dct


@register_response('restart')
@register
class RestartResponse(BaseSchema):
    """
    Response to 'restart' request. This is just an acknowledgement, so no body field is required.

    Note: automatically generated code. Do not edit manually.
    """

    __props__ = {
        "seq": {
            "type": "integer",
            "description": "Sequence number."
        },
        "type": {
            "type": "string",
            "enum": [
                "response"
            ]
        },
        "request_seq": {
            "type": "integer",
            "description": "Sequence number of the corresponding request."
        },
        "success": {
            "type": "boolean",
            "description": "Outcome of the request."
        },
        "command": {
            "type": "string",
            "description": "The command requested."
        },
        "message": {
            "type": "string",
            "description": "Contains error message if success == false."
        },
        "body": {
            "type": [
                "array",
                "boolean",
                "integer",
                "null",
                "number",
                "object",
                "string"
            ],
            "description": "Contains request result if success is true and optional error details if success is false."
        }
    }
    __refs__ = set()

    __slots__ = list(__props__.keys()) + ['kwargs']

    def __init__(self, request_seq, success, command, seq=-1, message=None, body=None, **kwargs):
        """
        :param string type:
        :param integer request_seq: Sequence number of the corresponding request.
        :param boolean success: Outcome of the request.
        :param string command: The command requested.
        :param integer seq: Sequence number.
        :param string message: Contains error message if success == false.
        :param ['array', 'boolean', 'integer', 'null', 'number', 'object', 'string'] body: Contains request result if success is true and optional error details if success is false.
        """
        self.type = 'response'
        self.request_seq = request_seq
        self.success = success
        self.command = command
        self.seq = seq
        self.message = message
        self.body = body
        self.kwargs = kwargs

    def to_dict(self):
        dct = {
            'type': self.type,
            'request_seq': self.request_seq,
            'success': self.success,
            'command': self.command,
            'seq': self.seq,
        }
        if self.message is not None:
            dct['message'] = self.message
        if self.body is not None:
            dct['body'] = self.body
        dct.update(self.kwargs)
        return dct
@register_request('disconnect')
@register
class DisconnectRequest(BaseSchema):
    """
    The 'disconnect' request is sent from the client to the debug adapter in order to stop
    debugging. It asks the debug adapter to disconnect from the debuggee and to terminate the
    debug adapter. If the debuggee has been started with the 'launch' request, the 'disconnect'
    request terminates the debuggee. If the 'attach' request was used to connect to the
    debuggee, 'disconnect' does not terminate the debuggee. This behavior can be controlled
    with the 'terminateDebuggee' argument (if supported by the debug adapter).

    Note: automatically generated code. Do not edit manually.
    """

    __props__ = {
        "seq": {
            "type": "integer",
            "description": "Sequence number."
        },
        "type": {
            "type": "string",
            "enum": [
                "request"
            ]
        },
        "command": {
            "type": "string",
            "enum": [
                "disconnect"
            ]
        },
        "arguments": {
            "type": "DisconnectArguments"
        }
    }
    __refs__ = set(['arguments'])

    __slots__ = list(__props__.keys()) + ['kwargs']

    def __init__(self, seq=-1, arguments=None, **kwargs):
        """
        :param string type:
        :param string command:
        :param integer seq: Sequence number.
        :param DisconnectArguments arguments:
        """
        self.type = 'request'
        self.command = 'disconnect'
        self.seq = seq
        if arguments is None:
            self.arguments = DisconnectArguments()
        else:
            self.arguments = DisconnectArguments(**arguments) if arguments.__class__ != DisconnectArguments else arguments
        self.kwargs = kwargs

    def to_dict(self):
        dct = {
            'type': self.type,
            'command': self.command,
            'seq': self.seq,
        }
        if self.arguments is not None:
            dct['arguments'] = self.arguments.to_dict()
        dct.update(self.kwargs)
        return dct


@register
class DisconnectArguments(BaseSchema):
    """
    Arguments for 'disconnect' request.

    Note: automatically generated code. Do not edit manually.
    """

    __props__ = {
        "restart": {
            "type": "boolean",
            "description": "A value of true indicates that this 'disconnect' request is part of a restart sequence."
        },
        "terminateDebuggee": {
            "type": "boolean",
            "description": "Indicates whether the debuggee should be terminated when the debugger is disconnected.\nIf unspecified, the debug adapter is free to do whatever it thinks is best.\nA client can only rely on this attribute being properly honored if a debug adapter returns true for the 'supportTerminateDebuggee' capability."
        }
    }
    __refs__ = set()

    __slots__ = list(__props__.keys()) + ['kwargs']

    def __init__(self, restart=None, terminateDebuggee=None, **kwargs):
        """
        :param boolean restart: A value of true indicates that this 'disconnect' request is part of a restart sequence.
        :param boolean terminateDebuggee: Indicates whether the debuggee should be terminated when the debugger is disconnected.
        If unspecified, the debug adapter is free to do whatever it thinks is best.
        A client can only rely on this attribute being properly honored if a debug adapter returns true for the 'supportTerminateDebuggee' capability.
        """
        self.restart = restart
        self.terminateDebuggee = terminateDebuggee
        self.kwargs = kwargs

    def to_dict(self):
        # Both properties are optional; emitted only when explicitly set.
        dct = {
        }
        if self.restart is not None:
            dct['restart'] = self.restart
        if self.terminateDebuggee is not None:
            dct['terminateDebuggee'] = self.terminateDebuggee
        dct.update(self.kwargs)
        return dct
@register_response('disconnect')
@register
class DisconnectResponse(BaseSchema):
    """
    Response to 'disconnect' request. This is just an acknowledgement, so no body field is
    required.

    Note: automatically generated code. Do not edit manually.
    """

    __props__ = {
        "seq": {
            "type": "integer",
            "description": "Sequence number."
        },
        "type": {
            "type": "string",
            "enum": [
                "response"
            ]
        },
        "request_seq": {
            "type": "integer",
            "description": "Sequence number of the corresponding request."
        },
        "success": {
            "type": "boolean",
            "description": "Outcome of the request."
        },
        "command": {
            "type": "string",
            "description": "The command requested."
        },
        "message": {
            "type": "string",
            "description": "Contains error message if success == false."
        },
        "body": {
            "type": [
                "array",
                "boolean",
                "integer",
                "null",
                "number",
                "object",
                "string"
            ],
            "description": "Contains request result if success is true and optional error details if success is false."
        }
    }
    __refs__ = set()

    __slots__ = list(__props__.keys()) + ['kwargs']

    def __init__(self, request_seq, success, command, seq=-1, message=None, body=None, **kwargs):
        """
        :param string type:
        :param integer request_seq: Sequence number of the corresponding request.
        :param boolean success: Outcome of the request.
        :param string command: The command requested.
        :param integer seq: Sequence number.
        :param string message: Contains error message if success == false.
        :param ['array', 'boolean', 'integer', 'null', 'number', 'object', 'string'] body: Contains request result if success is true and optional error details if success is false.
        """
        self.type = 'response'
        self.request_seq = request_seq
        self.success = success
        self.command = command
        self.seq = seq
        self.message = message
        self.body = body
        self.kwargs = kwargs

    def to_dict(self):
        dct = {
            'type': self.type,
            'request_seq': self.request_seq,
            'success': self.success,
            'command': self.command,
            'seq': self.seq,
        }
        if self.message is not None:
            dct['message'] = self.message
        if self.body is not None:
            dct['body'] = self.body
        dct.update(self.kwargs)
        return dct


@register_request('terminate')
@register
class TerminateRequest(BaseSchema):
    """
    The 'terminate' request is sent from the client to the debug adapter in order to give the
    debuggee a chance for terminating itself.

    Note: automatically generated code. Do not edit manually.
    """

    __props__ = {
        "seq": {
            "type": "integer",
            "description": "Sequence number."
        },
        "type": {
            "type": "string",
            "enum": [
                "request"
            ]
        },
        "command": {
            "type": "string",
            "enum": [
                "terminate"
            ]
        },
        "arguments": {
            "type": "TerminateArguments"
        }
    }
    __refs__ = set(['arguments'])

    __slots__ = list(__props__.keys()) + ['kwargs']

    def __init__(self, seq=-1, arguments=None, **kwargs):
        """
        :param string type:
        :param string command:
        :param integer seq: Sequence number.
        :param TerminateArguments arguments:
        """
        self.type = 'request'
        self.command = 'terminate'
        self.seq = seq
        if arguments is None:
            self.arguments = TerminateArguments()
        else:
            self.arguments = TerminateArguments(**arguments) if arguments.__class__ != TerminateArguments else arguments
        self.kwargs = kwargs

    def to_dict(self):
        dct = {
            'type': self.type,
            'command': self.command,
            'seq': self.seq,
        }
        if self.arguments is not None:
            dct['arguments'] = self.arguments.to_dict()
        dct.update(self.kwargs)
        return dct
        :param TerminateArguments arguments: 
        """
        self.type = 'request'
        self.command = 'terminate'
        self.seq = seq
        if arguments is None:
            self.arguments = TerminateArguments()
        else:
            # 'arguments' may arrive either as a plain dict (decoded from the
            # JSON wire format) or as an already-built schema object: wrap
            # dicts in the schema class, pass instances through unchanged.
            self.arguments = TerminateArguments(**arguments) if arguments.__class__ !=  TerminateArguments else arguments
        # Unknown/extra keys are retained so that to_dict() round-trips them.
        self.kwargs = kwargs
    
    
    def to_dict(self):
        dct = {
             'type': self.type,
             'command': self.command,
             'seq': self.seq,
        }
        # 'arguments' is optional for a 'terminate' request; serialize it only
        # when present.
        if self.arguments is not None:
            dct['arguments'] = self.arguments.to_dict()
        dct.update(self.kwargs)
        return dct


@register
class TerminateArguments(BaseSchema):
    """
    Arguments for 'terminate' request.

    Note: automatically generated code. Do not edit manually.
    """

    __props__ = {
        "restart": {
            "type": "boolean",
            "description": "A value of true indicates that this 'terminate' request is part of a restart sequence."
        }
    }
    __refs__ = set()

    # Instances are limited to the declared schema properties plus the
    # 'kwargs' catch-all used to round-trip unknown keys.
    __slots__ = list(__props__.keys()) + ['kwargs']

    def __init__(self, restart=None, **kwargs):
        """
        :param boolean restart: A value of true indicates that this 'terminate' request is part of a restart sequence.
        """
        self.restart = restart
        self.kwargs = kwargs
    
    
    def to_dict(self):
        dct = {
        }
        # Optional field: emitted only when explicitly set.
        if self.restart is not None:
            dct['restart'] = self.restart
        dct.update(self.kwargs)
        return dct


@register_response('terminate')
@register
class TerminateResponse(BaseSchema):
    """
    Response to 'terminate' request. This is just an acknowledgement, so no body field is required.

    Note: automatically generated code. Do not edit manually.
    """

    __props__ = {
        "seq": {
            "type": "integer",
            "description": "Sequence number."
        },
        "type": {
            "type": "string",
            "enum": [
                "response"
            ]
        },
        "request_seq": {
            "type": "integer",
            "description": "Sequence number of the corresponding request."
        },
        "success": {
            "type": "boolean",
            "description": "Outcome of the request."
        },
        "command": {
            "type": "string",
            "description": "The command requested."
        },
        "message": {
            "type": "string",
            "description": "Contains error message if success == false."
}, "body": { "type": [ "array", "boolean", "integer", "null", "number", "object", "string" ], "description": "Contains request result if success is true and optional error details if success is false." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, request_seq, success, command, seq=-1, message=None, body=None, **kwargs): """ :param string type: :param integer request_seq: Sequence number of the corresponding request. :param boolean success: Outcome of the request. :param string command: The command requested. :param integer seq: Sequence number. :param string message: Contains error message if success == false. :param ['array', 'boolean', 'integer', 'null', 'number', 'object', 'string'] body: Contains request result if success is true and optional error details if success is false. """ self.type = 'response' self.request_seq = request_seq self.success = success self.command = command self.seq = seq self.message = message self.body = body self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'request_seq': self.request_seq, 'success': self.success, 'command': self.command, 'seq': self.seq, } if self.message is not None: dct['message'] = self.message if self.body is not None: dct['body'] = self.body dct.update(self.kwargs) return dct @register_request('setBreakpoints') @register class SetBreakpointsRequest(BaseSchema): """ Sets multiple breakpoints for a single source and clears all previous breakpoints in that source. To clear all breakpoint for a source, specify an empty array. When a breakpoint is hit, a 'stopped' event (with reason 'breakpoint') is generated. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." 
}, "type": { "type": "string", "enum": [ "request" ] }, "command": { "type": "string", "enum": [ "setBreakpoints" ] }, "arguments": { "type": "SetBreakpointsArguments" } } __refs__ = set(['arguments']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, arguments, seq=-1, **kwargs): """ :param string type: :param string command: :param SetBreakpointsArguments arguments: :param integer seq: Sequence number. """ self.type = 'request' self.command = 'setBreakpoints' if arguments is None: self.arguments = SetBreakpointsArguments() else: self.arguments = SetBreakpointsArguments(**arguments) if arguments.__class__ != SetBreakpointsArguments else arguments self.seq = seq self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'command': self.command, 'arguments': self.arguments.to_dict(), 'seq': self.seq, } dct.update(self.kwargs) return dct @register class SetBreakpointsArguments(BaseSchema): """ Arguments for 'setBreakpoints' request. Note: automatically generated code. Do not edit manually. """ __props__ = { "source": { "description": "The source location of the breakpoints; either 'source.path' or 'source.reference' must be specified.", "type": "Source" }, "breakpoints": { "type": "array", "items": { "$ref": "#/definitions/SourceBreakpoint" }, "description": "The code locations of the breakpoints." }, "lines": { "type": "array", "items": { "type": "integer" }, "description": "Deprecated: The code locations of the breakpoints." }, "sourceModified": { "type": "boolean", "description": "A value of true indicates that the underlying source has been modified which results in new breakpoint locations." } } __refs__ = set(['source']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, source, breakpoints=None, lines=None, sourceModified=None, **kwargs): """ :param Source source: The source location of the breakpoints; either 'source.path' or 'source.reference' must be specified. 
:param array breakpoints: The code locations of the breakpoints. :param array lines: Deprecated: The code locations of the breakpoints. :param boolean sourceModified: A value of true indicates that the underlying source has been modified which results in new breakpoint locations. """ if source is None: self.source = Source() else: self.source = Source(**source) if source.__class__ != Source else source self.breakpoints = breakpoints self.lines = lines self.sourceModified = sourceModified self.kwargs = kwargs def to_dict(self): dct = { 'source': self.source.to_dict(), } if self.breakpoints is not None: dct['breakpoints'] = self.breakpoints if self.lines is not None: dct['lines'] = self.lines if self.sourceModified is not None: dct['sourceModified'] = self.sourceModified dct.update(self.kwargs) return dct @register_response('setBreakpoints') @register class SetBreakpointsResponse(BaseSchema): """ Response to 'setBreakpoints' request. Returned is information about each breakpoint created by this request. This includes the actual code location and whether the breakpoint could be verified. The breakpoints returned are in the same order as the elements of the 'breakpoints' (or the deprecated 'lines') array in the arguments. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "response" ] }, "request_seq": { "type": "integer", "description": "Sequence number of the corresponding request." }, "success": { "type": "boolean", "description": "Outcome of the request." }, "command": { "type": "string", "description": "The command requested." }, "message": { "type": "string", "description": "Contains error message if success == false." }, "body": { "type": "object", "properties": { "breakpoints": { "type": "array", "items": { "$ref": "#/definitions/Breakpoint" }, "description": "Information about the breakpoints. 
The array elements are in the same order as the elements of the 'breakpoints' (or the deprecated 'lines') array in the arguments." } }, "required": [ "breakpoints" ] } } __refs__ = set(['body']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, request_seq, success, command, body, seq=-1, message=None, **kwargs): """ :param string type: :param integer request_seq: Sequence number of the corresponding request. :param boolean success: Outcome of the request. :param string command: The command requested. :param SetBreakpointsResponseBody body: :param integer seq: Sequence number. :param string message: Contains error message if success == false. """ self.type = 'response' self.request_seq = request_seq self.success = success self.command = command if body is None: self.body = SetBreakpointsResponseBody() else: self.body = SetBreakpointsResponseBody(**body) if body.__class__ != SetBreakpointsResponseBody else body self.seq = seq self.message = message self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'request_seq': self.request_seq, 'success': self.success, 'command': self.command, 'body': self.body.to_dict(), 'seq': self.seq, } if self.message is not None: dct['message'] = self.message dct.update(self.kwargs) return dct @register_request('setFunctionBreakpoints') @register class SetFunctionBreakpointsRequest(BaseSchema): """ Sets multiple function breakpoints and clears all previous function breakpoints. To clear all function breakpoint, specify an empty array. When a function breakpoint is hit, a 'stopped' event (event type 'function breakpoint') is generated. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." 
}, "type": { "type": "string", "enum": [ "request" ] }, "command": { "type": "string", "enum": [ "setFunctionBreakpoints" ] }, "arguments": { "type": "SetFunctionBreakpointsArguments" } } __refs__ = set(['arguments']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, arguments, seq=-1, **kwargs): """ :param string type: :param string command: :param SetFunctionBreakpointsArguments arguments: :param integer seq: Sequence number. """ self.type = 'request' self.command = 'setFunctionBreakpoints' if arguments is None: self.arguments = SetFunctionBreakpointsArguments() else: self.arguments = SetFunctionBreakpointsArguments(**arguments) if arguments.__class__ != SetFunctionBreakpointsArguments else arguments self.seq = seq self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'command': self.command, 'arguments': self.arguments.to_dict(), 'seq': self.seq, } dct.update(self.kwargs) return dct @register class SetFunctionBreakpointsArguments(BaseSchema): """ Arguments for 'setFunctionBreakpoints' request. Note: automatically generated code. Do not edit manually. """ __props__ = { "breakpoints": { "type": "array", "items": { "$ref": "#/definitions/FunctionBreakpoint" }, "description": "The function names of the breakpoints." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, breakpoints, **kwargs): """ :param array breakpoints: The function names of the breakpoints. """ self.breakpoints = breakpoints self.kwargs = kwargs def to_dict(self): dct = { 'breakpoints': self.breakpoints, } dct.update(self.kwargs) return dct @register_response('setFunctionBreakpoints') @register class SetFunctionBreakpointsResponse(BaseSchema): """ Response to 'setFunctionBreakpoints' request. Returned is information about each breakpoint created by this request. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." 
}, "type": { "type": "string", "enum": [ "response" ] }, "request_seq": { "type": "integer", "description": "Sequence number of the corresponding request." }, "success": { "type": "boolean", "description": "Outcome of the request." }, "command": { "type": "string", "description": "The command requested." }, "message": { "type": "string", "description": "Contains error message if success == false." }, "body": { "type": "object", "properties": { "breakpoints": { "type": "array", "items": { "$ref": "#/definitions/Breakpoint" }, "description": "Information about the breakpoints. The array elements correspond to the elements of the 'breakpoints' array." } }, "required": [ "breakpoints" ] } } __refs__ = set(['body']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, request_seq, success, command, body, seq=-1, message=None, **kwargs): """ :param string type: :param integer request_seq: Sequence number of the corresponding request. :param boolean success: Outcome of the request. :param string command: The command requested. :param SetFunctionBreakpointsResponseBody body: :param integer seq: Sequence number. :param string message: Contains error message if success == false. 
""" self.type = 'response' self.request_seq = request_seq self.success = success self.command = command if body is None: self.body = SetFunctionBreakpointsResponseBody() else: self.body = SetFunctionBreakpointsResponseBody(**body) if body.__class__ != SetFunctionBreakpointsResponseBody else body self.seq = seq self.message = message self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'request_seq': self.request_seq, 'success': self.success, 'command': self.command, 'body': self.body.to_dict(), 'seq': self.seq, } if self.message is not None: dct['message'] = self.message dct.update(self.kwargs) return dct @register_request('setExceptionBreakpoints') @register class SetExceptionBreakpointsRequest(BaseSchema): """ The request configures the debuggers response to thrown exceptions. If an exception is configured to break, a 'stopped' event is fired (with reason 'exception'). Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "request" ] }, "command": { "type": "string", "enum": [ "setExceptionBreakpoints" ] }, "arguments": { "type": "SetExceptionBreakpointsArguments" } } __refs__ = set(['arguments']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, arguments, seq=-1, **kwargs): """ :param string type: :param string command: :param SetExceptionBreakpointsArguments arguments: :param integer seq: Sequence number. 
""" self.type = 'request' self.command = 'setExceptionBreakpoints' if arguments is None: self.arguments = SetExceptionBreakpointsArguments() else: self.arguments = SetExceptionBreakpointsArguments(**arguments) if arguments.__class__ != SetExceptionBreakpointsArguments else arguments self.seq = seq self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'command': self.command, 'arguments': self.arguments.to_dict(), 'seq': self.seq, } dct.update(self.kwargs) return dct @register class SetExceptionBreakpointsArguments(BaseSchema): """ Arguments for 'setExceptionBreakpoints' request. Note: automatically generated code. Do not edit manually. """ __props__ = { "filters": { "type": "array", "items": { "type": "string" }, "description": "IDs of checked exception options. The set of IDs is returned via the 'exceptionBreakpointFilters' capability." }, "exceptionOptions": { "type": "array", "items": { "$ref": "#/definitions/ExceptionOptions" }, "description": "Configuration options for selected exceptions." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, filters, exceptionOptions=None, **kwargs): """ :param array filters: IDs of checked exception options. The set of IDs is returned via the 'exceptionBreakpointFilters' capability. :param array exceptionOptions: Configuration options for selected exceptions. """ self.filters = filters self.exceptionOptions = exceptionOptions self.kwargs = kwargs def to_dict(self): dct = { 'filters': self.filters, } if self.exceptionOptions is not None: dct['exceptionOptions'] = self.exceptionOptions dct.update(self.kwargs) return dct @register_response('setExceptionBreakpoints') @register class SetExceptionBreakpointsResponse(BaseSchema): """ Response to 'setExceptionBreakpoints' request. This is just an acknowledgement, so no body field is required. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." 
}, "type": { "type": "string", "enum": [ "response" ] }, "request_seq": { "type": "integer", "description": "Sequence number of the corresponding request." }, "success": { "type": "boolean", "description": "Outcome of the request." }, "command": { "type": "string", "description": "The command requested." }, "message": { "type": "string", "description": "Contains error message if success == false." }, "body": { "type": [ "array", "boolean", "integer", "null", "number", "object", "string" ], "description": "Contains request result if success is true and optional error details if success is false." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, request_seq, success, command, seq=-1, message=None, body=None, **kwargs): """ :param string type: :param integer request_seq: Sequence number of the corresponding request. :param boolean success: Outcome of the request. :param string command: The command requested. :param integer seq: Sequence number. :param string message: Contains error message if success == false. :param ['array', 'boolean', 'integer', 'null', 'number', 'object', 'string'] body: Contains request result if success is true and optional error details if success is false. """ self.type = 'response' self.request_seq = request_seq self.success = success self.command = command self.seq = seq self.message = message self.body = body self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'request_seq': self.request_seq, 'success': self.success, 'command': self.command, 'seq': self.seq, } if self.message is not None: dct['message'] = self.message if self.body is not None: dct['body'] = self.body dct.update(self.kwargs) return dct @register_request('continue') @register class ContinueRequest(BaseSchema): """ The request starts the debuggee to run again. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." 
}, "type": { "type": "string", "enum": [ "request" ] }, "command": { "type": "string", "enum": [ "continue" ] }, "arguments": { "type": "ContinueArguments" } } __refs__ = set(['arguments']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, arguments, seq=-1, **kwargs): """ :param string type: :param string command: :param ContinueArguments arguments: :param integer seq: Sequence number. """ self.type = 'request' self.command = 'continue' if arguments is None: self.arguments = ContinueArguments() else: self.arguments = ContinueArguments(**arguments) if arguments.__class__ != ContinueArguments else arguments self.seq = seq self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'command': self.command, 'arguments': self.arguments.to_dict(), 'seq': self.seq, } dct.update(self.kwargs) return dct @register class ContinueArguments(BaseSchema): """ Arguments for 'continue' request. Note: automatically generated code. Do not edit manually. """ __props__ = { "threadId": { "type": "integer", "description": "Continue execution for the specified thread (if possible). If the backend cannot continue on a single thread but will continue on all threads, it should set the 'allThreadsContinued' attribute in the response to true." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, threadId, **kwargs): """ :param integer threadId: Continue execution for the specified thread (if possible). If the backend cannot continue on a single thread but will continue on all threads, it should set the 'allThreadsContinued' attribute in the response to true. """ self.threadId = threadId self.kwargs = kwargs def to_dict(self): dct = { 'threadId': self.threadId, } dct.update(self.kwargs) return dct @register_response('continue') @register class ContinueResponse(BaseSchema): """ Response to 'continue' request. Note: automatically generated code. Do not edit manually. 
""" __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "response" ] }, "request_seq": { "type": "integer", "description": "Sequence number of the corresponding request." }, "success": { "type": "boolean", "description": "Outcome of the request." }, "command": { "type": "string", "description": "The command requested." }, "message": { "type": "string", "description": "Contains error message if success == false." }, "body": { "type": "object", "properties": { "allThreadsContinued": { "type": "boolean", "description": "If true, the 'continue' request has ignored the specified thread and continued all threads instead. If this attribute is missing a value of 'true' is assumed for backward compatibility." } } } } __refs__ = set(['body']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, request_seq, success, command, body, seq=-1, message=None, **kwargs): """ :param string type: :param integer request_seq: Sequence number of the corresponding request. :param boolean success: Outcome of the request. :param string command: The command requested. :param ContinueResponseBody body: :param integer seq: Sequence number. :param string message: Contains error message if success == false. """ self.type = 'response' self.request_seq = request_seq self.success = success self.command = command if body is None: self.body = ContinueResponseBody() else: self.body = ContinueResponseBody(**body) if body.__class__ != ContinueResponseBody else body self.seq = seq self.message = message self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'request_seq': self.request_seq, 'success': self.success, 'command': self.command, 'body': self.body.to_dict(), 'seq': self.seq, } if self.message is not None: dct['message'] = self.message dct.update(self.kwargs) return dct @register_request('next') @register class NextRequest(BaseSchema): """ The request starts the debuggee to run again for one step. 
    The debug adapter first sends the response and then a 'stopped' event (with reason 'step') after
    the step has completed.

    Note: automatically generated code. Do not edit manually.
    """

    __props__ = {
        "seq": {
            "type": "integer",
            "description": "Sequence number."
        },
        "type": {
            "type": "string",
            "enum": [
                "request"
            ]
        },
        "command": {
            "type": "string",
            "enum": [
                "next"
            ]
        },
        "arguments": {
            "type": "NextArguments"
        }
    }
    __refs__ = set(['arguments'])

    # Instances are limited to the declared schema properties plus the
    # 'kwargs' catch-all used to round-trip unknown keys.
    __slots__ = list(__props__.keys()) + ['kwargs']

    def __init__(self, arguments, seq=-1, **kwargs):
        """
        :param string type: 
        :param string command: 
        :param NextArguments arguments: 
        :param integer seq: Sequence number.
        """
        self.type = 'request'
        self.command = 'next'
        if arguments is None:
            self.arguments = NextArguments()
        else:
            # 'arguments' may arrive either as a plain dict (decoded from the
            # JSON wire format) or as an already-built schema object: wrap
            # dicts in the schema class, pass instances through unchanged.
            self.arguments = NextArguments(**arguments) if arguments.__class__ !=  NextArguments else arguments
        self.seq = seq
        self.kwargs = kwargs
    
    
    def to_dict(self):
        dct = {
             'type': self.type,
             'command': self.command,
             'arguments': self.arguments.to_dict(),
             'seq': self.seq,
        }
        dct.update(self.kwargs)
        return dct


@register
class NextArguments(BaseSchema):
    """
    Arguments for 'next' request.

    Note: automatically generated code. Do not edit manually.
    """

    __props__ = {
        "threadId": {
            "type": "integer",
            "description": "Execute 'next' for this thread."
        }
    }
    __refs__ = set()

    # Instances are limited to the declared schema properties plus the
    # 'kwargs' catch-all used to round-trip unknown keys.
    __slots__ = list(__props__.keys()) + ['kwargs']

    def __init__(self, threadId, **kwargs):
        """
        :param integer threadId: Execute 'next' for this thread.
        """
        self.threadId = threadId
        self.kwargs = kwargs
    
    
    def to_dict(self):
        dct = {
             'threadId': self.threadId,
        }
        dct.update(self.kwargs)
        return dct


@register_response('next')
@register
class NextResponse(BaseSchema):
    """
    Response to 'next' request. This is just an acknowledgement, so no body field is required.

    Note: automatically generated code. Do not edit manually.
    """

    __props__ = {
        "seq": {
            "type": "integer",
            "description": "Sequence number."
}, "type": { "type": "string", "enum": [ "response" ] }, "request_seq": { "type": "integer", "description": "Sequence number of the corresponding request." }, "success": { "type": "boolean", "description": "Outcome of the request." }, "command": { "type": "string", "description": "The command requested." }, "message": { "type": "string", "description": "Contains error message if success == false." }, "body": { "type": [ "array", "boolean", "integer", "null", "number", "object", "string" ], "description": "Contains request result if success is true and optional error details if success is false." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, request_seq, success, command, seq=-1, message=None, body=None, **kwargs): """ :param string type: :param integer request_seq: Sequence number of the corresponding request. :param boolean success: Outcome of the request. :param string command: The command requested. :param integer seq: Sequence number. :param string message: Contains error message if success == false. :param ['array', 'boolean', 'integer', 'null', 'number', 'object', 'string'] body: Contains request result if success is true and optional error details if success is false. """ self.type = 'response' self.request_seq = request_seq self.success = success self.command = command self.seq = seq self.message = message self.body = body self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'request_seq': self.request_seq, 'success': self.success, 'command': self.command, 'seq': self.seq, } if self.message is not None: dct['message'] = self.message if self.body is not None: dct['body'] = self.body dct.update(self.kwargs) return dct @register_request('stepIn') @register class StepInRequest(BaseSchema): """ The request starts the debuggee to step into a function/method if possible. If it cannot step into a target, 'stepIn' behaves like 'next'. 
The debug adapter first sends the response and then a 'stopped' event (with reason 'step') after the step has completed. If there are multiple function/method calls (or other targets) on the source line, the optional argument 'targetId' can be used to control into which target the 'stepIn' should occur. The list of possible targets for a given source line can be retrieved via the 'stepInTargets' request. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "request" ] }, "command": { "type": "string", "enum": [ "stepIn" ] }, "arguments": { "type": "StepInArguments" } } __refs__ = set(['arguments']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, arguments, seq=-1, **kwargs): """ :param string type: :param string command: :param StepInArguments arguments: :param integer seq: Sequence number. """ self.type = 'request' self.command = 'stepIn' if arguments is None: self.arguments = StepInArguments() else: self.arguments = StepInArguments(**arguments) if arguments.__class__ != StepInArguments else arguments self.seq = seq self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'command': self.command, 'arguments': self.arguments.to_dict(), 'seq': self.seq, } dct.update(self.kwargs) return dct @register class StepInArguments(BaseSchema): """ Arguments for 'stepIn' request. Note: automatically generated code. Do not edit manually. """ __props__ = { "threadId": { "type": "integer", "description": "Execute 'stepIn' for this thread." }, "targetId": { "type": "integer", "description": "Optional id of the target to step into." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, threadId, targetId=None, **kwargs): """ :param integer threadId: Execute 'stepIn' for this thread. :param integer targetId: Optional id of the target to step into. 
""" self.threadId = threadId self.targetId = targetId self.kwargs = kwargs def to_dict(self): dct = { 'threadId': self.threadId, } if self.targetId is not None: dct['targetId'] = self.targetId dct.update(self.kwargs) return dct @register_response('stepIn') @register class StepInResponse(BaseSchema): """ Response to 'stepIn' request. This is just an acknowledgement, so no body field is required. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "response" ] }, "request_seq": { "type": "integer", "description": "Sequence number of the corresponding request." }, "success": { "type": "boolean", "description": "Outcome of the request." }, "command": { "type": "string", "description": "The command requested." }, "message": { "type": "string", "description": "Contains error message if success == false." }, "body": { "type": [ "array", "boolean", "integer", "null", "number", "object", "string" ], "description": "Contains request result if success is true and optional error details if success is false." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, request_seq, success, command, seq=-1, message=None, body=None, **kwargs): """ :param string type: :param integer request_seq: Sequence number of the corresponding request. :param boolean success: Outcome of the request. :param string command: The command requested. :param integer seq: Sequence number. :param string message: Contains error message if success == false. :param ['array', 'boolean', 'integer', 'null', 'number', 'object', 'string'] body: Contains request result if success is true and optional error details if success is false. 
""" self.type = 'response' self.request_seq = request_seq self.success = success self.command = command self.seq = seq self.message = message self.body = body self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'request_seq': self.request_seq, 'success': self.success, 'command': self.command, 'seq': self.seq, } if self.message is not None: dct['message'] = self.message if self.body is not None: dct['body'] = self.body dct.update(self.kwargs) return dct @register_request('stepOut') @register class StepOutRequest(BaseSchema): """ The request starts the debuggee to run again for one step. The debug adapter first sends the response and then a 'stopped' event (with reason 'step') after the step has completed. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "request" ] }, "command": { "type": "string", "enum": [ "stepOut" ] }, "arguments": { "type": "StepOutArguments" } } __refs__ = set(['arguments']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, arguments, seq=-1, **kwargs): """ :param string type: :param string command: :param StepOutArguments arguments: :param integer seq: Sequence number. """ self.type = 'request' self.command = 'stepOut' if arguments is None: self.arguments = StepOutArguments() else: self.arguments = StepOutArguments(**arguments) if arguments.__class__ != StepOutArguments else arguments self.seq = seq self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'command': self.command, 'arguments': self.arguments.to_dict(), 'seq': self.seq, } dct.update(self.kwargs) return dct @register class StepOutArguments(BaseSchema): """ Arguments for 'stepOut' request. Note: automatically generated code. Do not edit manually. """ __props__ = { "threadId": { "type": "integer", "description": "Execute 'stepOut' for this thread." 
} } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, threadId, **kwargs): """ :param integer threadId: Execute 'stepOut' for this thread. """ self.threadId = threadId self.kwargs = kwargs def to_dict(self): dct = { 'threadId': self.threadId, } dct.update(self.kwargs) return dct @register_response('stepOut') @register class StepOutResponse(BaseSchema): """ Response to 'stepOut' request. This is just an acknowledgement, so no body field is required. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "response" ] }, "request_seq": { "type": "integer", "description": "Sequence number of the corresponding request." }, "success": { "type": "boolean", "description": "Outcome of the request." }, "command": { "type": "string", "description": "The command requested." }, "message": { "type": "string", "description": "Contains error message if success == false." }, "body": { "type": [ "array", "boolean", "integer", "null", "number", "object", "string" ], "description": "Contains request result if success is true and optional error details if success is false." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, request_seq, success, command, seq=-1, message=None, body=None, **kwargs): """ :param string type: :param integer request_seq: Sequence number of the corresponding request. :param boolean success: Outcome of the request. :param string command: The command requested. :param integer seq: Sequence number. :param string message: Contains error message if success == false. :param ['array', 'boolean', 'integer', 'null', 'number', 'object', 'string'] body: Contains request result if success is true and optional error details if success is false. 
""" self.type = 'response' self.request_seq = request_seq self.success = success self.command = command self.seq = seq self.message = message self.body = body self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'request_seq': self.request_seq, 'success': self.success, 'command': self.command, 'seq': self.seq, } if self.message is not None: dct['message'] = self.message if self.body is not None: dct['body'] = self.body dct.update(self.kwargs) return dct @register_request('stepBack') @register class StepBackRequest(BaseSchema): """ The request starts the debuggee to run one step backwards. The debug adapter first sends the response and then a 'stopped' event (with reason 'step') after the step has completed. Clients should only call this request if the capability 'supportsStepBack' is true. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "request" ] }, "command": { "type": "string", "enum": [ "stepBack" ] }, "arguments": { "type": "StepBackArguments" } } __refs__ = set(['arguments']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, arguments, seq=-1, **kwargs): """ :param string type: :param string command: :param StepBackArguments arguments: :param integer seq: Sequence number. """ self.type = 'request' self.command = 'stepBack' if arguments is None: self.arguments = StepBackArguments() else: self.arguments = StepBackArguments(**arguments) if arguments.__class__ != StepBackArguments else arguments self.seq = seq self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'command': self.command, 'arguments': self.arguments.to_dict(), 'seq': self.seq, } dct.update(self.kwargs) return dct @register class StepBackArguments(BaseSchema): """ Arguments for 'stepBack' request. Note: automatically generated code. Do not edit manually. 
""" __props__ = { "threadId": { "type": "integer", "description": "Execute 'stepBack' for this thread." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, threadId, **kwargs): """ :param integer threadId: Execute 'stepBack' for this thread. """ self.threadId = threadId self.kwargs = kwargs def to_dict(self): dct = { 'threadId': self.threadId, } dct.update(self.kwargs) return dct @register_response('stepBack') @register class StepBackResponse(BaseSchema): """ Response to 'stepBack' request. This is just an acknowledgement, so no body field is required. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "response" ] }, "request_seq": { "type": "integer", "description": "Sequence number of the corresponding request." }, "success": { "type": "boolean", "description": "Outcome of the request." }, "command": { "type": "string", "description": "The command requested." }, "message": { "type": "string", "description": "Contains error message if success == false." }, "body": { "type": [ "array", "boolean", "integer", "null", "number", "object", "string" ], "description": "Contains request result if success is true and optional error details if success is false." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, request_seq, success, command, seq=-1, message=None, body=None, **kwargs): """ :param string type: :param integer request_seq: Sequence number of the corresponding request. :param boolean success: Outcome of the request. :param string command: The command requested. :param integer seq: Sequence number. :param string message: Contains error message if success == false. :param ['array', 'boolean', 'integer', 'null', 'number', 'object', 'string'] body: Contains request result if success is true and optional error details if success is false. 
""" self.type = 'response' self.request_seq = request_seq self.success = success self.command = command self.seq = seq self.message = message self.body = body self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'request_seq': self.request_seq, 'success': self.success, 'command': self.command, 'seq': self.seq, } if self.message is not None: dct['message'] = self.message if self.body is not None: dct['body'] = self.body dct.update(self.kwargs) return dct @register_request('reverseContinue') @register class ReverseContinueRequest(BaseSchema): """ The request starts the debuggee to run backward. Clients should only call this request if the capability 'supportsStepBack' is true. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "request" ] }, "command": { "type": "string", "enum": [ "reverseContinue" ] }, "arguments": { "type": "ReverseContinueArguments" } } __refs__ = set(['arguments']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, arguments, seq=-1, **kwargs): """ :param string type: :param string command: :param ReverseContinueArguments arguments: :param integer seq: Sequence number. """ self.type = 'request' self.command = 'reverseContinue' if arguments is None: self.arguments = ReverseContinueArguments() else: self.arguments = ReverseContinueArguments(**arguments) if arguments.__class__ != ReverseContinueArguments else arguments self.seq = seq self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'command': self.command, 'arguments': self.arguments.to_dict(), 'seq': self.seq, } dct.update(self.kwargs) return dct @register class ReverseContinueArguments(BaseSchema): """ Arguments for 'reverseContinue' request. Note: automatically generated code. Do not edit manually. """ __props__ = { "threadId": { "type": "integer", "description": "Execute 'reverseContinue' for this thread." 
} } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, threadId, **kwargs): """ :param integer threadId: Execute 'reverseContinue' for this thread. """ self.threadId = threadId self.kwargs = kwargs def to_dict(self): dct = { 'threadId': self.threadId, } dct.update(self.kwargs) return dct @register_response('reverseContinue') @register class ReverseContinueResponse(BaseSchema): """ Response to 'reverseContinue' request. This is just an acknowledgement, so no body field is required. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "response" ] }, "request_seq": { "type": "integer", "description": "Sequence number of the corresponding request." }, "success": { "type": "boolean", "description": "Outcome of the request." }, "command": { "type": "string", "description": "The command requested." }, "message": { "type": "string", "description": "Contains error message if success == false." }, "body": { "type": [ "array", "boolean", "integer", "null", "number", "object", "string" ], "description": "Contains request result if success is true and optional error details if success is false." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, request_seq, success, command, seq=-1, message=None, body=None, **kwargs): """ :param string type: :param integer request_seq: Sequence number of the corresponding request. :param boolean success: Outcome of the request. :param string command: The command requested. :param integer seq: Sequence number. :param string message: Contains error message if success == false. :param ['array', 'boolean', 'integer', 'null', 'number', 'object', 'string'] body: Contains request result if success is true and optional error details if success is false. 
""" self.type = 'response' self.request_seq = request_seq self.success = success self.command = command self.seq = seq self.message = message self.body = body self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'request_seq': self.request_seq, 'success': self.success, 'command': self.command, 'seq': self.seq, } if self.message is not None: dct['message'] = self.message if self.body is not None: dct['body'] = self.body dct.update(self.kwargs) return dct @register_request('restartFrame') @register class RestartFrameRequest(BaseSchema): """ The request restarts execution of the specified stackframe. The debug adapter first sends the response and then a 'stopped' event (with reason 'restart') after the restart has completed. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "request" ] }, "command": { "type": "string", "enum": [ "restartFrame" ] }, "arguments": { "type": "RestartFrameArguments" } } __refs__ = set(['arguments']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, arguments, seq=-1, **kwargs): """ :param string type: :param string command: :param RestartFrameArguments arguments: :param integer seq: Sequence number. """ self.type = 'request' self.command = 'restartFrame' if arguments is None: self.arguments = RestartFrameArguments() else: self.arguments = RestartFrameArguments(**arguments) if arguments.__class__ != RestartFrameArguments else arguments self.seq = seq self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'command': self.command, 'arguments': self.arguments.to_dict(), 'seq': self.seq, } dct.update(self.kwargs) return dct @register class RestartFrameArguments(BaseSchema): """ Arguments for 'restartFrame' request. Note: automatically generated code. Do not edit manually. """ __props__ = { "frameId": { "type": "integer", "description": "Restart this stackframe." 
} } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, frameId, **kwargs): """ :param integer frameId: Restart this stackframe. """ self.frameId = frameId self.kwargs = kwargs def to_dict(self): dct = { 'frameId': self.frameId, } dct.update(self.kwargs) return dct @register_response('restartFrame') @register class RestartFrameResponse(BaseSchema): """ Response to 'restartFrame' request. This is just an acknowledgement, so no body field is required. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "response" ] }, "request_seq": { "type": "integer", "description": "Sequence number of the corresponding request." }, "success": { "type": "boolean", "description": "Outcome of the request." }, "command": { "type": "string", "description": "The command requested." }, "message": { "type": "string", "description": "Contains error message if success == false." }, "body": { "type": [ "array", "boolean", "integer", "null", "number", "object", "string" ], "description": "Contains request result if success is true and optional error details if success is false." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, request_seq, success, command, seq=-1, message=None, body=None, **kwargs): """ :param string type: :param integer request_seq: Sequence number of the corresponding request. :param boolean success: Outcome of the request. :param string command: The command requested. :param integer seq: Sequence number. :param string message: Contains error message if success == false. :param ['array', 'boolean', 'integer', 'null', 'number', 'object', 'string'] body: Contains request result if success is true and optional error details if success is false. 
""" self.type = 'response' self.request_seq = request_seq self.success = success self.command = command self.seq = seq self.message = message self.body = body self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'request_seq': self.request_seq, 'success': self.success, 'command': self.command, 'seq': self.seq, } if self.message is not None: dct['message'] = self.message if self.body is not None: dct['body'] = self.body dct.update(self.kwargs) return dct @register_request('goto') @register class GotoRequest(BaseSchema): """ The request sets the location where the debuggee will continue to run. This makes it possible to skip the execution of code or to executed code again. The code between the current location and the goto target is not executed but skipped. The debug adapter first sends the response and then a 'stopped' event with reason 'goto'. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "request" ] }, "command": { "type": "string", "enum": [ "goto" ] }, "arguments": { "type": "GotoArguments" } } __refs__ = set(['arguments']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, arguments, seq=-1, **kwargs): """ :param string type: :param string command: :param GotoArguments arguments: :param integer seq: Sequence number. """ self.type = 'request' self.command = 'goto' if arguments is None: self.arguments = GotoArguments() else: self.arguments = GotoArguments(**arguments) if arguments.__class__ != GotoArguments else arguments self.seq = seq self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'command': self.command, 'arguments': self.arguments.to_dict(), 'seq': self.seq, } dct.update(self.kwargs) return dct @register class GotoArguments(BaseSchema): """ Arguments for 'goto' request. Note: automatically generated code. Do not edit manually. 
""" __props__ = { "threadId": { "type": "integer", "description": "Set the goto target for this thread." }, "targetId": { "type": "integer", "description": "The location where the debuggee will continue to run." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, threadId, targetId, **kwargs): """ :param integer threadId: Set the goto target for this thread. :param integer targetId: The location where the debuggee will continue to run. """ self.threadId = threadId self.targetId = targetId self.kwargs = kwargs def to_dict(self): dct = { 'threadId': self.threadId, 'targetId': self.targetId, } dct.update(self.kwargs) return dct @register_response('goto') @register class GotoResponse(BaseSchema): """ Response to 'goto' request. This is just an acknowledgement, so no body field is required. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "response" ] }, "request_seq": { "type": "integer", "description": "Sequence number of the corresponding request." }, "success": { "type": "boolean", "description": "Outcome of the request." }, "command": { "type": "string", "description": "The command requested." }, "message": { "type": "string", "description": "Contains error message if success == false." }, "body": { "type": [ "array", "boolean", "integer", "null", "number", "object", "string" ], "description": "Contains request result if success is true and optional error details if success is false." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, request_seq, success, command, seq=-1, message=None, body=None, **kwargs): """ :param string type: :param integer request_seq: Sequence number of the corresponding request. :param boolean success: Outcome of the request. :param string command: The command requested. :param integer seq: Sequence number. 
:param string message: Contains error message if success == false. :param ['array', 'boolean', 'integer', 'null', 'number', 'object', 'string'] body: Contains request result if success is true and optional error details if success is false. """ self.type = 'response' self.request_seq = request_seq self.success = success self.command = command self.seq = seq self.message = message self.body = body self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'request_seq': self.request_seq, 'success': self.success, 'command': self.command, 'seq': self.seq, } if self.message is not None: dct['message'] = self.message if self.body is not None: dct['body'] = self.body dct.update(self.kwargs) return dct @register_request('pause') @register class PauseRequest(BaseSchema): """ The request suspenses the debuggee. The debug adapter first sends the response and then a 'stopped' event (with reason 'pause') after the thread has been paused successfully. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "request" ] }, "command": { "type": "string", "enum": [ "pause" ] }, "arguments": { "type": "PauseArguments" } } __refs__ = set(['arguments']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, arguments, seq=-1, **kwargs): """ :param string type: :param string command: :param PauseArguments arguments: :param integer seq: Sequence number. """ self.type = 'request' self.command = 'pause' if arguments is None: self.arguments = PauseArguments() else: self.arguments = PauseArguments(**arguments) if arguments.__class__ != PauseArguments else arguments self.seq = seq self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'command': self.command, 'arguments': self.arguments.to_dict(), 'seq': self.seq, } dct.update(self.kwargs) return dct @register class PauseArguments(BaseSchema): """ Arguments for 'pause' request. 
Note: automatically generated code. Do not edit manually. """ __props__ = { "threadId": { "type": "integer", "description": "Pause execution for this thread." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, threadId, **kwargs): """ :param integer threadId: Pause execution for this thread. """ self.threadId = threadId self.kwargs = kwargs def to_dict(self): dct = { 'threadId': self.threadId, } dct.update(self.kwargs) return dct @register_response('pause') @register class PauseResponse(BaseSchema): """ Response to 'pause' request. This is just an acknowledgement, so no body field is required. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "response" ] }, "request_seq": { "type": "integer", "description": "Sequence number of the corresponding request." }, "success": { "type": "boolean", "description": "Outcome of the request." }, "command": { "type": "string", "description": "The command requested." }, "message": { "type": "string", "description": "Contains error message if success == false." }, "body": { "type": [ "array", "boolean", "integer", "null", "number", "object", "string" ], "description": "Contains request result if success is true and optional error details if success is false." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, request_seq, success, command, seq=-1, message=None, body=None, **kwargs): """ :param string type: :param integer request_seq: Sequence number of the corresponding request. :param boolean success: Outcome of the request. :param string command: The command requested. :param integer seq: Sequence number. :param string message: Contains error message if success == false. 
:param ['array', 'boolean', 'integer', 'null', 'number', 'object', 'string'] body: Contains request result if success is true and optional error details if success is false. """ self.type = 'response' self.request_seq = request_seq self.success = success self.command = command self.seq = seq self.message = message self.body = body self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'request_seq': self.request_seq, 'success': self.success, 'command': self.command, 'seq': self.seq, } if self.message is not None: dct['message'] = self.message if self.body is not None: dct['body'] = self.body dct.update(self.kwargs) return dct @register_request('stackTrace') @register class StackTraceRequest(BaseSchema): """ The request returns a stacktrace from the current execution state. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "request" ] }, "command": { "type": "string", "enum": [ "stackTrace" ] }, "arguments": { "type": "StackTraceArguments" } } __refs__ = set(['arguments']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, arguments, seq=-1, **kwargs): """ :param string type: :param string command: :param StackTraceArguments arguments: :param integer seq: Sequence number. """ self.type = 'request' self.command = 'stackTrace' if arguments is None: self.arguments = StackTraceArguments() else: self.arguments = StackTraceArguments(**arguments) if arguments.__class__ != StackTraceArguments else arguments self.seq = seq self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'command': self.command, 'arguments': self.arguments.to_dict(), 'seq': self.seq, } dct.update(self.kwargs) return dct @register class StackTraceArguments(BaseSchema): """ Arguments for 'stackTrace' request. Note: automatically generated code. Do not edit manually. 
""" __props__ = { "threadId": { "type": "integer", "description": "Retrieve the stacktrace for this thread." }, "startFrame": { "type": "integer", "description": "The index of the first frame to return; if omitted frames start at 0." }, "levels": { "type": "integer", "description": "The maximum number of frames to return. If levels is not specified or 0, all frames are returned." }, "format": { "description": "Specifies details on how to format the stack frames.", "type": "StackFrameFormat" } } __refs__ = set(['format']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, threadId, startFrame=None, levels=None, format=None, **kwargs): """ :param integer threadId: Retrieve the stacktrace for this thread. :param integer startFrame: The index of the first frame to return; if omitted frames start at 0. :param integer levels: The maximum number of frames to return. If levels is not specified or 0, all frames are returned. :param StackFrameFormat format: Specifies details on how to format the stack frames. """ self.threadId = threadId self.startFrame = startFrame self.levels = levels if format is None: self.format = StackFrameFormat() else: self.format = StackFrameFormat(**format) if format.__class__ != StackFrameFormat else format self.kwargs = kwargs def to_dict(self): dct = { 'threadId': self.threadId, } if self.startFrame is not None: dct['startFrame'] = self.startFrame if self.levels is not None: dct['levels'] = self.levels if self.format is not None: dct['format'] = self.format.to_dict() dct.update(self.kwargs) return dct @register_response('stackTrace') @register class StackTraceResponse(BaseSchema): """ Response to 'stackTrace' request. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "response" ] }, "request_seq": { "type": "integer", "description": "Sequence number of the corresponding request." 
}, "success": { "type": "boolean", "description": "Outcome of the request." }, "command": { "type": "string", "description": "The command requested." }, "message": { "type": "string", "description": "Contains error message if success == false." }, "body": { "type": "object", "properties": { "stackFrames": { "type": "array", "items": { "$ref": "#/definitions/StackFrame" }, "description": "The frames of the stackframe. If the array has length zero, there are no stackframes available.\nThis means that there is no location information available." }, "totalFrames": { "type": "integer", "description": "The total number of frames available." } }, "required": [ "stackFrames" ] } } __refs__ = set(['body']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, request_seq, success, command, body, seq=-1, message=None, **kwargs): """ :param string type: :param integer request_seq: Sequence number of the corresponding request. :param boolean success: Outcome of the request. :param string command: The command requested. :param StackTraceResponseBody body: :param integer seq: Sequence number. :param string message: Contains error message if success == false. """ self.type = 'response' self.request_seq = request_seq self.success = success self.command = command if body is None: self.body = StackTraceResponseBody() else: self.body = StackTraceResponseBody(**body) if body.__class__ != StackTraceResponseBody else body self.seq = seq self.message = message self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'request_seq': self.request_seq, 'success': self.success, 'command': self.command, 'body': self.body.to_dict(), 'seq': self.seq, } if self.message is not None: dct['message'] = self.message dct.update(self.kwargs) return dct @register_request('scopes') @register class ScopesRequest(BaseSchema): """ The request returns the variable scopes for a given stackframe ID. Note: automatically generated code. Do not edit manually. 
""" __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "request" ] }, "command": { "type": "string", "enum": [ "scopes" ] }, "arguments": { "type": "ScopesArguments" } } __refs__ = set(['arguments']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, arguments, seq=-1, **kwargs): """ :param string type: :param string command: :param ScopesArguments arguments: :param integer seq: Sequence number. """ self.type = 'request' self.command = 'scopes' if arguments is None: self.arguments = ScopesArguments() else: self.arguments = ScopesArguments(**arguments) if arguments.__class__ != ScopesArguments else arguments self.seq = seq self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'command': self.command, 'arguments': self.arguments.to_dict(), 'seq': self.seq, } dct.update(self.kwargs) return dct @register class ScopesArguments(BaseSchema): """ Arguments for 'scopes' request. Note: automatically generated code. Do not edit manually. """ __props__ = { "frameId": { "type": "integer", "description": "Retrieve the scopes for this stackframe." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, frameId, **kwargs): """ :param integer frameId: Retrieve the scopes for this stackframe. """ self.frameId = frameId self.kwargs = kwargs def to_dict(self): dct = { 'frameId': self.frameId, } dct.update(self.kwargs) return dct @register_response('scopes') @register class ScopesResponse(BaseSchema): """ Response to 'scopes' request. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "response" ] }, "request_seq": { "type": "integer", "description": "Sequence number of the corresponding request." }, "success": { "type": "boolean", "description": "Outcome of the request." 
}, "command": { "type": "string", "description": "The command requested." }, "message": { "type": "string", "description": "Contains error message if success == false." }, "body": { "type": "object", "properties": { "scopes": { "type": "array", "items": { "$ref": "#/definitions/Scope" }, "description": "The scopes of the stackframe. If the array has length zero, there are no scopes available." } }, "required": [ "scopes" ] } } __refs__ = set(['body']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, request_seq, success, command, body, seq=-1, message=None, **kwargs): """ :param string type: :param integer request_seq: Sequence number of the corresponding request. :param boolean success: Outcome of the request. :param string command: The command requested. :param ScopesResponseBody body: :param integer seq: Sequence number. :param string message: Contains error message if success == false. """ self.type = 'response' self.request_seq = request_seq self.success = success self.command = command if body is None: self.body = ScopesResponseBody() else: self.body = ScopesResponseBody(**body) if body.__class__ != ScopesResponseBody else body self.seq = seq self.message = message self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'request_seq': self.request_seq, 'success': self.success, 'command': self.command, 'body': self.body.to_dict(), 'seq': self.seq, } if self.message is not None: dct['message'] = self.message dct.update(self.kwargs) return dct @register_request('variables') @register class VariablesRequest(BaseSchema): """ Retrieves all child variables for the given variable reference. An optional filter can be used to limit the fetched children to either named or indexed children. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." 
}, "type": { "type": "string", "enum": [ "request" ] }, "command": { "type": "string", "enum": [ "variables" ] }, "arguments": { "type": "VariablesArguments" } } __refs__ = set(['arguments']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, arguments, seq=-1, **kwargs): """ :param string type: :param string command: :param VariablesArguments arguments: :param integer seq: Sequence number. """ self.type = 'request' self.command = 'variables' if arguments is None: self.arguments = VariablesArguments() else: self.arguments = VariablesArguments(**arguments) if arguments.__class__ != VariablesArguments else arguments self.seq = seq self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'command': self.command, 'arguments': self.arguments.to_dict(), 'seq': self.seq, } dct.update(self.kwargs) return dct @register class VariablesArguments(BaseSchema): """ Arguments for 'variables' request. Note: automatically generated code. Do not edit manually. """ __props__ = { "variablesReference": { "type": "integer", "description": "The Variable reference." }, "filter": { "type": "string", "enum": [ "indexed", "named" ], "description": "Optional filter to limit the child variables to either named or indexed. If ommited, both types are fetched." }, "start": { "type": "integer", "description": "The index of the first variable to return; if omitted children start at 0." }, "count": { "type": "integer", "description": "The number of variables to return. If count is missing or 0, all variables are returned." }, "format": { "description": "Specifies details on how to format the Variable values.", "type": "ValueFormat" } } __refs__ = set(['format']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, variablesReference, filter=None, start=None, count=None, format=None, **kwargs): """ :param integer variablesReference: The Variable reference. :param string filter: Optional filter to limit the child variables to either named or indexed. 
If ommited, both types are fetched. :param integer start: The index of the first variable to return; if omitted children start at 0. :param integer count: The number of variables to return. If count is missing or 0, all variables are returned. :param ValueFormat format: Specifies details on how to format the Variable values. """ self.variablesReference = variablesReference self.filter = filter self.start = start self.count = count if format is None: self.format = ValueFormat() else: self.format = ValueFormat(**format) if format.__class__ != ValueFormat else format self.kwargs = kwargs def to_dict(self): dct = { 'variablesReference': self.variablesReference, } if self.filter is not None: dct['filter'] = self.filter if self.start is not None: dct['start'] = self.start if self.count is not None: dct['count'] = self.count if self.format is not None: dct['format'] = self.format.to_dict() dct.update(self.kwargs) return dct @register_response('variables') @register class VariablesResponse(BaseSchema): """ Response to 'variables' request. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "response" ] }, "request_seq": { "type": "integer", "description": "Sequence number of the corresponding request." }, "success": { "type": "boolean", "description": "Outcome of the request." }, "command": { "type": "string", "description": "The command requested." }, "message": { "type": "string", "description": "Contains error message if success == false." }, "body": { "type": "object", "properties": { "variables": { "type": "array", "items": { "$ref": "#/definitions/Variable" }, "description": "All (or a range) of variables for the given variable reference." 
} }, "required": [ "variables" ] } } __refs__ = set(['body']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, request_seq, success, command, body, seq=-1, message=None, **kwargs): """ :param string type: :param integer request_seq: Sequence number of the corresponding request. :param boolean success: Outcome of the request. :param string command: The command requested. :param VariablesResponseBody body: :param integer seq: Sequence number. :param string message: Contains error message if success == false. """ self.type = 'response' self.request_seq = request_seq self.success = success self.command = command if body is None: self.body = VariablesResponseBody() else: self.body = VariablesResponseBody(**body) if body.__class__ != VariablesResponseBody else body self.seq = seq self.message = message self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'request_seq': self.request_seq, 'success': self.success, 'command': self.command, 'body': self.body.to_dict(), 'seq': self.seq, } if self.message is not None: dct['message'] = self.message dct.update(self.kwargs) return dct @register_request('setVariable') @register class SetVariableRequest(BaseSchema): """ Set the variable with the given name in the variable container to a new value. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "request" ] }, "command": { "type": "string", "enum": [ "setVariable" ] }, "arguments": { "type": "SetVariableArguments" } } __refs__ = set(['arguments']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, arguments, seq=-1, **kwargs): """ :param string type: :param string command: :param SetVariableArguments arguments: :param integer seq: Sequence number. 
""" self.type = 'request' self.command = 'setVariable' if arguments is None: self.arguments = SetVariableArguments() else: self.arguments = SetVariableArguments(**arguments) if arguments.__class__ != SetVariableArguments else arguments self.seq = seq self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'command': self.command, 'arguments': self.arguments.to_dict(), 'seq': self.seq, } dct.update(self.kwargs) return dct @register class SetVariableArguments(BaseSchema): """ Arguments for 'setVariable' request. Note: automatically generated code. Do not edit manually. """ __props__ = { "variablesReference": { "type": "integer", "description": "The reference of the variable container." }, "name": { "type": "string", "description": "The name of the variable." }, "value": { "type": "string", "description": "The value of the variable." }, "format": { "description": "Specifies details on how to format the response value.", "type": "ValueFormat" } } __refs__ = set(['format']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, variablesReference, name, value, format=None, **kwargs): """ :param integer variablesReference: The reference of the variable container. :param string name: The name of the variable. :param string value: The value of the variable. :param ValueFormat format: Specifies details on how to format the response value. """ self.variablesReference = variablesReference self.name = name self.value = value if format is None: self.format = ValueFormat() else: self.format = ValueFormat(**format) if format.__class__ != ValueFormat else format self.kwargs = kwargs def to_dict(self): dct = { 'variablesReference': self.variablesReference, 'name': self.name, 'value': self.value, } if self.format is not None: dct['format'] = self.format.to_dict() dct.update(self.kwargs) return dct @register_response('setVariable') @register class SetVariableResponse(BaseSchema): """ Response to 'setVariable' request. Note: automatically generated code. 
Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "response" ] }, "request_seq": { "type": "integer", "description": "Sequence number of the corresponding request." }, "success": { "type": "boolean", "description": "Outcome of the request." }, "command": { "type": "string", "description": "The command requested." }, "message": { "type": "string", "description": "Contains error message if success == false." }, "body": { "type": "object", "properties": { "value": { "type": "string", "description": "The new value of the variable." }, "type": { "type": "string", "description": "The type of the new value. Typically shown in the UI when hovering over the value." }, "variablesReference": { "type": "number", "description": "If variablesReference is > 0, the new value is structured and its children can be retrieved by passing variablesReference to the VariablesRequest." }, "namedVariables": { "type": "number", "description": "The number of named child variables.\nThe client can use this optional information to present the variables in a paged UI and fetch them in chunks." }, "indexedVariables": { "type": "number", "description": "The number of indexed child variables.\nThe client can use this optional information to present the variables in a paged UI and fetch them in chunks." } }, "required": [ "value" ] } } __refs__ = set(['body']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, request_seq, success, command, body, seq=-1, message=None, **kwargs): """ :param string type: :param integer request_seq: Sequence number of the corresponding request. :param boolean success: Outcome of the request. :param string command: The command requested. :param SetVariableResponseBody body: :param integer seq: Sequence number. :param string message: Contains error message if success == false. 
""" self.type = 'response' self.request_seq = request_seq self.success = success self.command = command if body is None: self.body = SetVariableResponseBody() else: self.body = SetVariableResponseBody(**body) if body.__class__ != SetVariableResponseBody else body self.seq = seq self.message = message self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'request_seq': self.request_seq, 'success': self.success, 'command': self.command, 'body': self.body.to_dict(), 'seq': self.seq, } if self.message is not None: dct['message'] = self.message dct.update(self.kwargs) return dct @register_request('source') @register class SourceRequest(BaseSchema): """ The request retrieves the source code for a given source reference. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "request" ] }, "command": { "type": "string", "enum": [ "source" ] }, "arguments": { "type": "SourceArguments" } } __refs__ = set(['arguments']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, arguments, seq=-1, **kwargs): """ :param string type: :param string command: :param SourceArguments arguments: :param integer seq: Sequence number. """ self.type = 'request' self.command = 'source' if arguments is None: self.arguments = SourceArguments() else: self.arguments = SourceArguments(**arguments) if arguments.__class__ != SourceArguments else arguments self.seq = seq self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'command': self.command, 'arguments': self.arguments.to_dict(), 'seq': self.seq, } dct.update(self.kwargs) return dct @register class SourceArguments(BaseSchema): """ Arguments for 'source' request. Note: automatically generated code. Do not edit manually. """ __props__ = { "source": { "description": "Specifies the source content to load. 
Either source.path or source.sourceReference must be specified.", "type": "Source" }, "sourceReference": { "type": "integer", "description": "The reference to the source. This is the same as source.sourceReference. This is provided for backward compatibility since old backends do not understand the 'source' attribute." } } __refs__ = set(['source']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, sourceReference, source=None, **kwargs): """ :param integer sourceReference: The reference to the source. This is the same as source.sourceReference. This is provided for backward compatibility since old backends do not understand the 'source' attribute. :param Source source: Specifies the source content to load. Either source.path or source.sourceReference must be specified. """ self.sourceReference = sourceReference if source is None: self.source = Source() else: self.source = Source(**source) if source.__class__ != Source else source self.kwargs = kwargs def to_dict(self): dct = { 'sourceReference': self.sourceReference, } if self.source is not None: dct['source'] = self.source.to_dict() dct.update(self.kwargs) return dct @register_response('source') @register class SourceResponse(BaseSchema): """ Response to 'source' request. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "response" ] }, "request_seq": { "type": "integer", "description": "Sequence number of the corresponding request." }, "success": { "type": "boolean", "description": "Outcome of the request." }, "command": { "type": "string", "description": "The command requested." }, "message": { "type": "string", "description": "Contains error message if success == false." }, "body": { "type": "object", "properties": { "content": { "type": "string", "description": "Content of the source reference." 
}, "mimeType": { "type": "string", "description": "Optional content type (mime type) of the source." } }, "required": [ "content" ] } } __refs__ = set(['body']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, request_seq, success, command, body, seq=-1, message=None, **kwargs): """ :param string type: :param integer request_seq: Sequence number of the corresponding request. :param boolean success: Outcome of the request. :param string command: The command requested. :param SourceResponseBody body: :param integer seq: Sequence number. :param string message: Contains error message if success == false. """ self.type = 'response' self.request_seq = request_seq self.success = success self.command = command if body is None: self.body = SourceResponseBody() else: self.body = SourceResponseBody(**body) if body.__class__ != SourceResponseBody else body self.seq = seq self.message = message self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'request_seq': self.request_seq, 'success': self.success, 'command': self.command, 'body': self.body.to_dict(), 'seq': self.seq, } if self.message is not None: dct['message'] = self.message dct.update(self.kwargs) return dct @register_request('threads') @register class ThreadsRequest(BaseSchema): """ The request retrieves a list of all threads. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "request" ] }, "command": { "type": "string", "enum": [ "threads" ] }, "arguments": { "type": [ "array", "boolean", "integer", "null", "number", "object", "string" ], "description": "Object containing arguments for the command." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, seq=-1, arguments=None, **kwargs): """ :param string type: :param string command: :param integer seq: Sequence number. 
:param ['array', 'boolean', 'integer', 'null', 'number', 'object', 'string'] arguments: Object containing arguments for the command. """ self.type = 'request' self.command = 'threads' self.seq = seq self.arguments = arguments self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'command': self.command, 'seq': self.seq, } if self.arguments is not None: dct['arguments'] = self.arguments dct.update(self.kwargs) return dct @register_response('threads') @register class ThreadsResponse(BaseSchema): """ Response to 'threads' request. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "response" ] }, "request_seq": { "type": "integer", "description": "Sequence number of the corresponding request." }, "success": { "type": "boolean", "description": "Outcome of the request." }, "command": { "type": "string", "description": "The command requested." }, "message": { "type": "string", "description": "Contains error message if success == false." }, "body": { "type": "object", "properties": { "threads": { "type": "array", "items": { "$ref": "#/definitions/Thread" }, "description": "All threads." } }, "required": [ "threads" ] } } __refs__ = set(['body']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, request_seq, success, command, body, seq=-1, message=None, **kwargs): """ :param string type: :param integer request_seq: Sequence number of the corresponding request. :param boolean success: Outcome of the request. :param string command: The command requested. :param ThreadsResponseBody body: :param integer seq: Sequence number. :param string message: Contains error message if success == false. 
""" self.type = 'response' self.request_seq = request_seq self.success = success self.command = command if body is None: self.body = ThreadsResponseBody() else: self.body = ThreadsResponseBody(**body) if body.__class__ != ThreadsResponseBody else body self.seq = seq self.message = message self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'request_seq': self.request_seq, 'success': self.success, 'command': self.command, 'body': self.body.to_dict(), 'seq': self.seq, } if self.message is not None: dct['message'] = self.message dct.update(self.kwargs) return dct @register_request('terminateThreads') @register class TerminateThreadsRequest(BaseSchema): """ The request terminates the threads with the given ids. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "request" ] }, "command": { "type": "string", "enum": [ "terminateThreads" ] }, "arguments": { "type": "TerminateThreadsArguments" } } __refs__ = set(['arguments']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, arguments, seq=-1, **kwargs): """ :param string type: :param string command: :param TerminateThreadsArguments arguments: :param integer seq: Sequence number. """ self.type = 'request' self.command = 'terminateThreads' if arguments is None: self.arguments = TerminateThreadsArguments() else: self.arguments = TerminateThreadsArguments(**arguments) if arguments.__class__ != TerminateThreadsArguments else arguments self.seq = seq self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'command': self.command, 'arguments': self.arguments.to_dict(), 'seq': self.seq, } dct.update(self.kwargs) return dct @register class TerminateThreadsArguments(BaseSchema): """ Arguments for 'terminateThreads' request. Note: automatically generated code. Do not edit manually. 
""" __props__ = { "threadIds": { "type": "array", "items": { "type": "integer" }, "description": "Ids of threads to be terminated." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, threadIds=None, **kwargs): """ :param array threadIds: Ids of threads to be terminated. """ self.threadIds = threadIds self.kwargs = kwargs def to_dict(self): dct = { } if self.threadIds is not None: dct['threadIds'] = self.threadIds dct.update(self.kwargs) return dct @register_response('terminateThreads') @register class TerminateThreadsResponse(BaseSchema): """ Response to 'terminateThreads' request. This is just an acknowledgement, so no body field is required. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "response" ] }, "request_seq": { "type": "integer", "description": "Sequence number of the corresponding request." }, "success": { "type": "boolean", "description": "Outcome of the request." }, "command": { "type": "string", "description": "The command requested." }, "message": { "type": "string", "description": "Contains error message if success == false." }, "body": { "type": [ "array", "boolean", "integer", "null", "number", "object", "string" ], "description": "Contains request result if success is true and optional error details if success is false." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, request_seq, success, command, seq=-1, message=None, body=None, **kwargs): """ :param string type: :param integer request_seq: Sequence number of the corresponding request. :param boolean success: Outcome of the request. :param string command: The command requested. :param integer seq: Sequence number. :param string message: Contains error message if success == false. 
:param ['array', 'boolean', 'integer', 'null', 'number', 'object', 'string'] body: Contains request result if success is true and optional error details if success is false. """ self.type = 'response' self.request_seq = request_seq self.success = success self.command = command self.seq = seq self.message = message self.body = body self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'request_seq': self.request_seq, 'success': self.success, 'command': self.command, 'seq': self.seq, } if self.message is not None: dct['message'] = self.message if self.body is not None: dct['body'] = self.body dct.update(self.kwargs) return dct @register_request('modules') @register class ModulesRequest(BaseSchema): """ Modules can be retrieved from the debug adapter with the ModulesRequest which can either return all modules or a range of modules to support paging. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "request" ] }, "command": { "type": "string", "enum": [ "modules" ] }, "arguments": { "type": "ModulesArguments" } } __refs__ = set(['arguments']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, arguments, seq=-1, **kwargs): """ :param string type: :param string command: :param ModulesArguments arguments: :param integer seq: Sequence number. """ self.type = 'request' self.command = 'modules' if arguments is None: self.arguments = ModulesArguments() else: self.arguments = ModulesArguments(**arguments) if arguments.__class__ != ModulesArguments else arguments self.seq = seq self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'command': self.command, 'arguments': self.arguments.to_dict(), 'seq': self.seq, } dct.update(self.kwargs) return dct @register class ModulesArguments(BaseSchema): """ Arguments for 'modules' request. Note: automatically generated code. Do not edit manually. 
""" __props__ = { "startModule": { "type": "integer", "description": "The index of the first module to return; if omitted modules start at 0." }, "moduleCount": { "type": "integer", "description": "The number of modules to return. If moduleCount is not specified or 0, all modules are returned." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, startModule=None, moduleCount=None, **kwargs): """ :param integer startModule: The index of the first module to return; if omitted modules start at 0. :param integer moduleCount: The number of modules to return. If moduleCount is not specified or 0, all modules are returned. """ self.startModule = startModule self.moduleCount = moduleCount self.kwargs = kwargs def to_dict(self): dct = { } if self.startModule is not None: dct['startModule'] = self.startModule if self.moduleCount is not None: dct['moduleCount'] = self.moduleCount dct.update(self.kwargs) return dct @register_response('modules') @register class ModulesResponse(BaseSchema): """ Response to 'modules' request. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "response" ] }, "request_seq": { "type": "integer", "description": "Sequence number of the corresponding request." }, "success": { "type": "boolean", "description": "Outcome of the request." }, "command": { "type": "string", "description": "The command requested." }, "message": { "type": "string", "description": "Contains error message if success == false." }, "body": { "type": "object", "properties": { "modules": { "type": "array", "items": { "$ref": "#/definitions/Module" }, "description": "All modules or range of modules." }, "totalModules": { "type": "integer", "description": "The total number of modules available." 
} }, "required": [ "modules" ] } } __refs__ = set(['body']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, request_seq, success, command, body, seq=-1, message=None, **kwargs): """ :param string type: :param integer request_seq: Sequence number of the corresponding request. :param boolean success: Outcome of the request. :param string command: The command requested. :param ModulesResponseBody body: :param integer seq: Sequence number. :param string message: Contains error message if success == false. """ self.type = 'response' self.request_seq = request_seq self.success = success self.command = command if body is None: self.body = ModulesResponseBody() else: self.body = ModulesResponseBody(**body) if body.__class__ != ModulesResponseBody else body self.seq = seq self.message = message self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'request_seq': self.request_seq, 'success': self.success, 'command': self.command, 'body': self.body.to_dict(), 'seq': self.seq, } if self.message is not None: dct['message'] = self.message dct.update(self.kwargs) return dct @register_request('loadedSources') @register class LoadedSourcesRequest(BaseSchema): """ Retrieves the set of all sources currently loaded by the debugged process. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "request" ] }, "command": { "type": "string", "enum": [ "loadedSources" ] }, "arguments": { "type": "LoadedSourcesArguments" } } __refs__ = set(['arguments']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, seq=-1, arguments=None, **kwargs): """ :param string type: :param string command: :param integer seq: Sequence number. 
        :param LoadedSourcesArguments arguments:
        """
        self.type = 'request'
        self.command = 'loadedSources'
        self.seq = seq
        # Accept either a ready-made LoadedSourcesArguments instance or the
        # plain dict decoded from the JSON wire format.
        if arguments is None:
            self.arguments = LoadedSourcesArguments()
        else:
            self.arguments = LoadedSourcesArguments(**arguments) if arguments.__class__ != LoadedSourcesArguments else arguments
        self.kwargs = kwargs

    def to_dict(self):
        # NOTE(review): 'arguments' is never None at this point (a default
        # instance is created in __init__ when it is omitted), so the guard
        # below always passes and 'arguments' is always emitted -- confirm.
        dct = {
            'type': self.type,
            'command': self.command,
            'seq': self.seq,
        }
        if self.arguments is not None:
            dct['arguments'] = self.arguments.to_dict()
        dct.update(self.kwargs)
        return dct


@register
class LoadedSourcesArguments(BaseSchema):
    """
    Arguments for 'loadedSources' request.

    Note: automatically generated code. Do not edit manually.
    """

    # The 'loadedSources' request declares no argument properties; the empty
    # schema is kept so this class has the same shape as the other generated
    # *Arguments types.
    __props__ = {}
    __refs__ = set()

    __slots__ = list(__props__.keys()) + ['kwargs']

    def __init__(self, **kwargs):
        """
        Initialize an (empty) arguments object; any extra keyword arguments
        are preserved and round-tripped through to_dict().
        """
        self.kwargs = kwargs

    def to_dict(self):
        dct = {
        }
        dct.update(self.kwargs)
        return dct


@register_response('loadedSources')
@register
class LoadedSourcesResponse(BaseSchema):
    """
    Response to 'loadedSources' request.

    Note: automatically generated code. Do not edit manually.
    """

    # JSON-schema description of this message's properties, generated from the
    # debug adapter protocol schema.
    __props__ = {
        "seq": {
            "type": "integer",
            "description": "Sequence number."
        },
        "type": {
            "type": "string",
            "enum": [
                "response"
            ]
        },
        "request_seq": {
            "type": "integer",
            "description": "Sequence number of the corresponding request."
        },
        "success": {
            "type": "boolean",
            "description": "Outcome of the request."
        },
        "command": {
            "type": "string",
            "description": "The command requested."
        },
        "message": {
            "type": "string",
            "description": "Contains error message if success == false."
        },
        "body": {
            "type": "object",
            "properties": {
                "sources": {
                    "type": "array",
                    "items": {
                        "$ref": "#/definitions/Source"
                    },
                    "description": "Set of loaded sources."
} }, "required": [ "sources" ] } } __refs__ = set(['body']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, request_seq, success, command, body, seq=-1, message=None, **kwargs): """ :param string type: :param integer request_seq: Sequence number of the corresponding request. :param boolean success: Outcome of the request. :param string command: The command requested. :param LoadedSourcesResponseBody body: :param integer seq: Sequence number. :param string message: Contains error message if success == false. """ self.type = 'response' self.request_seq = request_seq self.success = success self.command = command if body is None: self.body = LoadedSourcesResponseBody() else: self.body = LoadedSourcesResponseBody(**body) if body.__class__ != LoadedSourcesResponseBody else body self.seq = seq self.message = message self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'request_seq': self.request_seq, 'success': self.success, 'command': self.command, 'body': self.body.to_dict(), 'seq': self.seq, } if self.message is not None: dct['message'] = self.message dct.update(self.kwargs) return dct @register_request('evaluate') @register class EvaluateRequest(BaseSchema): """ Evaluates the given expression in the context of the top most stack frame. The expression has access to any variables and arguments that are in scope. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "request" ] }, "command": { "type": "string", "enum": [ "evaluate" ] }, "arguments": { "type": "EvaluateArguments" } } __refs__ = set(['arguments']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, arguments, seq=-1, **kwargs): """ :param string type: :param string command: :param EvaluateArguments arguments: :param integer seq: Sequence number. 
""" self.type = 'request' self.command = 'evaluate' if arguments is None: self.arguments = EvaluateArguments() else: self.arguments = EvaluateArguments(**arguments) if arguments.__class__ != EvaluateArguments else arguments self.seq = seq self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'command': self.command, 'arguments': self.arguments.to_dict(), 'seq': self.seq, } dct.update(self.kwargs) return dct @register class EvaluateArguments(BaseSchema): """ Arguments for 'evaluate' request. Note: automatically generated code. Do not edit manually. """ __props__ = { "expression": { "type": "string", "description": "The expression to evaluate." }, "frameId": { "type": "integer", "description": "Evaluate the expression in the scope of this stack frame. If not specified, the expression is evaluated in the global scope." }, "context": { "type": "string", "_enum": [ "watch", "repl", "hover" ], "enumDescriptions": [ "evaluate is run in a watch.", "evaluate is run from REPL console.", "evaluate is run from a data hover." ], "description": "The context in which the evaluate request is run." }, "format": { "description": "Specifies details on how to format the Evaluate result.", "type": "ValueFormat" } } __refs__ = set(['format']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, expression, frameId=None, context=None, format=None, **kwargs): """ :param string expression: The expression to evaluate. :param integer frameId: Evaluate the expression in the scope of this stack frame. If not specified, the expression is evaluated in the global scope. :param string context: The context in which the evaluate request is run. :param ValueFormat format: Specifies details on how to format the Evaluate result. 
""" self.expression = expression self.frameId = frameId self.context = context if format is None: self.format = ValueFormat() else: self.format = ValueFormat(**format) if format.__class__ != ValueFormat else format self.kwargs = kwargs def to_dict(self): dct = { 'expression': self.expression, } if self.frameId is not None: dct['frameId'] = self.frameId if self.context is not None: dct['context'] = self.context if self.format is not None: dct['format'] = self.format.to_dict() dct.update(self.kwargs) return dct @register_response('evaluate') @register class EvaluateResponse(BaseSchema): """ Response to 'evaluate' request. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "response" ] }, "request_seq": { "type": "integer", "description": "Sequence number of the corresponding request." }, "success": { "type": "boolean", "description": "Outcome of the request." }, "command": { "type": "string", "description": "The command requested." }, "message": { "type": "string", "description": "Contains error message if success == false." }, "body": { "type": "object", "properties": { "result": { "type": "string", "description": "The result of the evaluate request." }, "type": { "type": "string", "description": "The optional type of the evaluate result." }, "presentationHint": { "$ref": "#/definitions/VariablePresentationHint", "description": "Properties of a evaluate result that can be used to determine how to render the result in the UI." }, "variablesReference": { "type": "number", "description": "If variablesReference is > 0, the evaluate result is structured and its children can be retrieved by passing variablesReference to the VariablesRequest." }, "namedVariables": { "type": "number", "description": "The number of named child variables.\nThe client can use this optional information to present the variables in a paged UI and fetch them in chunks." 
}, "indexedVariables": { "type": "number", "description": "The number of indexed child variables.\nThe client can use this optional information to present the variables in a paged UI and fetch them in chunks." } }, "required": [ "result", "variablesReference" ] } } __refs__ = set(['body']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, request_seq, success, command, body, seq=-1, message=None, **kwargs): """ :param string type: :param integer request_seq: Sequence number of the corresponding request. :param boolean success: Outcome of the request. :param string command: The command requested. :param EvaluateResponseBody body: :param integer seq: Sequence number. :param string message: Contains error message if success == false. """ self.type = 'response' self.request_seq = request_seq self.success = success self.command = command if body is None: self.body = EvaluateResponseBody() else: self.body = EvaluateResponseBody(**body) if body.__class__ != EvaluateResponseBody else body self.seq = seq self.message = message self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'request_seq': self.request_seq, 'success': self.success, 'command': self.command, 'body': self.body.to_dict(), 'seq': self.seq, } if self.message is not None: dct['message'] = self.message dct.update(self.kwargs) return dct @register_request('setExpression') @register class SetExpressionRequest(BaseSchema): """ Evaluates the given 'value' expression and assigns it to the 'expression' which must be a modifiable l-value. The expressions have access to any variables and arguments that are in scope of the specified frame. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." 
}, "type": { "type": "string", "enum": [ "request" ] }, "command": { "type": "string", "enum": [ "setExpression" ] }, "arguments": { "type": "SetExpressionArguments" } } __refs__ = set(['arguments']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, arguments, seq=-1, **kwargs): """ :param string type: :param string command: :param SetExpressionArguments arguments: :param integer seq: Sequence number. """ self.type = 'request' self.command = 'setExpression' if arguments is None: self.arguments = SetExpressionArguments() else: self.arguments = SetExpressionArguments(**arguments) if arguments.__class__ != SetExpressionArguments else arguments self.seq = seq self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'command': self.command, 'arguments': self.arguments.to_dict(), 'seq': self.seq, } dct.update(self.kwargs) return dct @register class SetExpressionArguments(BaseSchema): """ Arguments for 'setExpression' request. Note: automatically generated code. Do not edit manually. """ __props__ = { "expression": { "type": "string", "description": "The l-value expression to assign to." }, "value": { "type": "string", "description": "The value expression to assign to the l-value expression." }, "frameId": { "type": "integer", "description": "Evaluate the expressions in the scope of this stack frame. If not specified, the expressions are evaluated in the global scope." }, "format": { "description": "Specifies how the resulting value should be formatted.", "type": "ValueFormat" } } __refs__ = set(['format']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, expression, value, frameId=None, format=None, **kwargs): """ :param string expression: The l-value expression to assign to. :param string value: The value expression to assign to the l-value expression. :param integer frameId: Evaluate the expressions in the scope of this stack frame. If not specified, the expressions are evaluated in the global scope. 
@register_response('setExpression')
@register
class SetExpressionResponse(BaseSchema):
    """
    Response to 'setExpression' request.

    Carries the new value of the assigned expression in its body (see the
    'body' entry of __props__ for the full wire schema).

    Note: automatically generated code.
    Do not edit manually.
    """

    # JSON schema of the wire format; used by BaseSchema for (de)serialization.
    __props__ = {
        "seq": {
            "type": "integer",
            "description": "Sequence number."
        },
        "type": {
            "type": "string",
            "enum": [
                "response"
            ]
        },
        "request_seq": {
            "type": "integer",
            "description": "Sequence number of the corresponding request."
        },
        "success": {
            "type": "boolean",
            "description": "Outcome of the request."
        },
        "command": {
            "type": "string",
            "description": "The command requested."
        },
        "message": {
            "type": "string",
            "description": "Contains error message if success == false."
        },
        "body": {
            "type": "object",
            "properties": {
                "value": {
                    "type": "string",
                    "description": "The new value of the expression."
                },
                "type": {
                    "type": "string",
                    "description": "The optional type of the value."
                },
                "presentationHint": {
                    "$ref": "#/definitions/VariablePresentationHint",
                    "description": "Properties of a value that can be used to determine how to render the result in the UI."
                },
                "variablesReference": {
                    "type": "number",
                    "description": "If variablesReference is > 0, the value is structured and its children can be retrieved by passing variablesReference to the VariablesRequest."
                },
                "namedVariables": {
                    "type": "number",
                    "description": "The number of named child variables.\nThe client can use this optional information to present the variables in a paged UI and fetch them in chunks."
                },
                "indexedVariables": {
                    "type": "number",
                    "description": "The number of indexed child variables.\nThe client can use this optional information to present the variables in a paged UI and fetch them in chunks."
                }
            },
            "required": [
                "value"
            ]
        }
    }
    __refs__ = set(['body'])

    __slots__ = list(__props__.keys()) + ['kwargs']

    def __init__(self, request_seq, success, command, body, seq=-1, message=None, **kwargs):
        """
        :param string type: Always 'response'.
        :param integer request_seq: Sequence number of the corresponding request.
        :param boolean success: Outcome of the request.
        :param string command: The command requested.
        :param SetExpressionResponseBody body: Response body; a plain dict is
            promoted to SetExpressionResponseBody, an instance is kept as-is.
        :param integer seq: Sequence number.
        :param string message: Contains error message if success == false.
        """
        self.type = 'response'
        self.request_seq = request_seq
        self.success = success
        self.command = command
        # Promote a plain dict (wire payload) to the typed body; pass an
        # existing SetExpressionResponseBody instance through unchanged.
        if body is None:
            self.body = SetExpressionResponseBody()
        else:
            self.body = SetExpressionResponseBody(**body) if body.__class__ != SetExpressionResponseBody else body
        self.seq = seq
        self.message = message
        self.kwargs = kwargs

    def to_dict(self):
        # Serialize to the wire format; 'message' is emitted only when set,
        # and unknown extra attributes (kwargs) are merged last.
        dct = {
            'type': self.type,
            'request_seq': self.request_seq,
            'success': self.success,
            'command': self.command,
            'body': self.body.to_dict(),
            'seq': self.seq,
        }
        if self.message is not None:
            dct['message'] = self.message
        dct.update(self.kwargs)
        return dct
""" __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "request" ] }, "command": { "type": "string", "enum": [ "stepInTargets" ] }, "arguments": { "type": "StepInTargetsArguments" } } __refs__ = set(['arguments']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, arguments, seq=-1, **kwargs): """ :param string type: :param string command: :param StepInTargetsArguments arguments: :param integer seq: Sequence number. """ self.type = 'request' self.command = 'stepInTargets' if arguments is None: self.arguments = StepInTargetsArguments() else: self.arguments = StepInTargetsArguments(**arguments) if arguments.__class__ != StepInTargetsArguments else arguments self.seq = seq self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'command': self.command, 'arguments': self.arguments.to_dict(), 'seq': self.seq, } dct.update(self.kwargs) return dct @register class StepInTargetsArguments(BaseSchema): """ Arguments for 'stepInTargets' request. Note: automatically generated code. Do not edit manually. """ __props__ = { "frameId": { "type": "integer", "description": "The stack frame for which to retrieve the possible stepIn targets." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, frameId, **kwargs): """ :param integer frameId: The stack frame for which to retrieve the possible stepIn targets. """ self.frameId = frameId self.kwargs = kwargs def to_dict(self): dct = { 'frameId': self.frameId, } dct.update(self.kwargs) return dct @register_response('stepInTargets') @register class StepInTargetsResponse(BaseSchema): """ Response to 'stepInTargets' request. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "response" ] }, "request_seq": { "type": "integer", "description": "Sequence number of the corresponding request." 
@register_request('gotoTargets')
@register
class GotoTargetsRequest(BaseSchema):
    """
    Request that retrieves the possible goto targets for the specified source
    location; the returned targets can then be used in a 'goto' request.
    Clients may only send this request when the 'supportsGotoTargetsRequest'
    capability exists and is true.

    Note: automatically generated code.
    Do not edit manually.
    """

    __props__ = {
        "seq": {
            "type": "integer",
            "description": "Sequence number."
        },
        "type": {
            "type": "string",
            "enum": [
                "request"
            ]
        },
        "command": {
            "type": "string",
            "enum": [
                "gotoTargets"
            ]
        },
        "arguments": {
            "type": "GotoTargetsArguments"
        }
    }
    __refs__ = set(['arguments'])

    __slots__ = list(__props__.keys()) + ['kwargs']

    def __init__(self, arguments, seq=-1, **kwargs):
        """
        :param string type: Always 'request'.
        :param string command: Always 'gotoTargets'.
        :param GotoTargetsArguments arguments: Request arguments; accepts a
            ready-made instance or a plain dict as received over the wire.
        :param integer seq: Sequence number.
        """
        self.type = 'request'
        self.command = 'gotoTargets'
        # Promote a plain dict (wire payload) to GotoTargetsArguments;
        # pass an existing instance through unchanged.
        if arguments.__class__ == GotoTargetsArguments:
            self.arguments = arguments
        elif arguments is None:
            self.arguments = GotoTargetsArguments()
        else:
            self.arguments = GotoTargetsArguments(**arguments)
        self.seq = seq
        self.kwargs = kwargs

    def to_dict(self):
        # Serialize to the wire format; unknown extra attributes (kwargs) are
        # merged last so they survive a round-trip.
        dct = {
            'type': self.type,
            'command': self.command,
            'arguments': self.arguments.to_dict(),
            'seq': self.seq,
        }
        dct.update(self.kwargs)
        return dct
""" if source is None: self.source = Source() else: self.source = Source(**source) if source.__class__ != Source else source self.line = line self.column = column self.kwargs = kwargs def to_dict(self): dct = { 'source': self.source.to_dict(), 'line': self.line, } if self.column is not None: dct['column'] = self.column dct.update(self.kwargs) return dct @register_response('gotoTargets') @register class GotoTargetsResponse(BaseSchema): """ Response to 'gotoTargets' request. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "response" ] }, "request_seq": { "type": "integer", "description": "Sequence number of the corresponding request." }, "success": { "type": "boolean", "description": "Outcome of the request." }, "command": { "type": "string", "description": "The command requested." }, "message": { "type": "string", "description": "Contains error message if success == false." }, "body": { "type": "object", "properties": { "targets": { "type": "array", "items": { "$ref": "#/definitions/GotoTarget" }, "description": "The possible goto targets of the specified location." } }, "required": [ "targets" ] } } __refs__ = set(['body']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, request_seq, success, command, body, seq=-1, message=None, **kwargs): """ :param string type: :param integer request_seq: Sequence number of the corresponding request. :param boolean success: Outcome of the request. :param string command: The command requested. :param GotoTargetsResponseBody body: :param integer seq: Sequence number. :param string message: Contains error message if success == false. 
""" self.type = 'response' self.request_seq = request_seq self.success = success self.command = command if body is None: self.body = GotoTargetsResponseBody() else: self.body = GotoTargetsResponseBody(**body) if body.__class__ != GotoTargetsResponseBody else body self.seq = seq self.message = message self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'request_seq': self.request_seq, 'success': self.success, 'command': self.command, 'body': self.body.to_dict(), 'seq': self.seq, } if self.message is not None: dct['message'] = self.message dct.update(self.kwargs) return dct @register_request('completions') @register class CompletionsRequest(BaseSchema): """ Returns a list of possible completions for a given caret position and text. The CompletionsRequest may only be called if the 'supportsCompletionsRequest' capability exists and is true. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "request" ] }, "command": { "type": "string", "enum": [ "completions" ] }, "arguments": { "type": "CompletionsArguments" } } __refs__ = set(['arguments']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, arguments, seq=-1, **kwargs): """ :param string type: :param string command: :param CompletionsArguments arguments: :param integer seq: Sequence number. """ self.type = 'request' self.command = 'completions' if arguments is None: self.arguments = CompletionsArguments() else: self.arguments = CompletionsArguments(**arguments) if arguments.__class__ != CompletionsArguments else arguments self.seq = seq self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'command': self.command, 'arguments': self.arguments.to_dict(), 'seq': self.seq, } dct.update(self.kwargs) return dct @register class CompletionsArguments(BaseSchema): """ Arguments for 'completions' request. Note: automatically generated code. 
Do not edit manually. """ __props__ = { "frameId": { "type": "integer", "description": "Returns completions in the scope of this stack frame. If not specified, the completions are returned for the global scope." }, "text": { "type": "string", "description": "One or more source lines. Typically this is the text a user has typed into the debug console before he asked for completion." }, "column": { "type": "integer", "description": "The character position for which to determine the completion proposals." }, "line": { "type": "integer", "description": "An optional line for which to determine the completion proposals. If missing the first line of the text is assumed." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, text, column, frameId=None, line=None, **kwargs): """ :param string text: One or more source lines. Typically this is the text a user has typed into the debug console before he asked for completion. :param integer column: The character position for which to determine the completion proposals. :param integer frameId: Returns completions in the scope of this stack frame. If not specified, the completions are returned for the global scope. :param integer line: An optional line for which to determine the completion proposals. If missing the first line of the text is assumed. """ self.text = text self.column = column self.frameId = frameId self.line = line self.kwargs = kwargs def to_dict(self): dct = { 'text': self.text, 'column': self.column, } if self.frameId is not None: dct['frameId'] = self.frameId if self.line is not None: dct['line'] = self.line dct.update(self.kwargs) return dct @register_response('completions') @register class CompletionsResponse(BaseSchema): """ Response to 'completions' request. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." 
}, "type": { "type": "string", "enum": [ "response" ] }, "request_seq": { "type": "integer", "description": "Sequence number of the corresponding request." }, "success": { "type": "boolean", "description": "Outcome of the request." }, "command": { "type": "string", "description": "The command requested." }, "message": { "type": "string", "description": "Contains error message if success == false." }, "body": { "type": "object", "properties": { "targets": { "type": "array", "items": { "$ref": "#/definitions/CompletionItem" }, "description": "The possible completions for ." } }, "required": [ "targets" ] } } __refs__ = set(['body']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, request_seq, success, command, body, seq=-1, message=None, **kwargs): """ :param string type: :param integer request_seq: Sequence number of the corresponding request. :param boolean success: Outcome of the request. :param string command: The command requested. :param CompletionsResponseBody body: :param integer seq: Sequence number. :param string message: Contains error message if success == false. """ self.type = 'response' self.request_seq = request_seq self.success = success self.command = command if body is None: self.body = CompletionsResponseBody() else: self.body = CompletionsResponseBody(**body) if body.__class__ != CompletionsResponseBody else body self.seq = seq self.message = message self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'request_seq': self.request_seq, 'success': self.success, 'command': self.command, 'body': self.body.to_dict(), 'seq': self.seq, } if self.message is not None: dct['message'] = self.message dct.update(self.kwargs) return dct @register_request('exceptionInfo') @register class ExceptionInfoRequest(BaseSchema): """ Retrieves the details of the exception that caused this event to be raised. Note: automatically generated code. Do not edit manually. 
""" __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "request" ] }, "command": { "type": "string", "enum": [ "exceptionInfo" ] }, "arguments": { "type": "ExceptionInfoArguments" } } __refs__ = set(['arguments']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, arguments, seq=-1, **kwargs): """ :param string type: :param string command: :param ExceptionInfoArguments arguments: :param integer seq: Sequence number. """ self.type = 'request' self.command = 'exceptionInfo' if arguments is None: self.arguments = ExceptionInfoArguments() else: self.arguments = ExceptionInfoArguments(**arguments) if arguments.__class__ != ExceptionInfoArguments else arguments self.seq = seq self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'command': self.command, 'arguments': self.arguments.to_dict(), 'seq': self.seq, } dct.update(self.kwargs) return dct @register class ExceptionInfoArguments(BaseSchema): """ Arguments for 'exceptionInfo' request. Note: automatically generated code. Do not edit manually. """ __props__ = { "threadId": { "type": "integer", "description": "Thread for which exception information should be retrieved." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, threadId, **kwargs): """ :param integer threadId: Thread for which exception information should be retrieved. """ self.threadId = threadId self.kwargs = kwargs def to_dict(self): dct = { 'threadId': self.threadId, } dct.update(self.kwargs) return dct @register_response('exceptionInfo') @register class ExceptionInfoResponse(BaseSchema): """ Response to 'exceptionInfo' request. Note: automatically generated code. Do not edit manually. """ __props__ = { "seq": { "type": "integer", "description": "Sequence number." }, "type": { "type": "string", "enum": [ "response" ] }, "request_seq": { "type": "integer", "description": "Sequence number of the corresponding request." 
}, "success": { "type": "boolean", "description": "Outcome of the request." }, "command": { "type": "string", "description": "The command requested." }, "message": { "type": "string", "description": "Contains error message if success == false." }, "body": { "type": "object", "properties": { "exceptionId": { "type": "string", "description": "ID of the exception that was thrown." }, "description": { "type": "string", "description": "Descriptive text for the exception provided by the debug adapter." }, "breakMode": { "$ref": "#/definitions/ExceptionBreakMode", "description": "Mode that caused the exception notification to be raised." }, "details": { "$ref": "#/definitions/ExceptionDetails", "description": "Detailed information about the exception." } }, "required": [ "exceptionId", "breakMode" ] } } __refs__ = set(['body']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, request_seq, success, command, body, seq=-1, message=None, **kwargs): """ :param string type: :param integer request_seq: Sequence number of the corresponding request. :param boolean success: Outcome of the request. :param string command: The command requested. :param ExceptionInfoResponseBody body: :param integer seq: Sequence number. :param string message: Contains error message if success == false. 
""" self.type = 'response' self.request_seq = request_seq self.success = success self.command = command if body is None: self.body = ExceptionInfoResponseBody() else: self.body = ExceptionInfoResponseBody(**body) if body.__class__ != ExceptionInfoResponseBody else body self.seq = seq self.message = message self.kwargs = kwargs def to_dict(self): dct = { 'type': self.type, 'request_seq': self.request_seq, 'success': self.success, 'command': self.command, 'body': self.body.to_dict(), 'seq': self.seq, } if self.message is not None: dct['message'] = self.message dct.update(self.kwargs) return dct @register class Capabilities(BaseSchema): """ Information about the capabilities of a debug adapter. Note: automatically generated code. Do not edit manually. """ __props__ = { "supportsConfigurationDoneRequest": { "type": "boolean", "description": "The debug adapter supports the 'configurationDone' request." }, "supportsFunctionBreakpoints": { "type": "boolean", "description": "The debug adapter supports function breakpoints." }, "supportsConditionalBreakpoints": { "type": "boolean", "description": "The debug adapter supports conditional breakpoints." }, "supportsHitConditionalBreakpoints": { "type": "boolean", "description": "The debug adapter supports breakpoints that break execution after a specified number of hits." }, "supportsEvaluateForHovers": { "type": "boolean", "description": "The debug adapter supports a (side effect free) evaluate request for data hovers." }, "exceptionBreakpointFilters": { "type": "array", "items": { "$ref": "#/definitions/ExceptionBreakpointsFilter" }, "description": "Available filters or options for the setExceptionBreakpoints request." }, "supportsStepBack": { "type": "boolean", "description": "The debug adapter supports stepping back via the 'stepBack' and 'reverseContinue' requests." }, "supportsSetVariable": { "type": "boolean", "description": "The debug adapter supports setting a variable to a value." 
}, "supportsRestartFrame": { "type": "boolean", "description": "The debug adapter supports restarting a frame." }, "supportsGotoTargetsRequest": { "type": "boolean", "description": "The debug adapter supports the 'gotoTargets' request." }, "supportsStepInTargetsRequest": { "type": "boolean", "description": "The debug adapter supports the 'stepInTargets' request." }, "supportsCompletionsRequest": { "type": "boolean", "description": "The debug adapter supports the 'completions' request." }, "supportsModulesRequest": { "type": "boolean", "description": "The debug adapter supports the 'modules' request." }, "additionalModuleColumns": { "type": "array", "items": { "$ref": "#/definitions/ColumnDescriptor" }, "description": "The set of additional module information exposed by the debug adapter." }, "supportedChecksumAlgorithms": { "type": "array", "items": { "$ref": "#/definitions/ChecksumAlgorithm" }, "description": "Checksum algorithms supported by the debug adapter." }, "supportsRestartRequest": { "type": "boolean", "description": "The debug adapter supports the 'restart' request. In this case a client should not implement 'restart' by terminating and relaunching the adapter but by calling the RestartRequest." }, "supportsExceptionOptions": { "type": "boolean", "description": "The debug adapter supports 'exceptionOptions' on the setExceptionBreakpoints request." }, "supportsValueFormattingOptions": { "type": "boolean", "description": "The debug adapter supports a 'format' attribute on the stackTraceRequest, variablesRequest, and evaluateRequest." }, "supportsExceptionInfoRequest": { "type": "boolean", "description": "The debug adapter supports the 'exceptionInfo' request." }, "supportTerminateDebuggee": { "type": "boolean", "description": "The debug adapter supports the 'terminateDebuggee' attribute on the 'disconnect' request." 
}, "supportsDelayedStackTraceLoading": { "type": "boolean", "description": "The debug adapter supports the delayed loading of parts of the stack, which requires that both the 'startFrame' and 'levels' arguments and the 'totalFrames' result of the 'StackTrace' request are supported." }, "supportsLoadedSourcesRequest": { "type": "boolean", "description": "The debug adapter supports the 'loadedSources' request." }, "supportsLogPoints": { "type": "boolean", "description": "The debug adapter supports logpoints by interpreting the 'logMessage' attribute of the SourceBreakpoint." }, "supportsTerminateThreadsRequest": { "type": "boolean", "description": "The debug adapter supports the 'terminateThreads' request." }, "supportsSetExpression": { "type": "boolean", "description": "The debug adapter supports the 'setExpression' request." }, "supportsTerminateRequest": { "type": "boolean", "description": "The debug adapter supports the 'terminate' request." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, supportsConfigurationDoneRequest=None, supportsFunctionBreakpoints=None, supportsConditionalBreakpoints=None, supportsHitConditionalBreakpoints=None, supportsEvaluateForHovers=None, exceptionBreakpointFilters=None, supportsStepBack=None, supportsSetVariable=None, supportsRestartFrame=None, supportsGotoTargetsRequest=None, supportsStepInTargetsRequest=None, supportsCompletionsRequest=None, supportsModulesRequest=None, additionalModuleColumns=None, supportedChecksumAlgorithms=None, supportsRestartRequest=None, supportsExceptionOptions=None, supportsValueFormattingOptions=None, supportsExceptionInfoRequest=None, supportTerminateDebuggee=None, supportsDelayedStackTraceLoading=None, supportsLoadedSourcesRequest=None, supportsLogPoints=None, supportsTerminateThreadsRequest=None, supportsSetExpression=None, supportsTerminateRequest=None, **kwargs): """ :param boolean supportsConfigurationDoneRequest: The debug adapter supports the 
'configurationDone' request. :param boolean supportsFunctionBreakpoints: The debug adapter supports function breakpoints. :param boolean supportsConditionalBreakpoints: The debug adapter supports conditional breakpoints. :param boolean supportsHitConditionalBreakpoints: The debug adapter supports breakpoints that break execution after a specified number of hits. :param boolean supportsEvaluateForHovers: The debug adapter supports a (side effect free) evaluate request for data hovers. :param array exceptionBreakpointFilters: Available filters or options for the setExceptionBreakpoints request. :param boolean supportsStepBack: The debug adapter supports stepping back via the 'stepBack' and 'reverseContinue' requests. :param boolean supportsSetVariable: The debug adapter supports setting a variable to a value. :param boolean supportsRestartFrame: The debug adapter supports restarting a frame. :param boolean supportsGotoTargetsRequest: The debug adapter supports the 'gotoTargets' request. :param boolean supportsStepInTargetsRequest: The debug adapter supports the 'stepInTargets' request. :param boolean supportsCompletionsRequest: The debug adapter supports the 'completions' request. :param boolean supportsModulesRequest: The debug adapter supports the 'modules' request. :param array additionalModuleColumns: The set of additional module information exposed by the debug adapter. :param array supportedChecksumAlgorithms: Checksum algorithms supported by the debug adapter. :param boolean supportsRestartRequest: The debug adapter supports the 'restart' request. In this case a client should not implement 'restart' by terminating and relaunching the adapter but by calling the RestartRequest. :param boolean supportsExceptionOptions: The debug adapter supports 'exceptionOptions' on the setExceptionBreakpoints request. :param boolean supportsValueFormattingOptions: The debug adapter supports a 'format' attribute on the stackTraceRequest, variablesRequest, and evaluateRequest. 
:param boolean supportsExceptionInfoRequest: The debug adapter supports the 'exceptionInfo' request. :param boolean supportTerminateDebuggee: The debug adapter supports the 'terminateDebuggee' attribute on the 'disconnect' request. :param boolean supportsDelayedStackTraceLoading: The debug adapter supports the delayed loading of parts of the stack, which requires that both the 'startFrame' and 'levels' arguments and the 'totalFrames' result of the 'StackTrace' request are supported. :param boolean supportsLoadedSourcesRequest: The debug adapter supports the 'loadedSources' request. :param boolean supportsLogPoints: The debug adapter supports logpoints by interpreting the 'logMessage' attribute of the SourceBreakpoint. :param boolean supportsTerminateThreadsRequest: The debug adapter supports the 'terminateThreads' request. :param boolean supportsSetExpression: The debug adapter supports the 'setExpression' request. :param boolean supportsTerminateRequest: The debug adapter supports the 'terminate' request. 
""" self.supportsConfigurationDoneRequest = supportsConfigurationDoneRequest self.supportsFunctionBreakpoints = supportsFunctionBreakpoints self.supportsConditionalBreakpoints = supportsConditionalBreakpoints self.supportsHitConditionalBreakpoints = supportsHitConditionalBreakpoints self.supportsEvaluateForHovers = supportsEvaluateForHovers self.exceptionBreakpointFilters = exceptionBreakpointFilters self.supportsStepBack = supportsStepBack self.supportsSetVariable = supportsSetVariable self.supportsRestartFrame = supportsRestartFrame self.supportsGotoTargetsRequest = supportsGotoTargetsRequest self.supportsStepInTargetsRequest = supportsStepInTargetsRequest self.supportsCompletionsRequest = supportsCompletionsRequest self.supportsModulesRequest = supportsModulesRequest self.additionalModuleColumns = additionalModuleColumns self.supportedChecksumAlgorithms = supportedChecksumAlgorithms self.supportsRestartRequest = supportsRestartRequest self.supportsExceptionOptions = supportsExceptionOptions self.supportsValueFormattingOptions = supportsValueFormattingOptions self.supportsExceptionInfoRequest = supportsExceptionInfoRequest self.supportTerminateDebuggee = supportTerminateDebuggee self.supportsDelayedStackTraceLoading = supportsDelayedStackTraceLoading self.supportsLoadedSourcesRequest = supportsLoadedSourcesRequest self.supportsLogPoints = supportsLogPoints self.supportsTerminateThreadsRequest = supportsTerminateThreadsRequest self.supportsSetExpression = supportsSetExpression self.supportsTerminateRequest = supportsTerminateRequest self.kwargs = kwargs def to_dict(self): dct = { } if self.supportsConfigurationDoneRequest is not None: dct['supportsConfigurationDoneRequest'] = self.supportsConfigurationDoneRequest if self.supportsFunctionBreakpoints is not None: dct['supportsFunctionBreakpoints'] = self.supportsFunctionBreakpoints if self.supportsConditionalBreakpoints is not None: dct['supportsConditionalBreakpoints'] = self.supportsConditionalBreakpoints if 
self.supportsHitConditionalBreakpoints is not None: dct['supportsHitConditionalBreakpoints'] = self.supportsHitConditionalBreakpoints if self.supportsEvaluateForHovers is not None: dct['supportsEvaluateForHovers'] = self.supportsEvaluateForHovers if self.exceptionBreakpointFilters is not None: dct['exceptionBreakpointFilters'] = self.exceptionBreakpointFilters if self.supportsStepBack is not None: dct['supportsStepBack'] = self.supportsStepBack if self.supportsSetVariable is not None: dct['supportsSetVariable'] = self.supportsSetVariable if self.supportsRestartFrame is not None: dct['supportsRestartFrame'] = self.supportsRestartFrame if self.supportsGotoTargetsRequest is not None: dct['supportsGotoTargetsRequest'] = self.supportsGotoTargetsRequest if self.supportsStepInTargetsRequest is not None: dct['supportsStepInTargetsRequest'] = self.supportsStepInTargetsRequest if self.supportsCompletionsRequest is not None: dct['supportsCompletionsRequest'] = self.supportsCompletionsRequest if self.supportsModulesRequest is not None: dct['supportsModulesRequest'] = self.supportsModulesRequest if self.additionalModuleColumns is not None: dct['additionalModuleColumns'] = self.additionalModuleColumns if self.supportedChecksumAlgorithms is not None: dct['supportedChecksumAlgorithms'] = self.supportedChecksumAlgorithms if self.supportsRestartRequest is not None: dct['supportsRestartRequest'] = self.supportsRestartRequest if self.supportsExceptionOptions is not None: dct['supportsExceptionOptions'] = self.supportsExceptionOptions if self.supportsValueFormattingOptions is not None: dct['supportsValueFormattingOptions'] = self.supportsValueFormattingOptions if self.supportsExceptionInfoRequest is not None: dct['supportsExceptionInfoRequest'] = self.supportsExceptionInfoRequest if self.supportTerminateDebuggee is not None: dct['supportTerminateDebuggee'] = self.supportTerminateDebuggee if self.supportsDelayedStackTraceLoading is not None: dct['supportsDelayedStackTraceLoading'] = 
@register
class ExceptionBreakpointsFilter(BaseSchema):
    """
    An ExceptionBreakpointsFilter is shown in the UI as an option for configuring how exceptions are
    dealt with.

    Note: automatically generated code. Do not edit manually.
    """

    __props__ = {
        "filter": {
            "type": "string",
            "description": "The internal ID of the filter. This value is passed to the setExceptionBreakpoints request."
        },
        "label": {
            "type": "string",
            "description": "The name of the filter. This will be shown in the UI."
        },
        "default": {
            "type": "boolean",
            "description": "Initial value of the filter. If not specified a value 'false' is assumed."
        }
    }
    __refs__ = set()

    __slots__ = list(__props__.keys()) + ['kwargs']

    def __init__(self, filter, label, default=None, **kwargs):
        """
        :param string filter: The internal ID of the filter. This value is passed to the setExceptionBreakpoints request.
        :param string label: The name of the filter. This will be shown in the UI.
        :param boolean default: Initial value of the filter. If not specified a value 'false' is assumed.
        """
        self.filter = filter
        self.label = label
        self.default = default
        self.kwargs = kwargs

    def to_dict(self):
        # Mandatory properties first, then optionals only when set, then any
        # extra keyword arguments captured at construction time.
        dct = {
            'filter': self.filter,
            'label': self.label,
        }
        if self.default is not None:
            dct['default'] = self.default
        dct.update(self.kwargs)
        return dct
@register
class Message(BaseSchema):
    """
    A structured message object. Used to return errors from requests.

    Note: automatically generated code. Do not edit manually.
    """

    __props__ = {
        "id": {
            "type": "integer",
            "description": "Unique identifier for the message."
        },
        "format": {
            "type": "string",
            "description": "A format string for the message. Embedded variables have the form '{name}'.\nIf variable name starts with an underscore character, the variable does not contain user data (PII) and can be safely used for telemetry purposes."
        },
        "variables": {
            "type": "object",
            "description": "An object used as a dictionary for looking up the variables in the format string.",
            "additionalProperties": {
                "type": "string",
                "description": "Values must be strings."
            }
        },
        "sendTelemetry": {
            "type": "boolean",
            "description": "If true send to telemetry."
        },
        "showUser": {
            "type": "boolean",
            "description": "If true show user."
        },
        "url": {
            "type": "string",
            "description": "An optional url where additional information about this message can be found."
        },
        "urlLabel": {
            "type": "string",
            "description": "An optional label that is presented to the user as the UI for opening the url."
        }
    }
    __refs__ = set(['variables'])

    __slots__ = list(__props__.keys()) + ['kwargs']

    def __init__(self, id, format, variables=None, sendTelemetry=None, showUser=None, url=None, urlLabel=None, **kwargs):
        """
        :param integer id: Unique identifier for the message.
        :param string format: A format string for the message. Embedded variables have the form '{name}'.
        If variable name starts with an underscore character, the variable does not contain user data (PII) and can be safely used for telemetry purposes.
        :param MessageVariables variables: An object used as a dictionary for looking up the variables in the format string.
        :param boolean sendTelemetry: If true send to telemetry.
        :param boolean showUser: If true show user.
        :param string url: An optional url where additional information about this message can be found.
        :param string urlLabel: An optional label that is presented to the user as the UI for opening the url.
        """
        self.id = id
        self.format = format
        # Accept either an already-built MessageVariables instance or a plain
        # dict coming straight from the wire; normalize to MessageVariables.
        if variables is None:
            self.variables = MessageVariables()
        else:
            self.variables = MessageVariables(**variables) if variables.__class__ != MessageVariables else variables
        self.sendTelemetry = sendTelemetry
        self.showUser = showUser
        self.url = url
        self.urlLabel = urlLabel
        self.kwargs = kwargs

    def to_dict(self):
        dct = {
            'id': self.id,
            'format': self.format,
        }
        if self.variables is not None:
            dct['variables'] = self.variables.to_dict()
        if self.sendTelemetry is not None:
            dct['sendTelemetry'] = self.sendTelemetry
        if self.showUser is not None:
            dct['showUser'] = self.showUser
        if self.url is not None:
            dct['url'] = self.url
        if self.urlLabel is not None:
            dct['urlLabel'] = self.urlLabel
        dct.update(self.kwargs)
        return dct
@register
class Module(BaseSchema):
    """
    A Module object represents a row in the modules view.

    Two attributes are mandatory: an id identifies a module in the modules view and is used in a
    ModuleEvent for identifying a module for adding, updating or deleting.

    The name is used to minimally render the module in the UI.

    Additional attributes can be added to the module. They will show up in the module View if they
    have a corresponding ColumnDescriptor.

    To avoid an unnecessary proliferation of additional attributes with similar semantics but
    different names we recommend to re-use attributes from the 'recommended' list below first, and
    only introduce new attributes if nothing appropriate could be found.

    Note: automatically generated code. Do not edit manually.
    """

    __props__ = {
        "id": {
            "type": [
                "integer",
                "string"
            ],
            "description": "Unique identifier for the module."
        },
        "name": {
            "type": "string",
            "description": "A name of the module."
        },
        "path": {
            "type": "string",
            "description": "optional but recommended attributes.\nalways try to use these first before introducing additional attributes.\n\nLogical full path to the module. The exact definition is implementation defined, but usually this would be a full path to the on-disk file for the module."
        },
        "isOptimized": {
            "type": "boolean",
            "description": "True if the module is optimized."
        },
        "isUserCode": {
            "type": "boolean",
            "description": "True if the module is considered 'user code' by a debugger that supports 'Just My Code'."
        },
        "version": {
            "type": "string",
            "description": "Version of Module."
        },
        "symbolStatus": {
            "type": "string",
            "description": "User understandable description of if symbols were found for the module (ex: 'Symbols Loaded', 'Symbols not found', etc."
        },
        "symbolFilePath": {
            "type": "string",
            "description": "Logical full path to the symbol file. The exact definition is implementation defined."
        },
        "dateTimeStamp": {
            "type": "string",
            "description": "Module created or modified."
        },
        "addressRange": {
            "type": "string",
            "description": "Address range covered by this module."
        }
    }
    __refs__ = set()

    __slots__ = list(__props__.keys()) + ['kwargs']

    def __init__(self, id, name, path=None, isOptimized=None, isUserCode=None, version=None, symbolStatus=None, symbolFilePath=None, dateTimeStamp=None, addressRange=None, **kwargs):
        """
        :param ['integer', 'string'] id: Unique identifier for the module.
        :param string name: A name of the module.
        :param string path: optional but recommended attributes.
        always try to use these first before introducing additional attributes.

        Logical full path to the module. The exact definition is implementation defined, but usually this would be a full path to the on-disk file for the module.
        :param boolean isOptimized: True if the module is optimized.
        :param boolean isUserCode: True if the module is considered 'user code' by a debugger that supports 'Just My Code'.
        :param string version: Version of Module.
        :param string symbolStatus: User understandable description of if symbols were found for the module (ex: 'Symbols Loaded', 'Symbols not found', etc.
        :param string symbolFilePath: Logical full path to the symbol file. The exact definition is implementation defined.
        :param string dateTimeStamp: Module created or modified.
        :param string addressRange: Address range covered by this module.
        """
        self.id = id
        self.name = name
        self.path = path
        self.isOptimized = isOptimized
        self.isUserCode = isUserCode
        self.version = version
        self.symbolStatus = symbolStatus
        self.symbolFilePath = symbolFilePath
        self.dateTimeStamp = dateTimeStamp
        self.addressRange = addressRange
        self.kwargs = kwargs

    def to_dict(self):
        # 'id' and 'name' are the only mandatory module attributes.
        dct = {
            'id': self.id,
            'name': self.name,
        }
        if self.path is not None:
            dct['path'] = self.path
        if self.isOptimized is not None:
            dct['isOptimized'] = self.isOptimized
        if self.isUserCode is not None:
            dct['isUserCode'] = self.isUserCode
        if self.version is not None:
            dct['version'] = self.version
        if self.symbolStatus is not None:
            dct['symbolStatus'] = self.symbolStatus
        if self.symbolFilePath is not None:
            dct['symbolFilePath'] = self.symbolFilePath
        if self.dateTimeStamp is not None:
            dct['dateTimeStamp'] = self.dateTimeStamp
        if self.addressRange is not None:
            dct['addressRange'] = self.addressRange
        dct.update(self.kwargs)
        return dct
@register
class ColumnDescriptor(BaseSchema):
    """
    A ColumnDescriptor specifies what module attribute to show in a column of the ModulesView, how
    to format it, and what the column's label should be.

    It is only used if the underlying UI actually supports this level of customization.

    Note: automatically generated code. Do not edit manually.
    """

    __props__ = {
        "attributeName": {
            "type": "string",
            "description": "Name of the attribute rendered in this column."
        },
        "label": {
            "type": "string",
            "description": "Header UI label of column."
        },
        "format": {
            "type": "string",
            "description": "Format to use for the rendered values in this column. TBD how the format strings looks like."
        },
        "type": {
            "type": "string",
            "enum": [
                "string",
                "number",
                "boolean",
                "unixTimestampUTC"
            ],
            "description": "Datatype of values in this column. Defaults to 'string' if not specified."
        },
        "width": {
            "type": "integer",
            "description": "Width of this column in characters (hint only)."
        }
    }
    __refs__ = set()

    __slots__ = list(__props__.keys()) + ['kwargs']

    def __init__(self, attributeName, label, format=None, type=None, width=None, **kwargs):
        """
        :param string attributeName: Name of the attribute rendered in this column.
        :param string label: Header UI label of column.
        :param string format: Format to use for the rendered values in this column. TBD how the format strings looks like.
        :param string type: Datatype of values in this column. Defaults to 'string' if not specified.
        :param integer width: Width of this column in characters (hint only).
        """
        self.attributeName = attributeName
        self.label = label
        self.format = format
        self.type = type
        self.width = width
        self.kwargs = kwargs

    def to_dict(self):
        dct = {
            'attributeName': self.attributeName,
            'label': self.label,
        }
        if self.format is not None:
            dct['format'] = self.format
        if self.type is not None:
            dct['type'] = self.type
        if self.width is not None:
            dct['width'] = self.width
        dct.update(self.kwargs)
        return dct


@register
class ModulesViewDescriptor(BaseSchema):
    """
    The ModulesViewDescriptor is the container for all declarative configuration options of a
    ModuleView.

    For now it only specifies the columns to be shown in the modules view.

    Note: automatically generated code. Do not edit manually.
    """

    __props__ = {
        "columns": {
            "type": "array",
            "items": {
                "$ref": "#/definitions/ColumnDescriptor"
            }
        }
    }
    __refs__ = set()

    __slots__ = list(__props__.keys()) + ['kwargs']

    def __init__(self, columns, **kwargs):
        """
        :param array columns:
        """
        self.columns = columns
        self.kwargs = kwargs

    def to_dict(self):
        dct = {
            'columns': self.columns,
        }
        dct.update(self.kwargs)
        return dct


@register
class Thread(BaseSchema):
    """
    A Thread

    Note: automatically generated code. Do not edit manually.
    """

    __props__ = {
        "id": {
            "type": "integer",
            "description": "Unique identifier for the thread."
        },
        "name": {
            "type": "string",
            "description": "A name of the thread."
        }
    }
    __refs__ = set()

    __slots__ = list(__props__.keys()) + ['kwargs']

    def __init__(self, id, name, **kwargs):
        """
        :param integer id: Unique identifier for the thread.
        :param string name: A name of the thread.
        """
        self.id = id
        self.name = name
        self.kwargs = kwargs

    def to_dict(self):
        dct = {
            'id': self.id,
            'name': self.name,
        }
        dct.update(self.kwargs)
        return dct
@register
class Source(BaseSchema):
    """
    A Source is a descriptor for source code. It is returned from the debug adapter as part of a
    StackFrame and it is used by clients when specifying breakpoints.

    Note: automatically generated code. Do not edit manually.
    """

    __props__ = {
        "name": {
            "type": "string",
            "description": "The short name of the source. Every source returned from the debug adapter has a name. When sending a source to the debug adapter this name is optional."
        },
        "path": {
            "type": "string",
            "description": "The path of the source to be shown in the UI. It is only used to locate and load the content of the source if no sourceReference is specified (or its value is 0)."
        },
        "sourceReference": {
            "type": "number",
            "description": "If sourceReference > 0 the contents of the source must be retrieved through the SourceRequest (even if a path is specified). A sourceReference is only valid for a session, so it must not be used to persist a source."
        },
        "presentationHint": {
            "type": "string",
            "description": "An optional hint for how to present the source in the UI. A value of 'deemphasize' can be used to indicate that the source is not available or that it is skipped on stepping.",
            "enum": [
                "normal",
                "emphasize",
                "deemphasize"
            ]
        },
        "origin": {
            "type": "string",
            "description": "The (optional) origin of this source: possible values 'internal module', 'inlined content from source map', etc."
        },
        "sources": {
            "type": "array",
            "items": {
                "$ref": "#/definitions/Source"
            },
            "description": "An optional list of sources that are related to this source. These may be the source that generated this source."
        },
        "adapterData": {
            "type": [
                "array",
                "boolean",
                "integer",
                "null",
                "number",
                "object",
                "string"
            ],
            "description": "Optional data that a debug adapter might want to loop through the client. The client should leave the data intact and persist it across sessions. The client should not interpret the data."
        },
        "checksums": {
            "type": "array",
            "items": {
                "$ref": "#/definitions/Checksum"
            },
            "description": "The checksums associated with this file."
        }
    }
    __refs__ = set()

    __slots__ = list(__props__.keys()) + ['kwargs']

    def __init__(self, name=None, path=None, sourceReference=None, presentationHint=None, origin=None, sources=None, adapterData=None, checksums=None, **kwargs):
        """
        :param string name: The short name of the source. Every source returned from the debug adapter has a name. When sending a source to the debug adapter this name is optional.
        :param string path: The path of the source to be shown in the UI. It is only used to locate and load the content of the source if no sourceReference is specified (or its value is 0).
        :param number sourceReference: If sourceReference > 0 the contents of the source must be retrieved through the SourceRequest (even if a path is specified). A sourceReference is only valid for a session, so it must not be used to persist a source.
        :param string presentationHint: An optional hint for how to present the source in the UI. A value of 'deemphasize' can be used to indicate that the source is not available or that it is skipped on stepping.
        :param string origin: The (optional) origin of this source: possible values 'internal module', 'inlined content from source map', etc.
        :param array sources: An optional list of sources that are related to this source. These may be the source that generated this source.
        :param ['array', 'boolean', 'integer', 'null', 'number', 'object', 'string'] adapterData: Optional data that a debug adapter might want to loop through the client. The client should leave the data intact and persist it across sessions. The client should not interpret the data.
        :param array checksums: The checksums associated with this file.
        """
        self.name = name
        self.path = path
        self.sourceReference = sourceReference
        self.presentationHint = presentationHint
        self.origin = origin
        self.sources = sources
        self.adapterData = adapterData
        self.checksums = checksums
        self.kwargs = kwargs

    def to_dict(self):
        # Every property of Source is optional, so the base dict starts empty.
        dct = {
        }
        if self.name is not None:
            dct['name'] = self.name
        if self.path is not None:
            dct['path'] = self.path
        if self.sourceReference is not None:
            dct['sourceReference'] = self.sourceReference
        if self.presentationHint is not None:
            dct['presentationHint'] = self.presentationHint
        if self.origin is not None:
            dct['origin'] = self.origin
        if self.sources is not None:
            dct['sources'] = self.sources
        if self.adapterData is not None:
            dct['adapterData'] = self.adapterData
        if self.checksums is not None:
            dct['checksums'] = self.checksums
        dct.update(self.kwargs)
        return dct
@register
class StackFrame(BaseSchema):
    """
    A Stackframe contains the source location.

    Note: automatically generated code. Do not edit manually.
    """

    __props__ = {
        "id": {
            "type": "integer",
            "description": "An identifier for the stack frame. It must be unique across all threads. This id can be used to retrieve the scopes of the frame with the 'scopesRequest' or to restart the execution of a stackframe."
        },
        "name": {
            "type": "string",
            "description": "The name of the stack frame, typically a method name."
        },
        "source": {
            "description": "The optional source of the frame.",
            "type": "Source"
        },
        "line": {
            "type": "integer",
            "description": "The line within the file of the frame. If source is null or doesn't exist, line is 0 and must be ignored."
        },
        "column": {
            "type": "integer",
            "description": "The column within the line. If source is null or doesn't exist, column is 0 and must be ignored."
        },
        "endLine": {
            "type": "integer",
            "description": "An optional end line of the range covered by the stack frame."
        },
        "endColumn": {
            "type": "integer",
            "description": "An optional end column of the range covered by the stack frame."
        },
        "moduleId": {
            "type": [
                "integer",
                "string"
            ],
            "description": "The module associated with this frame, if any."
        },
        "presentationHint": {
            "type": "string",
            "enum": [
                "normal",
                "label",
                "subtle"
            ],
            "description": "An optional hint for how to present this frame in the UI. A value of 'label' can be used to indicate that the frame is an artificial frame that is used as a visual label or separator. A value of 'subtle' can be used to change the appearance of a frame in a 'subtle' way."
        }
    }
    __refs__ = set(['source'])

    __slots__ = list(__props__.keys()) + ['kwargs']

    def __init__(self, id, name, line, column, source=None, endLine=None, endColumn=None, moduleId=None, presentationHint=None, **kwargs):
        """
        :param integer id: An identifier for the stack frame. It must be unique across all threads. This id can be used to retrieve the scopes of the frame with the 'scopesRequest' or to restart the execution of a stackframe.
        :param string name: The name of the stack frame, typically a method name.
        :param integer line: The line within the file of the frame. If source is null or doesn't exist, line is 0 and must be ignored.
        :param integer column: The column within the line. If source is null or doesn't exist, column is 0 and must be ignored.
        :param Source source: The optional source of the frame.
        :param integer endLine: An optional end line of the range covered by the stack frame.
        :param integer endColumn: An optional end column of the range covered by the stack frame.
        :param ['integer', 'string'] moduleId: The module associated with this frame, if any.
        :param string presentationHint: An optional hint for how to present this frame in the UI. A value of 'label' can be used to indicate that the frame is an artificial frame that is used as a visual label or separator. A value of 'subtle' can be used to change the appearance of a frame in a 'subtle' way.
        """
        self.id = id
        self.name = name
        self.line = line
        self.column = column
        # Accept either a Source instance or a raw dict from the wire.
        if source is None:
            self.source = Source()
        else:
            self.source = Source(**source) if source.__class__ != Source else source
        self.endLine = endLine
        self.endColumn = endColumn
        self.moduleId = moduleId
        self.presentationHint = presentationHint
        self.kwargs = kwargs

    def to_dict(self):
        dct = {
            'id': self.id,
            'name': self.name,
            'line': self.line,
            'column': self.column,
        }
        if self.source is not None:
            dct['source'] = self.source.to_dict()
        if self.endLine is not None:
            dct['endLine'] = self.endLine
        if self.endColumn is not None:
            dct['endColumn'] = self.endColumn
        if self.moduleId is not None:
            dct['moduleId'] = self.moduleId
        if self.presentationHint is not None:
            dct['presentationHint'] = self.presentationHint
        dct.update(self.kwargs)
        return dct
@register
class Scope(BaseSchema):
    """
    A Scope is a named container for variables. Optionally a scope can map to a source or a range
    within a source.

    Note: automatically generated code. Do not edit manually.
    """

    __props__ = {
        "name": {
            "type": "string",
            "description": "Name of the scope such as 'Arguments', 'Locals'."
        },
        "variablesReference": {
            "type": "integer",
            "description": "The variables of this scope can be retrieved by passing the value of variablesReference to the VariablesRequest."
        },
        "namedVariables": {
            "type": "integer",
            "description": "The number of named variables in this scope.\nThe client can use this optional information to present the variables in a paged UI and fetch them in chunks."
        },
        "indexedVariables": {
            "type": "integer",
            "description": "The number of indexed variables in this scope.\nThe client can use this optional information to present the variables in a paged UI and fetch them in chunks."
        },
        "expensive": {
            "type": "boolean",
            "description": "If true, the number of variables in this scope is large or expensive to retrieve."
        },
        "source": {
            "description": "Optional source for this scope.",
            "type": "Source"
        },
        "line": {
            "type": "integer",
            "description": "Optional start line of the range covered by this scope."
        },
        "column": {
            "type": "integer",
            "description": "Optional start column of the range covered by this scope."
        },
        "endLine": {
            "type": "integer",
            "description": "Optional end line of the range covered by this scope."
        },
        "endColumn": {
            "type": "integer",
            "description": "Optional end column of the range covered by this scope."
        }
    }
    __refs__ = set(['source'])

    __slots__ = list(__props__.keys()) + ['kwargs']

    def __init__(self, name, variablesReference, expensive, namedVariables=None, indexedVariables=None, source=None, line=None, column=None, endLine=None, endColumn=None, **kwargs):
        """
        :param string name: Name of the scope such as 'Arguments', 'Locals'.
        :param integer variablesReference: The variables of this scope can be retrieved by passing the value of variablesReference to the VariablesRequest.
        :param boolean expensive: If true, the number of variables in this scope is large or expensive to retrieve.
        :param integer namedVariables: The number of named variables in this scope.
        The client can use this optional information to present the variables in a paged UI and fetch them in chunks.
        :param integer indexedVariables: The number of indexed variables in this scope.
        The client can use this optional information to present the variables in a paged UI and fetch them in chunks.
        :param Source source: Optional source for this scope.
        :param integer line: Optional start line of the range covered by this scope.
        :param integer column: Optional start column of the range covered by this scope.
        :param integer endLine: Optional end line of the range covered by this scope.
        :param integer endColumn: Optional end column of the range covered by this scope.
        """
        self.name = name
        self.variablesReference = variablesReference
        self.expensive = expensive
        self.namedVariables = namedVariables
        self.indexedVariables = indexedVariables
        # Accept either a Source instance or a raw dict from the wire.
        if source is None:
            self.source = Source()
        else:
            self.source = Source(**source) if source.__class__ != Source else source
        self.line = line
        self.column = column
        self.endLine = endLine
        self.endColumn = endColumn
        self.kwargs = kwargs

    def to_dict(self):
        dct = {
            'name': self.name,
            'variablesReference': self.variablesReference,
            'expensive': self.expensive,
        }
        if self.namedVariables is not None:
            dct['namedVariables'] = self.namedVariables
        if self.indexedVariables is not None:
            dct['indexedVariables'] = self.indexedVariables
        if self.source is not None:
            dct['source'] = self.source.to_dict()
        if self.line is not None:
            dct['line'] = self.line
        if self.column is not None:
            dct['column'] = self.column
        if self.endLine is not None:
            dct['endLine'] = self.endLine
        if self.endColumn is not None:
            dct['endColumn'] = self.endColumn
        dct.update(self.kwargs)
        return dct
@register
class Variable(BaseSchema):
    """
    A Variable is a name/value pair.

    Optionally a variable can have a 'type' that is shown if space permits or when hovering over
    the variable's name.

    An optional 'kind' is used to render additional properties of the variable, e.g. different
    icons can be used to indicate that a variable is public or private.

    If the value is structured (has children), a handle is provided to retrieve the children with
    the VariablesRequest.

    If the number of named or indexed children is large, the numbers should be returned via the
    optional 'namedVariables' and 'indexedVariables' attributes.

    The client can use this optional information to present the children in a paged UI and fetch
    them in chunks.

    Note: automatically generated code. Do not edit manually.
    """

    __props__ = {
        "name": {
            "type": "string",
            "description": "The variable's name."
        },
        "value": {
            "type": "string",
            "description": "The variable's value. This can be a multi-line text, e.g. for a function the body of a function."
        },
        "type": {
            "type": "string",
            "description": "The type of the variable's value. Typically shown in the UI when hovering over the value."
        },
        "presentationHint": {
            "description": "Properties of a variable that can be used to determine how to render the variable in the UI.",
            "type": "VariablePresentationHint"
        },
        "evaluateName": {
            "type": "string",
            "description": "Optional evaluatable name of this variable which can be passed to the 'EvaluateRequest' to fetch the variable's value."
        },
        "variablesReference": {
            "type": "integer",
            "description": "If variablesReference is > 0, the variable is structured and its children can be retrieved by passing variablesReference to the VariablesRequest."
        },
        "namedVariables": {
            "type": "integer",
            "description": "The number of named child variables.\nThe client can use this optional information to present the children in a paged UI and fetch them in chunks."
        },
        "indexedVariables": {
            "type": "integer",
            "description": "The number of indexed child variables.\nThe client can use this optional information to present the children in a paged UI and fetch them in chunks."
        }
    }
    __refs__ = set(['presentationHint'])

    __slots__ = list(__props__.keys()) + ['kwargs']

    def __init__(self, name, value, variablesReference, type=None, presentationHint=None, evaluateName=None, namedVariables=None, indexedVariables=None, **kwargs):
        """
        :param string name: The variable's name.
        :param string value: The variable's value. This can be a multi-line text, e.g. for a function the body of a function.
        :param integer variablesReference: If variablesReference is > 0, the variable is structured and its children can be retrieved by passing variablesReference to the VariablesRequest.
        :param string type: The type of the variable's value. Typically shown in the UI when hovering over the value.
        :param VariablePresentationHint presentationHint: Properties of a variable that can be used to determine how to render the variable in the UI.
        :param string evaluateName: Optional evaluatable name of this variable which can be passed to the 'EvaluateRequest' to fetch the variable's value.
        :param integer namedVariables: The number of named child variables.
        The client can use this optional information to present the children in a paged UI and fetch them in chunks.
        :param integer indexedVariables: The number of indexed child variables.
        The client can use this optional information to present the children in a paged UI and fetch them in chunks.
        """
        self.name = name
        self.value = value
        self.variablesReference = variablesReference
        self.type = type
        # Accept either a VariablePresentationHint instance or a raw dict.
        if presentationHint is None:
            self.presentationHint = VariablePresentationHint()
        else:
            self.presentationHint = VariablePresentationHint(**presentationHint) if presentationHint.__class__ != VariablePresentationHint else presentationHint
        self.evaluateName = evaluateName
        self.namedVariables = namedVariables
        self.indexedVariables = indexedVariables
        self.kwargs = kwargs

    def to_dict(self):
        dct = {
            'name': self.name,
            'value': self.value,
            'variablesReference': self.variablesReference,
        }
        if self.type is not None:
            dct['type'] = self.type
        if self.presentationHint is not None:
            dct['presentationHint'] = self.presentationHint.to_dict()
        if self.evaluateName is not None:
            dct['evaluateName'] = self.evaluateName
        if self.namedVariables is not None:
            dct['namedVariables'] = self.namedVariables
        if self.indexedVariables is not None:
            dct['indexedVariables'] = self.indexedVariables
        dct.update(self.kwargs)
        return dct
@register
class VariablePresentationHint(BaseSchema):
    """
    Optional properties of a variable that can be used to determine how to render the variable in
    the UI.

    Note: automatically generated code. Do not edit manually.
    """

    __props__ = {
        "kind": {
            "description": "The kind of variable. Before introducing additional values, try to use the listed values.",
            "type": "string",
            "_enum": [
                "property",
                "method",
                "class",
                "data",
                "event",
                "baseClass",
                "innerClass",
                "interface",
                "mostDerivedClass",
                "virtual"
            ],
            "enumDescriptions": [
                "Indicates that the object is a property.",
                "Indicates that the object is a method.",
                "Indicates that the object is a class.",
                "Indicates that the object is data.",
                "Indicates that the object is an event.",
                "Indicates that the object is a base class.",
                "Indicates that the object is an inner class.",
                "Indicates that the object is an interface.",
                "Indicates that the object is the most derived class.",
                "Indicates that the object is virtual, that means it is a synthetic object introduced by the adapter for rendering purposes, e.g. an index range for large arrays."
            ]
        },
        "attributes": {
            "description": "Set of attributes represented as an array of strings. Before introducing additional values, try to use the listed values.",
            "type": "array",
            "items": {
                "type": "string",
                "_enum": [
                    "static",
                    "constant",
                    "readOnly",
                    "rawString",
                    "hasObjectId",
                    "canHaveObjectId",
                    "hasSideEffects"
                ],
                "enumDescriptions": [
                    "Indicates that the object is static.",
                    "Indicates that the object is a constant.",
                    "Indicates that the object is read only.",
                    "Indicates that the object is a raw string.",
                    "Indicates that the object can have an Object ID created for it.",
                    "Indicates that the object has an Object ID associated with it.",
                    "Indicates that the evaluation had side effects."
                ]
            }
        },
        "visibility": {
            "description": "Visibility of variable. Before introducing additional values, try to use the listed values.",
            "type": "string",
            "_enum": [
                "public",
                "private",
                "protected",
                "internal",
                "final"
            ]
        }
    }
    __refs__ = set()

    __slots__ = list(__props__.keys()) + ['kwargs']

    def __init__(self, kind=None, attributes=None, visibility=None, **kwargs):
        """
        :param string kind: The kind of variable. Before introducing additional values, try to use the listed values.
        :param array attributes: Set of attributes represented as an array of strings. Before introducing additional values, try to use the listed values.
        :param string visibility: Visibility of variable. Before introducing additional values, try to use the listed values.
        """
        self.kind = kind
        self.attributes = attributes
        self.visibility = visibility
        self.kwargs = kwargs

    def to_dict(self):
        # Every property is optional, so the base dict starts empty.
        dct = {
        }
        if self.kind is not None:
            dct['kind'] = self.kind
        if self.attributes is not None:
            dct['attributes'] = self.attributes
        if self.visibility is not None:
            dct['visibility'] = self.visibility
        dct.update(self.kwargs)
        return dct
:param string hitCondition: An optional expression that controls how many hits of the breakpoint are ignored. The backend is expected to interpret the expression as needed. :param string logMessage: If this attribute exists and is non-empty, the backend must not 'break' (stop) but log the message instead. Expressions within {} are interpolated. """ self.line = line self.column = column self.condition = condition self.hitCondition = hitCondition self.logMessage = logMessage self.kwargs = kwargs def to_dict(self): dct = { 'line': self.line, } if self.column is not None: dct['column'] = self.column if self.condition is not None: dct['condition'] = self.condition if self.hitCondition is not None: dct['hitCondition'] = self.hitCondition if self.logMessage is not None: dct['logMessage'] = self.logMessage dct.update(self.kwargs) return dct @register class FunctionBreakpoint(BaseSchema): """ Properties of a breakpoint passed to the setFunctionBreakpoints request. Note: automatically generated code. Do not edit manually. """ __props__ = { "name": { "type": "string", "description": "The name of the function." }, "condition": { "type": "string", "description": "An optional expression for conditional breakpoints." }, "hitCondition": { "type": "string", "description": "An optional expression that controls how many hits of the breakpoint are ignored. The backend is expected to interpret the expression as needed." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, name, condition=None, hitCondition=None, **kwargs): """ :param string name: The name of the function. :param string condition: An optional expression for conditional breakpoints. :param string hitCondition: An optional expression that controls how many hits of the breakpoint are ignored. The backend is expected to interpret the expression as needed. 
""" self.name = name self.condition = condition self.hitCondition = hitCondition self.kwargs = kwargs def to_dict(self): dct = { 'name': self.name, } if self.condition is not None: dct['condition'] = self.condition if self.hitCondition is not None: dct['hitCondition'] = self.hitCondition dct.update(self.kwargs) return dct @register class Breakpoint(BaseSchema): """ Information about a Breakpoint created in setBreakpoints or setFunctionBreakpoints. Note: automatically generated code. Do not edit manually. """ __props__ = { "id": { "type": "integer", "description": "An optional identifier for the breakpoint. It is needed if breakpoint events are used to update or remove breakpoints." }, "verified": { "type": "boolean", "description": "If true breakpoint could be set (but not necessarily at the desired location)." }, "message": { "type": "string", "description": "An optional message about the state of the breakpoint. This is shown to the user and can be used to explain why a breakpoint could not be verified." }, "source": { "description": "The source where the breakpoint is located.", "type": "Source" }, "line": { "type": "integer", "description": "The start line of the actual range covered by the breakpoint." }, "column": { "type": "integer", "description": "An optional start column of the actual range covered by the breakpoint." }, "endLine": { "type": "integer", "description": "An optional end line of the actual range covered by the breakpoint." }, "endColumn": { "type": "integer", "description": "An optional end column of the actual range covered by the breakpoint. If no end line is given, then the end column is assumed to be in the start line." } } __refs__ = set(['source']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, verified, id=None, message=None, source=None, line=None, column=None, endLine=None, endColumn=None, **kwargs): """ :param boolean verified: If true breakpoint could be set (but not necessarily at the desired location). 
:param integer id: An optional identifier for the breakpoint. It is needed if breakpoint events are used to update or remove breakpoints. :param string message: An optional message about the state of the breakpoint. This is shown to the user and can be used to explain why a breakpoint could not be verified. :param Source source: The source where the breakpoint is located. :param integer line: The start line of the actual range covered by the breakpoint. :param integer column: An optional start column of the actual range covered by the breakpoint. :param integer endLine: An optional end line of the actual range covered by the breakpoint. :param integer endColumn: An optional end column of the actual range covered by the breakpoint. If no end line is given, then the end column is assumed to be in the start line. """ self.verified = verified self.id = id self.message = message if source is None: self.source = Source() else: self.source = Source(**source) if source.__class__ != Source else source self.line = line self.column = column self.endLine = endLine self.endColumn = endColumn self.kwargs = kwargs def to_dict(self): dct = { 'verified': self.verified, } if self.id is not None: dct['id'] = self.id if self.message is not None: dct['message'] = self.message if self.source is not None: dct['source'] = self.source.to_dict() if self.line is not None: dct['line'] = self.line if self.column is not None: dct['column'] = self.column if self.endLine is not None: dct['endLine'] = self.endLine if self.endColumn is not None: dct['endColumn'] = self.endColumn dct.update(self.kwargs) return dct @register class StepInTarget(BaseSchema): """ A StepInTarget can be used in the 'stepIn' request and determines into which single target the stepIn request should step. Note: automatically generated code. Do not edit manually. """ __props__ = { "id": { "type": "integer", "description": "Unique identifier for a stepIn target." 
}, "label": { "type": "string", "description": "The name of the stepIn target (shown in the UI)." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, id, label, **kwargs): """ :param integer id: Unique identifier for a stepIn target. :param string label: The name of the stepIn target (shown in the UI). """ self.id = id self.label = label self.kwargs = kwargs def to_dict(self): dct = { 'id': self.id, 'label': self.label, } dct.update(self.kwargs) return dct @register class GotoTarget(BaseSchema): """ A GotoTarget describes a code location that can be used as a target in the 'goto' request. The possible goto targets can be determined via the 'gotoTargets' request. Note: automatically generated code. Do not edit manually. """ __props__ = { "id": { "type": "integer", "description": "Unique identifier for a goto target. This is used in the goto request." }, "label": { "type": "string", "description": "The name of the goto target (shown in the UI)." }, "line": { "type": "integer", "description": "The line of the goto target." }, "column": { "type": "integer", "description": "An optional column of the goto target." }, "endLine": { "type": "integer", "description": "An optional end line of the range covered by the goto target." }, "endColumn": { "type": "integer", "description": "An optional end column of the range covered by the goto target." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, id, label, line, column=None, endLine=None, endColumn=None, **kwargs): """ :param integer id: Unique identifier for a goto target. This is used in the goto request. :param string label: The name of the goto target (shown in the UI). :param integer line: The line of the goto target. :param integer column: An optional column of the goto target. :param integer endLine: An optional end line of the range covered by the goto target. 
:param integer endColumn: An optional end column of the range covered by the goto target. """ self.id = id self.label = label self.line = line self.column = column self.endLine = endLine self.endColumn = endColumn self.kwargs = kwargs def to_dict(self): dct = { 'id': self.id, 'label': self.label, 'line': self.line, } if self.column is not None: dct['column'] = self.column if self.endLine is not None: dct['endLine'] = self.endLine if self.endColumn is not None: dct['endColumn'] = self.endColumn dct.update(self.kwargs) return dct @register class CompletionItem(BaseSchema): """ CompletionItems are the suggestions returned from the CompletionsRequest. Note: automatically generated code. Do not edit manually. """ __props__ = { "label": { "type": "string", "description": "The label of this completion item. By default this is also the text that is inserted when selecting this completion." }, "text": { "type": "string", "description": "If text is not falsy then it is inserted instead of the label." }, "type": { "description": "The item's type. Typically the client uses this information to render the item in the UI with an icon.", "type": "CompletionItemType" }, "start": { "type": "integer", "description": "This value determines the location (in the CompletionsRequest's 'text' attribute) where the completion text is added.\nIf missing the text is added at the location specified by the CompletionsRequest's 'column' attribute." }, "length": { "type": "integer", "description": "This value determines how many characters are overwritten by the completion text.\nIf missing the value 0 is assumed which results in the completion text being inserted." } } __refs__ = set(['type']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, label, text=None, type=None, start=None, length=None, **kwargs): """ :param string label: The label of this completion item. By default this is also the text that is inserted when selecting this completion. 
:param string text: If text is not falsy then it is inserted instead of the label. :param CompletionItemType type: The item's type. Typically the client uses this information to render the item in the UI with an icon. :param integer start: This value determines the location (in the CompletionsRequest's 'text' attribute) where the completion text is added. If missing the text is added at the location specified by the CompletionsRequest's 'column' attribute. :param integer length: This value determines how many characters are overwritten by the completion text. If missing the value 0 is assumed which results in the completion text being inserted. """ self.label = label self.text = text assert type in CompletionItemType.VALID_VALUES self.type = type self.start = start self.length = length self.kwargs = kwargs def to_dict(self): dct = { 'label': self.label, } if self.text is not None: dct['text'] = self.text if self.type is not None: dct['type'] = self.type if self.start is not None: dct['start'] = self.start if self.length is not None: dct['length'] = self.length dct.update(self.kwargs) return dct @register class CompletionItemType(BaseSchema): """ Some predefined types for the CompletionItem. Please note that not all clients have specific icons for all of them. Note: automatically generated code. Do not edit manually. 
""" METHOD = 'method' FUNCTION = 'function' CONSTRUCTOR = 'constructor' FIELD = 'field' VARIABLE = 'variable' CLASS = 'class' INTERFACE = 'interface' MODULE = 'module' PROPERTY = 'property' UNIT = 'unit' VALUE = 'value' ENUM = 'enum' KEYWORD = 'keyword' SNIPPET = 'snippet' TEXT = 'text' COLOR = 'color' FILE = 'file' REFERENCE = 'reference' CUSTOMCOLOR = 'customcolor' VALID_VALUES = set(['method', 'function', 'constructor', 'field', 'variable', 'class', 'interface', 'module', 'property', 'unit', 'value', 'enum', 'keyword', 'snippet', 'text', 'color', 'file', 'reference', 'customcolor']) __props__ = {} __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, **kwargs): """ """ self.kwargs = kwargs def to_dict(self): dct = { } dct.update(self.kwargs) return dct @register class ChecksumAlgorithm(BaseSchema): """ Names of checksum algorithms that may be supported by a debug adapter. Note: automatically generated code. Do not edit manually. """ MD5 = 'MD5' SHA1 = 'SHA1' SHA256 = 'SHA256' TIMESTAMP = 'timestamp' VALID_VALUES = set(['MD5', 'SHA1', 'SHA256', 'timestamp']) __props__ = {} __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, **kwargs): """ """ self.kwargs = kwargs def to_dict(self): dct = { } dct.update(self.kwargs) return dct @register class Checksum(BaseSchema): """ The checksum of an item calculated by the specified algorithm. Note: automatically generated code. Do not edit manually. """ __props__ = { "algorithm": { "description": "The algorithm used to calculate this checksum.", "type": "ChecksumAlgorithm" }, "checksum": { "type": "string", "description": "Value of the checksum." } } __refs__ = set(['algorithm']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, algorithm, checksum, **kwargs): """ :param ChecksumAlgorithm algorithm: The algorithm used to calculate this checksum. :param string checksum: Value of the checksum. 
""" assert algorithm in ChecksumAlgorithm.VALID_VALUES self.algorithm = algorithm self.checksum = checksum self.kwargs = kwargs def to_dict(self): dct = { 'algorithm': self.algorithm, 'checksum': self.checksum, } dct.update(self.kwargs) return dct @register class ValueFormat(BaseSchema): """ Provides formatting information for a value. Note: automatically generated code. Do not edit manually. """ __props__ = { "hex": { "type": "boolean", "description": "Display the value in hex." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, hex=None, **kwargs): """ :param boolean hex: Display the value in hex. """ self.hex = hex self.kwargs = kwargs def to_dict(self): dct = { } if self.hex is not None: dct['hex'] = self.hex dct.update(self.kwargs) return dct @register class StackFrameFormat(BaseSchema): """ Provides formatting information for a stack frame. Note: automatically generated code. Do not edit manually. """ __props__ = { "hex": { "type": "boolean", "description": "Display the value in hex." }, "parameters": { "type": "boolean", "description": "Displays parameters for the stack frame." }, "parameterTypes": { "type": "boolean", "description": "Displays the types of parameters for the stack frame." }, "parameterNames": { "type": "boolean", "description": "Displays the names of parameters for the stack frame." }, "parameterValues": { "type": "boolean", "description": "Displays the values of parameters for the stack frame." }, "line": { "type": "boolean", "description": "Displays the line number of the stack frame." }, "module": { "type": "boolean", "description": "Displays the module of the stack frame." }, "includeAll": { "type": "boolean", "description": "Includes all stack frames, including those the debug adapter might otherwise hide." 
} } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, hex=None, parameters=None, parameterTypes=None, parameterNames=None, parameterValues=None, line=None, module=None, includeAll=None, **kwargs): """ :param boolean hex: Display the value in hex. :param boolean parameters: Displays parameters for the stack frame. :param boolean parameterTypes: Displays the types of parameters for the stack frame. :param boolean parameterNames: Displays the names of parameters for the stack frame. :param boolean parameterValues: Displays the values of parameters for the stack frame. :param boolean line: Displays the line number of the stack frame. :param boolean module: Displays the module of the stack frame. :param boolean includeAll: Includes all stack frames, including those the debug adapter might otherwise hide. """ self.hex = hex self.parameters = parameters self.parameterTypes = parameterTypes self.parameterNames = parameterNames self.parameterValues = parameterValues self.line = line self.module = module self.includeAll = includeAll self.kwargs = kwargs def to_dict(self): dct = { } if self.hex is not None: dct['hex'] = self.hex if self.parameters is not None: dct['parameters'] = self.parameters if self.parameterTypes is not None: dct['parameterTypes'] = self.parameterTypes if self.parameterNames is not None: dct['parameterNames'] = self.parameterNames if self.parameterValues is not None: dct['parameterValues'] = self.parameterValues if self.line is not None: dct['line'] = self.line if self.module is not None: dct['module'] = self.module if self.includeAll is not None: dct['includeAll'] = self.includeAll dct.update(self.kwargs) return dct @register class ExceptionOptions(BaseSchema): """ An ExceptionOptions assigns configuration options to a set of exceptions. Note: automatically generated code. Do not edit manually. 
""" __props__ = { "path": { "type": "array", "items": { "$ref": "#/definitions/ExceptionPathSegment" }, "description": "A path that selects a single or multiple exceptions in a tree. If 'path' is missing, the whole tree is selected. By convention the first segment of the path is a category that is used to group exceptions in the UI." }, "breakMode": { "description": "Condition when a thrown exception should result in a break.", "type": "ExceptionBreakMode" } } __refs__ = set(['breakMode']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, breakMode, path=None, **kwargs): """ :param ExceptionBreakMode breakMode: Condition when a thrown exception should result in a break. :param array path: A path that selects a single or multiple exceptions in a tree. If 'path' is missing, the whole tree is selected. By convention the first segment of the path is a category that is used to group exceptions in the UI. """ assert breakMode in ExceptionBreakMode.VALID_VALUES self.breakMode = breakMode self.path = path self.kwargs = kwargs def to_dict(self): dct = { 'breakMode': self.breakMode, } if self.path is not None: dct['path'] = self.path dct.update(self.kwargs) return dct @register class ExceptionBreakMode(BaseSchema): """ This enumeration defines all possible conditions when a thrown exception should result in a break. never: never breaks, always: always breaks, unhandled: breaks when excpetion unhandled, userUnhandled: breaks if the exception is not handled by user code. Note: automatically generated code. Do not edit manually. 
""" NEVER = 'never' ALWAYS = 'always' UNHANDLED = 'unhandled' USERUNHANDLED = 'userUnhandled' VALID_VALUES = set(['never', 'always', 'unhandled', 'userUnhandled']) __props__ = {} __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, **kwargs): """ """ self.kwargs = kwargs def to_dict(self): dct = { } dct.update(self.kwargs) return dct @register class ExceptionPathSegment(BaseSchema): """ An ExceptionPathSegment represents a segment in a path that is used to match leafs or nodes in a tree of exceptions. If a segment consists of more than one name, it matches the names provided if 'negate' is false or missing or it matches anything except the names provided if 'negate' is true. Note: automatically generated code. Do not edit manually. """ __props__ = { "negate": { "type": "boolean", "description": "If false or missing this segment matches the names provided, otherwise it matches anything except the names provided." }, "names": { "type": "array", "items": { "type": "string" }, "description": "Depending on the value of 'negate' the names that should match or not match." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, names, negate=None, **kwargs): """ :param array names: Depending on the value of 'negate' the names that should match or not match. :param boolean negate: If false or missing this segment matches the names provided, otherwise it matches anything except the names provided. """ self.names = names self.negate = negate self.kwargs = kwargs def to_dict(self): dct = { 'names': self.names, } if self.negate is not None: dct['negate'] = self.negate dct.update(self.kwargs) return dct @register class ExceptionDetails(BaseSchema): """ Detailed information about an exception that has occurred. Note: automatically generated code. Do not edit manually. """ __props__ = { "message": { "type": "string", "description": "Message contained in the exception." 
}, "typeName": { "type": "string", "description": "Short type name of the exception object." }, "fullTypeName": { "type": "string", "description": "Fully-qualified type name of the exception object." }, "evaluateName": { "type": "string", "description": "Optional expression that can be evaluated in the current scope to obtain the exception object." }, "stackTrace": { "type": "string", "description": "Stack trace at the time the exception was thrown." }, "innerException": { "type": "array", "items": { "$ref": "#/definitions/ExceptionDetails" }, "description": "Details of the exception contained by this exception, if any." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, message=None, typeName=None, fullTypeName=None, evaluateName=None, stackTrace=None, innerException=None, **kwargs): """ :param string message: Message contained in the exception. :param string typeName: Short type name of the exception object. :param string fullTypeName: Fully-qualified type name of the exception object. :param string evaluateName: Optional expression that can be evaluated in the current scope to obtain the exception object. :param string stackTrace: Stack trace at the time the exception was thrown. :param array innerException: Details of the exception contained by this exception, if any. 
""" self.message = message self.typeName = typeName self.fullTypeName = fullTypeName self.evaluateName = evaluateName self.stackTrace = stackTrace self.innerException = innerException self.kwargs = kwargs def to_dict(self): dct = { } if self.message is not None: dct['message'] = self.message if self.typeName is not None: dct['typeName'] = self.typeName if self.fullTypeName is not None: dct['fullTypeName'] = self.fullTypeName if self.evaluateName is not None: dct['evaluateName'] = self.evaluateName if self.stackTrace is not None: dct['stackTrace'] = self.stackTrace if self.innerException is not None: dct['innerException'] = self.innerException dct.update(self.kwargs) return dct @register class ErrorResponseBody(BaseSchema): """ "body" of ErrorResponse Note: automatically generated code. Do not edit manually. """ __props__ = { "error": { "description": "An optional, structured error message.", "type": "Message" } } __refs__ = set(['error']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, error=None, **kwargs): """ :param Message error: An optional, structured error message. """ if error is None: self.error = Message() else: self.error = Message(**error) if error.__class__ != Message else error self.kwargs = kwargs def to_dict(self): dct = { } if self.error is not None: dct['error'] = self.error.to_dict() dct.update(self.kwargs) return dct @register class StoppedEventBody(BaseSchema): """ "body" of StoppedEvent Note: automatically generated code. Do not edit manually. """ __props__ = { "reason": { "type": "string", "description": "The reason for the event.\nFor backward compatibility this string is shown in the UI if the 'description' attribute is missing (but it must not be translated).", "_enum": [ "step", "breakpoint", "exception", "pause", "entry", "goto" ] }, "description": { "type": "string", "description": "The full reason for the event, e.g. 'Paused on exception'. This string is shown in the UI as is and must be translated." 
}, "threadId": { "type": "integer", "description": "The thread which was stopped." }, "preserveFocusHint": { "type": "boolean", "description": "A value of true hints to the frontend that this event should not change the focus." }, "text": { "type": "string", "description": "Additional information. E.g. if reason is 'exception', text contains the exception name. This string is shown in the UI." }, "allThreadsStopped": { "type": "boolean", "description": "If 'allThreadsStopped' is true, a debug adapter can announce that all threads have stopped.\n- The client should use this information to enable that all threads can be expanded to access their stacktraces.\n- If the attribute is missing or false, only the thread with the given threadId can be expanded." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, reason, description=None, threadId=None, preserveFocusHint=None, text=None, allThreadsStopped=None, **kwargs): """ :param string reason: The reason for the event. For backward compatibility this string is shown in the UI if the 'description' attribute is missing (but it must not be translated). :param string description: The full reason for the event, e.g. 'Paused on exception'. This string is shown in the UI as is and must be translated. :param integer threadId: The thread which was stopped. :param boolean preserveFocusHint: A value of true hints to the frontend that this event should not change the focus. :param string text: Additional information. E.g. if reason is 'exception', text contains the exception name. This string is shown in the UI. :param boolean allThreadsStopped: If 'allThreadsStopped' is true, a debug adapter can announce that all threads have stopped. - The client should use this information to enable that all threads can be expanded to access their stacktraces. - If the attribute is missing or false, only the thread with the given threadId can be expanded. 
""" self.reason = reason self.description = description self.threadId = threadId self.preserveFocusHint = preserveFocusHint self.text = text self.allThreadsStopped = allThreadsStopped self.kwargs = kwargs def to_dict(self): dct = { 'reason': self.reason, } if self.description is not None: dct['description'] = self.description if self.threadId is not None: dct['threadId'] = self.threadId if self.preserveFocusHint is not None: dct['preserveFocusHint'] = self.preserveFocusHint if self.text is not None: dct['text'] = self.text if self.allThreadsStopped is not None: dct['allThreadsStopped'] = self.allThreadsStopped dct.update(self.kwargs) return dct @register class ContinuedEventBody(BaseSchema): """ "body" of ContinuedEvent Note: automatically generated code. Do not edit manually. """ __props__ = { "threadId": { "type": "integer", "description": "The thread which was continued." }, "allThreadsContinued": { "type": "boolean", "description": "If 'allThreadsContinued' is true, a debug adapter can announce that all threads have continued." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, threadId, allThreadsContinued=None, **kwargs): """ :param integer threadId: The thread which was continued. :param boolean allThreadsContinued: If 'allThreadsContinued' is true, a debug adapter can announce that all threads have continued. """ self.threadId = threadId self.allThreadsContinued = allThreadsContinued self.kwargs = kwargs def to_dict(self): dct = { 'threadId': self.threadId, } if self.allThreadsContinued is not None: dct['allThreadsContinued'] = self.allThreadsContinued dct.update(self.kwargs) return dct @register class ExitedEventBody(BaseSchema): """ "body" of ExitedEvent Note: automatically generated code. Do not edit manually. """ __props__ = { "exitCode": { "type": "integer", "description": "The exit code returned from the debuggee." 
} } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, exitCode, **kwargs): """ :param integer exitCode: The exit code returned from the debuggee. """ self.exitCode = exitCode self.kwargs = kwargs def to_dict(self): dct = { 'exitCode': self.exitCode, } dct.update(self.kwargs) return dct @register class TerminatedEventBody(BaseSchema): """ "body" of TerminatedEvent Note: automatically generated code. Do not edit manually. """ __props__ = { "restart": { "type": [ "array", "boolean", "integer", "null", "number", "object", "string" ], "description": "A debug adapter may set 'restart' to true (or to an arbitrary object) to request that the front end restarts the session.\nThe value is not interpreted by the client and passed unmodified as an attribute '__restart' to the 'launch' and 'attach' requests." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, restart=None, **kwargs): """ :param ['array', 'boolean', 'integer', 'null', 'number', 'object', 'string'] restart: A debug adapter may set 'restart' to true (or to an arbitrary object) to request that the front end restarts the session. The value is not interpreted by the client and passed unmodified as an attribute '__restart' to the 'launch' and 'attach' requests. """ self.restart = restart self.kwargs = kwargs def to_dict(self): dct = { } if self.restart is not None: dct['restart'] = self.restart dct.update(self.kwargs) return dct @register class ThreadEventBody(BaseSchema): """ "body" of ThreadEvent Note: automatically generated code. Do not edit manually. """ __props__ = { "reason": { "type": "string", "description": "The reason for the event.", "_enum": [ "started", "exited" ] }, "threadId": { "type": "integer", "description": "The identifier of the thread." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, reason, threadId, **kwargs): """ :param string reason: The reason for the event. 
:param integer threadId: The identifier of the thread. """ self.reason = reason self.threadId = threadId self.kwargs = kwargs def to_dict(self): dct = { 'reason': self.reason, 'threadId': self.threadId, } dct.update(self.kwargs) return dct @register class OutputEventBody(BaseSchema): """ "body" of OutputEvent Note: automatically generated code. Do not edit manually. """ __props__ = { "category": { "type": "string", "description": "The output category. If not specified, 'console' is assumed.", "_enum": [ "console", "stdout", "stderr", "telemetry" ] }, "output": { "type": "string", "description": "The output to report." }, "variablesReference": { "type": "number", "description": "If an attribute 'variablesReference' exists and its value is > 0, the output contains objects which can be retrieved by passing 'variablesReference' to the 'variables' request." }, "source": { "description": "An optional source location where the output was produced.", "type": "Source" }, "line": { "type": "integer", "description": "An optional source location line where the output was produced." }, "column": { "type": "integer", "description": "An optional source location column where the output was produced." }, "data": { "type": [ "array", "boolean", "integer", "null", "number", "object", "string" ], "description": "Optional data to report. For the 'telemetry' category the data will be sent to telemetry, for the other categories the data is shown in JSON format." } } __refs__ = set(['source']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, output, category=None, variablesReference=None, source=None, line=None, column=None, data=None, **kwargs): """ :param string output: The output to report. :param string category: The output category. If not specified, 'console' is assumed. 
:param number variablesReference: If an attribute 'variablesReference' exists and its value is > 0, the output contains objects which can be retrieved by passing 'variablesReference' to the 'variables' request. :param Source source: An optional source location where the output was produced. :param integer line: An optional source location line where the output was produced. :param integer column: An optional source location column where the output was produced. :param ['array', 'boolean', 'integer', 'null', 'number', 'object', 'string'] data: Optional data to report. For the 'telemetry' category the data will be sent to telemetry, for the other categories the data is shown in JSON format. """ self.output = output self.category = category self.variablesReference = variablesReference if source is None: self.source = Source() else: self.source = Source(**source) if source.__class__ != Source else source self.line = line self.column = column self.data = data self.kwargs = kwargs def to_dict(self): dct = { 'output': self.output, } if self.category is not None: dct['category'] = self.category if self.variablesReference is not None: dct['variablesReference'] = self.variablesReference if self.source is not None: dct['source'] = self.source.to_dict() if self.line is not None: dct['line'] = self.line if self.column is not None: dct['column'] = self.column if self.data is not None: dct['data'] = self.data dct.update(self.kwargs) return dct @register class BreakpointEventBody(BaseSchema): """ "body" of BreakpointEvent Note: automatically generated code. Do not edit manually. 
""" __props__ = { "reason": { "type": "string", "description": "The reason for the event.", "_enum": [ "changed", "new", "removed" ] }, "breakpoint": { "description": "The 'id' attribute is used to find the target breakpoint and the other attributes are used as the new values.", "type": "Breakpoint" } } __refs__ = set(['breakpoint']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, reason, breakpoint, **kwargs): """ :param string reason: The reason for the event. :param Breakpoint breakpoint: The 'id' attribute is used to find the target breakpoint and the other attributes are used as the new values. """ self.reason = reason if breakpoint is None: self.breakpoint = Breakpoint() else: self.breakpoint = Breakpoint(**breakpoint) if breakpoint.__class__ != Breakpoint else breakpoint self.kwargs = kwargs def to_dict(self): dct = { 'reason': self.reason, 'breakpoint': self.breakpoint.to_dict(), } dct.update(self.kwargs) return dct @register class ModuleEventBody(BaseSchema): """ "body" of ModuleEvent Note: automatically generated code. Do not edit manually. """ __props__ = { "reason": { "type": "string", "description": "The reason for the event.", "enum": [ "new", "changed", "removed" ] }, "module": { "description": "The new, changed, or removed module. In case of 'removed' only the module id is used.", "type": "Module" } } __refs__ = set(['module']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, reason, module, **kwargs): """ :param string reason: The reason for the event. :param Module module: The new, changed, or removed module. In case of 'removed' only the module id is used. 
""" self.reason = reason if module is None: self.module = Module() else: self.module = Module(**module) if module.__class__ != Module else module self.kwargs = kwargs def to_dict(self): dct = { 'reason': self.reason, 'module': self.module.to_dict(), } dct.update(self.kwargs) return dct @register class LoadedSourceEventBody(BaseSchema): """ "body" of LoadedSourceEvent Note: automatically generated code. Do not edit manually. """ __props__ = { "reason": { "type": "string", "description": "The reason for the event.", "enum": [ "new", "changed", "removed" ] }, "source": { "description": "The new, changed, or removed source.", "type": "Source" } } __refs__ = set(['source']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, reason, source, **kwargs): """ :param string reason: The reason for the event. :param Source source: The new, changed, or removed source. """ self.reason = reason if source is None: self.source = Source() else: self.source = Source(**source) if source.__class__ != Source else source self.kwargs = kwargs def to_dict(self): dct = { 'reason': self.reason, 'source': self.source.to_dict(), } dct.update(self.kwargs) return dct @register class ProcessEventBody(BaseSchema): """ "body" of ProcessEvent Note: automatically generated code. Do not edit manually. """ __props__ = { "name": { "type": "string", "description": "The logical name of the process. This is usually the full path to process's executable file. Example: /home/example/myproj/program.js." }, "systemProcessId": { "type": "integer", "description": "The system process id of the debugged process. This property will be missing for non-system processes." }, "isLocalProcess": { "type": "boolean", "description": "If true, the process is running on the same computer as the debug adapter." 
}, "startMethod": { "type": "string", "enum": [ "launch", "attach", "attachForSuspendedLaunch" ], "description": "Describes how the debug engine started debugging this process.", "enumDescriptions": [ "Process was launched under the debugger.", "Debugger attached to an existing process.", "A project launcher component has launched a new process in a suspended state and then asked the debugger to attach." ] } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, name, systemProcessId=None, isLocalProcess=None, startMethod=None, **kwargs): """ :param string name: The logical name of the process. This is usually the full path to process's executable file. Example: /home/example/myproj/program.js. :param integer systemProcessId: The system process id of the debugged process. This property will be missing for non-system processes. :param boolean isLocalProcess: If true, the process is running on the same computer as the debug adapter. :param string startMethod: Describes how the debug engine started debugging this process. """ self.name = name self.systemProcessId = systemProcessId self.isLocalProcess = isLocalProcess self.startMethod = startMethod self.kwargs = kwargs def to_dict(self): dct = { 'name': self.name, } if self.systemProcessId is not None: dct['systemProcessId'] = self.systemProcessId if self.isLocalProcess is not None: dct['isLocalProcess'] = self.isLocalProcess if self.startMethod is not None: dct['startMethod'] = self.startMethod dct.update(self.kwargs) return dct @register class CapabilitiesEventBody(BaseSchema): """ "body" of CapabilitiesEvent Note: automatically generated code. Do not edit manually. """ __props__ = { "capabilities": { "description": "The set of updated capabilities.", "type": "Capabilities" } } __refs__ = set(['capabilities']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, capabilities, **kwargs): """ :param Capabilities capabilities: The set of updated capabilities. 
""" if capabilities is None: self.capabilities = Capabilities() else: self.capabilities = Capabilities(**capabilities) if capabilities.__class__ != Capabilities else capabilities self.kwargs = kwargs def to_dict(self): dct = { 'capabilities': self.capabilities.to_dict(), } dct.update(self.kwargs) return dct @register class RunInTerminalRequestArgumentsEnv(BaseSchema): """ "env" of RunInTerminalRequestArguments Note: automatically generated code. Do not edit manually. """ __props__ = {} __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, **kwargs): """ """ self.kwargs = kwargs def to_dict(self): dct = { } dct.update(self.kwargs) return dct @register class RunInTerminalResponseBody(BaseSchema): """ "body" of RunInTerminalResponse Note: automatically generated code. Do not edit manually. """ __props__ = { "processId": { "type": "number", "description": "The process ID." }, "shellProcessId": { "type": "number", "description": "The process ID of the terminal shell." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, processId=None, shellProcessId=None, **kwargs): """ :param number processId: The process ID. :param number shellProcessId: The process ID of the terminal shell. """ self.processId = processId self.shellProcessId = shellProcessId self.kwargs = kwargs def to_dict(self): dct = { } if self.processId is not None: dct['processId'] = self.processId if self.shellProcessId is not None: dct['shellProcessId'] = self.shellProcessId dct.update(self.kwargs) return dct @register class SetBreakpointsResponseBody(BaseSchema): """ "body" of SetBreakpointsResponse Note: automatically generated code. Do not edit manually. """ __props__ = { "breakpoints": { "type": "array", "items": { "$ref": "#/definitions/Breakpoint" }, "description": "Information about the breakpoints. The array elements are in the same order as the elements of the 'breakpoints' (or the deprecated 'lines') array in the arguments." 
} } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, breakpoints, **kwargs): """ :param array breakpoints: Information about the breakpoints. The array elements are in the same order as the elements of the 'breakpoints' (or the deprecated 'lines') array in the arguments. """ self.breakpoints = breakpoints self.kwargs = kwargs def to_dict(self): dct = { 'breakpoints': self.breakpoints, } dct.update(self.kwargs) return dct @register class SetFunctionBreakpointsResponseBody(BaseSchema): """ "body" of SetFunctionBreakpointsResponse Note: automatically generated code. Do not edit manually. """ __props__ = { "breakpoints": { "type": "array", "items": { "$ref": "#/definitions/Breakpoint" }, "description": "Information about the breakpoints. The array elements correspond to the elements of the 'breakpoints' array." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, breakpoints, **kwargs): """ :param array breakpoints: Information about the breakpoints. The array elements correspond to the elements of the 'breakpoints' array. """ self.breakpoints = breakpoints self.kwargs = kwargs def to_dict(self): dct = { 'breakpoints': self.breakpoints, } dct.update(self.kwargs) return dct @register class ContinueResponseBody(BaseSchema): """ "body" of ContinueResponse Note: automatically generated code. Do not edit manually. """ __props__ = { "allThreadsContinued": { "type": "boolean", "description": "If true, the 'continue' request has ignored the specified thread and continued all threads instead. If this attribute is missing a value of 'true' is assumed for backward compatibility." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, allThreadsContinued=None, **kwargs): """ :param boolean allThreadsContinued: If true, the 'continue' request has ignored the specified thread and continued all threads instead. 
If this attribute is missing a value of 'true' is assumed for backward compatibility. """ self.allThreadsContinued = allThreadsContinued self.kwargs = kwargs def to_dict(self): dct = { } if self.allThreadsContinued is not None: dct['allThreadsContinued'] = self.allThreadsContinued dct.update(self.kwargs) return dct @register class StackTraceResponseBody(BaseSchema): """ "body" of StackTraceResponse Note: automatically generated code. Do not edit manually. """ __props__ = { "stackFrames": { "type": "array", "items": { "$ref": "#/definitions/StackFrame" }, "description": "The frames of the stackframe. If the array has length zero, there are no stackframes available.\nThis means that there is no location information available." }, "totalFrames": { "type": "integer", "description": "The total number of frames available." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, stackFrames, totalFrames=None, **kwargs): """ :param array stackFrames: The frames of the stackframe. If the array has length zero, there are no stackframes available. This means that there is no location information available. :param integer totalFrames: The total number of frames available. """ self.stackFrames = stackFrames self.totalFrames = totalFrames self.kwargs = kwargs def to_dict(self): dct = { 'stackFrames': self.stackFrames, } if self.totalFrames is not None: dct['totalFrames'] = self.totalFrames dct.update(self.kwargs) return dct @register class ScopesResponseBody(BaseSchema): """ "body" of ScopesResponse Note: automatically generated code. Do not edit manually. """ __props__ = { "scopes": { "type": "array", "items": { "$ref": "#/definitions/Scope" }, "description": "The scopes of the stackframe. If the array has length zero, there are no scopes available." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, scopes, **kwargs): """ :param array scopes: The scopes of the stackframe. 
If the array has length zero, there are no scopes available. """ self.scopes = scopes self.kwargs = kwargs def to_dict(self): dct = { 'scopes': self.scopes, } dct.update(self.kwargs) return dct @register class VariablesResponseBody(BaseSchema): """ "body" of VariablesResponse Note: automatically generated code. Do not edit manually. """ __props__ = { "variables": { "type": "array", "items": { "$ref": "#/definitions/Variable" }, "description": "All (or a range) of variables for the given variable reference." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, variables, **kwargs): """ :param array variables: All (or a range) of variables for the given variable reference. """ self.variables = variables self.kwargs = kwargs def to_dict(self): dct = { 'variables': self.variables, } dct.update(self.kwargs) return dct @register class SetVariableResponseBody(BaseSchema): """ "body" of SetVariableResponse Note: automatically generated code. Do not edit manually. """ __props__ = { "value": { "type": "string", "description": "The new value of the variable." }, "type": { "type": "string", "description": "The type of the new value. Typically shown in the UI when hovering over the value." }, "variablesReference": { "type": "number", "description": "If variablesReference is > 0, the new value is structured and its children can be retrieved by passing variablesReference to the VariablesRequest." }, "namedVariables": { "type": "number", "description": "The number of named child variables.\nThe client can use this optional information to present the variables in a paged UI and fetch them in chunks." }, "indexedVariables": { "type": "number", "description": "The number of indexed child variables.\nThe client can use this optional information to present the variables in a paged UI and fetch them in chunks." 
} } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, value, type=None, variablesReference=None, namedVariables=None, indexedVariables=None, **kwargs): """ :param string value: The new value of the variable. :param string type: The type of the new value. Typically shown in the UI when hovering over the value. :param number variablesReference: If variablesReference is > 0, the new value is structured and its children can be retrieved by passing variablesReference to the VariablesRequest. :param number namedVariables: The number of named child variables. The client can use this optional information to present the variables in a paged UI and fetch them in chunks. :param number indexedVariables: The number of indexed child variables. The client can use this optional information to present the variables in a paged UI and fetch them in chunks. """ self.value = value self.type = type self.variablesReference = variablesReference self.namedVariables = namedVariables self.indexedVariables = indexedVariables self.kwargs = kwargs def to_dict(self): dct = { 'value': self.value, } if self.type is not None: dct['type'] = self.type if self.variablesReference is not None: dct['variablesReference'] = self.variablesReference if self.namedVariables is not None: dct['namedVariables'] = self.namedVariables if self.indexedVariables is not None: dct['indexedVariables'] = self.indexedVariables dct.update(self.kwargs) return dct @register class SourceResponseBody(BaseSchema): """ "body" of SourceResponse Note: automatically generated code. Do not edit manually. """ __props__ = { "content": { "type": "string", "description": "Content of the source reference." }, "mimeType": { "type": "string", "description": "Optional content type (mime type) of the source." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, content, mimeType=None, **kwargs): """ :param string content: Content of the source reference. 
:param string mimeType: Optional content type (mime type) of the source. """ self.content = content self.mimeType = mimeType self.kwargs = kwargs def to_dict(self): dct = { 'content': self.content, } if self.mimeType is not None: dct['mimeType'] = self.mimeType dct.update(self.kwargs) return dct @register class ThreadsResponseBody(BaseSchema): """ "body" of ThreadsResponse Note: automatically generated code. Do not edit manually. """ __props__ = { "threads": { "type": "array", "items": { "$ref": "#/definitions/Thread" }, "description": "All threads." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, threads, **kwargs): """ :param array threads: All threads. """ self.threads = threads self.kwargs = kwargs def to_dict(self): dct = { 'threads': self.threads, } dct.update(self.kwargs) return dct @register class ModulesResponseBody(BaseSchema): """ "body" of ModulesResponse Note: automatically generated code. Do not edit manually. """ __props__ = { "modules": { "type": "array", "items": { "$ref": "#/definitions/Module" }, "description": "All modules or range of modules." }, "totalModules": { "type": "integer", "description": "The total number of modules available." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, modules, totalModules=None, **kwargs): """ :param array modules: All modules or range of modules. :param integer totalModules: The total number of modules available. """ self.modules = modules self.totalModules = totalModules self.kwargs = kwargs def to_dict(self): dct = { 'modules': self.modules, } if self.totalModules is not None: dct['totalModules'] = self.totalModules dct.update(self.kwargs) return dct @register class LoadedSourcesResponseBody(BaseSchema): """ "body" of LoadedSourcesResponse Note: automatically generated code. Do not edit manually. """ __props__ = { "sources": { "type": "array", "items": { "$ref": "#/definitions/Source" }, "description": "Set of loaded sources." 
} } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, sources, **kwargs): """ :param array sources: Set of loaded sources. """ self.sources = sources self.kwargs = kwargs def to_dict(self): dct = { 'sources': self.sources, } dct.update(self.kwargs) return dct @register class EvaluateResponseBody(BaseSchema): """ "body" of EvaluateResponse Note: automatically generated code. Do not edit manually. """ __props__ = { "result": { "type": "string", "description": "The result of the evaluate request." }, "type": { "type": "string", "description": "The optional type of the evaluate result." }, "presentationHint": { "description": "Properties of a evaluate result that can be used to determine how to render the result in the UI.", "type": "VariablePresentationHint" }, "variablesReference": { "type": "number", "description": "If variablesReference is > 0, the evaluate result is structured and its children can be retrieved by passing variablesReference to the VariablesRequest." }, "namedVariables": { "type": "number", "description": "The number of named child variables.\nThe client can use this optional information to present the variables in a paged UI and fetch them in chunks." }, "indexedVariables": { "type": "number", "description": "The number of indexed child variables.\nThe client can use this optional information to present the variables in a paged UI and fetch them in chunks." } } __refs__ = set(['presentationHint']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, result, variablesReference, type=None, presentationHint=None, namedVariables=None, indexedVariables=None, **kwargs): """ :param string result: The result of the evaluate request. :param number variablesReference: If variablesReference is > 0, the evaluate result is structured and its children can be retrieved by passing variablesReference to the VariablesRequest. :param string type: The optional type of the evaluate result. 
:param VariablePresentationHint presentationHint: Properties of a evaluate result that can be used to determine how to render the result in the UI. :param number namedVariables: The number of named child variables. The client can use this optional information to present the variables in a paged UI and fetch them in chunks. :param number indexedVariables: The number of indexed child variables. The client can use this optional information to present the variables in a paged UI and fetch them in chunks. """ self.result = result self.variablesReference = variablesReference self.type = type if presentationHint is None: self.presentationHint = VariablePresentationHint() else: self.presentationHint = VariablePresentationHint(**presentationHint) if presentationHint.__class__ != VariablePresentationHint else presentationHint self.namedVariables = namedVariables self.indexedVariables = indexedVariables self.kwargs = kwargs def to_dict(self): dct = { 'result': self.result, 'variablesReference': self.variablesReference, } if self.type is not None: dct['type'] = self.type if self.presentationHint is not None: dct['presentationHint'] = self.presentationHint.to_dict() if self.namedVariables is not None: dct['namedVariables'] = self.namedVariables if self.indexedVariables is not None: dct['indexedVariables'] = self.indexedVariables dct.update(self.kwargs) return dct @register class SetExpressionResponseBody(BaseSchema): """ "body" of SetExpressionResponse Note: automatically generated code. Do not edit manually. """ __props__ = { "value": { "type": "string", "description": "The new value of the expression." }, "type": { "type": "string", "description": "The optional type of the value." 
}, "presentationHint": { "description": "Properties of a value that can be used to determine how to render the result in the UI.", "type": "VariablePresentationHint" }, "variablesReference": { "type": "number", "description": "If variablesReference is > 0, the value is structured and its children can be retrieved by passing variablesReference to the VariablesRequest." }, "namedVariables": { "type": "number", "description": "The number of named child variables.\nThe client can use this optional information to present the variables in a paged UI and fetch them in chunks." }, "indexedVariables": { "type": "number", "description": "The number of indexed child variables.\nThe client can use this optional information to present the variables in a paged UI and fetch them in chunks." } } __refs__ = set(['presentationHint']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, value, type=None, presentationHint=None, variablesReference=None, namedVariables=None, indexedVariables=None, **kwargs): """ :param string value: The new value of the expression. :param string type: The optional type of the value. :param VariablePresentationHint presentationHint: Properties of a value that can be used to determine how to render the result in the UI. :param number variablesReference: If variablesReference is > 0, the value is structured and its children can be retrieved by passing variablesReference to the VariablesRequest. :param number namedVariables: The number of named child variables. The client can use this optional information to present the variables in a paged UI and fetch them in chunks. :param number indexedVariables: The number of indexed child variables. The client can use this optional information to present the variables in a paged UI and fetch them in chunks. 
""" self.value = value self.type = type if presentationHint is None: self.presentationHint = VariablePresentationHint() else: self.presentationHint = VariablePresentationHint(**presentationHint) if presentationHint.__class__ != VariablePresentationHint else presentationHint self.variablesReference = variablesReference self.namedVariables = namedVariables self.indexedVariables = indexedVariables self.kwargs = kwargs def to_dict(self): dct = { 'value': self.value, } if self.type is not None: dct['type'] = self.type if self.presentationHint is not None: dct['presentationHint'] = self.presentationHint.to_dict() if self.variablesReference is not None: dct['variablesReference'] = self.variablesReference if self.namedVariables is not None: dct['namedVariables'] = self.namedVariables if self.indexedVariables is not None: dct['indexedVariables'] = self.indexedVariables dct.update(self.kwargs) return dct @register class StepInTargetsResponseBody(BaseSchema): """ "body" of StepInTargetsResponse Note: automatically generated code. Do not edit manually. """ __props__ = { "targets": { "type": "array", "items": { "$ref": "#/definitions/StepInTarget" }, "description": "The possible stepIn targets of the specified source location." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, targets, **kwargs): """ :param array targets: The possible stepIn targets of the specified source location. """ self.targets = targets self.kwargs = kwargs def to_dict(self): dct = { 'targets': self.targets, } dct.update(self.kwargs) return dct @register class GotoTargetsResponseBody(BaseSchema): """ "body" of GotoTargetsResponse Note: automatically generated code. Do not edit manually. """ __props__ = { "targets": { "type": "array", "items": { "$ref": "#/definitions/GotoTarget" }, "description": "The possible goto targets of the specified location." 
} } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, targets, **kwargs): """ :param array targets: The possible goto targets of the specified location. """ self.targets = targets self.kwargs = kwargs def to_dict(self): dct = { 'targets': self.targets, } dct.update(self.kwargs) return dct @register class CompletionsResponseBody(BaseSchema): """ "body" of CompletionsResponse Note: automatically generated code. Do not edit manually. """ __props__ = { "targets": { "type": "array", "items": { "$ref": "#/definitions/CompletionItem" }, "description": "The possible completions for ." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, targets, **kwargs): """ :param array targets: The possible completions for . """ self.targets = targets self.kwargs = kwargs def to_dict(self): dct = { 'targets': self.targets, } dct.update(self.kwargs) return dct @register class ExceptionInfoResponseBody(BaseSchema): """ "body" of ExceptionInfoResponse Note: automatically generated code. Do not edit manually. """ __props__ = { "exceptionId": { "type": "string", "description": "ID of the exception that was thrown." }, "description": { "type": "string", "description": "Descriptive text for the exception provided by the debug adapter." }, "breakMode": { "description": "Mode that caused the exception notification to be raised.", "type": "ExceptionBreakMode" }, "details": { "description": "Detailed information about the exception.", "type": "ExceptionDetails" } } __refs__ = set(['breakMode', 'details']) __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, exceptionId, breakMode, description=None, details=None, **kwargs): """ :param string exceptionId: ID of the exception that was thrown. :param ExceptionBreakMode breakMode: Mode that caused the exception notification to be raised. :param string description: Descriptive text for the exception provided by the debug adapter. 
:param ExceptionDetails details: Detailed information about the exception. """ self.exceptionId = exceptionId assert breakMode in ExceptionBreakMode.VALID_VALUES self.breakMode = breakMode self.description = description if details is None: self.details = ExceptionDetails() else: self.details = ExceptionDetails(**details) if details.__class__ != ExceptionDetails else details self.kwargs = kwargs def to_dict(self): dct = { 'exceptionId': self.exceptionId, 'breakMode': self.breakMode, } if self.description is not None: dct['description'] = self.description if self.details is not None: dct['details'] = self.details.to_dict() dct.update(self.kwargs) return dct @register class MessageVariables(BaseSchema): """ "variables" of Message Note: automatically generated code. Do not edit manually. """ __props__ = {} __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] def __init__(self, **kwargs): """ """ self.kwargs = kwargs def to_dict(self): dct = { } dct.update(self.kwargs) return dct
32.629067
877
0.5337
4a14efb96af78592e8d871428f668e063e6578ed
2,187
py
Python
examples/pytorch-lightning/mnist.py
msarahan/guildai
99bdd09683291dbc206b6dde1b327d47401d29eb
[ "Apache-2.0" ]
694
2018-11-30T01:06:30.000Z
2022-03-31T14:46:26.000Z
examples/pytorch-lightning/mnist.py
msarahan/guildai
99bdd09683291dbc206b6dde1b327d47401d29eb
[ "Apache-2.0" ]
323
2018-11-05T17:44:34.000Z
2022-03-31T16:56:41.000Z
examples/pytorch-lightning/mnist.py
msarahan/guildai
99bdd09683291dbc206b6dde1b327d47401d29eb
[ "Apache-2.0" ]
68
2019-04-01T04:24:47.000Z
2022-02-24T17:22:04.000Z
import os import argparse import torch from torch import nn from torch.nn import functional as F from torch.optim import Adam from torch.utils.data import DataLoader, random_split from torchvision import datasets, transforms from torchvision.datasets import MNIST from pytorch_lightning import Trainer from pytorch_lightning.core.lightning import LightningModule class LitMNIST(LightningModule): def __init__(self, args): super().__init__() self.args = args # mnist images are (1, 28, 28) (channels, height, width) self.layer_1 = nn.Linear(28 * 28, 128) self.layer_2 = nn.Linear(128, 256) self.layer_3 = nn.Linear(256, 10) def forward(self, x): batch_size, channels, height, width = x.size() # (b, 1, 28, 28) -> (b, 1*28*28) x = x.view(batch_size, -1) x = self.layer_1(x) x = F.relu(x) x = self.layer_2(x) x = F.relu(x) x = self.layer_3(x) x = F.log_softmax(x, dim=1) return x def training_step(self, batch, batch_idx): x, y = batch logits = self(x) loss = F.nll_loss(logits, y) self.log( "my_loss", loss, on_step=True, on_epoch=True, prog_bar=True, logger=True ) return loss def configure_optimizers(self): return Adam(self.parameters(), lr=self.args.lr) @staticmethod def add_model_specific_args(parent_parser): parser = parent_parser.add_argument_group("LitModel") parser.add_argument( "--lr", type=float, default=1e-3, help="Learning rate for the Adam optimizer", ) return parent_parser parser = LitMNIST.add_model_specific_args(argparse.ArgumentParser()) parser = Trainer.add_argparse_args(parser) args = parser.parse_args() transform = transforms.Compose( [transforms.ToTensor(), transforms.Normalize((0.1307,), (0.3081,))] ) mnist_train = MNIST(os.getcwd(), train=True, download=True, transform=transform) mnist_train = DataLoader(mnist_train, batch_size=64) model = LitMNIST(args) trainer = Trainer.from_argparse_args(args) trainer.fit(model, mnist_train)
27
84
0.65112
4a14efddec57d33c8d8d98d7c2594710dd813ee8
1,317
py
Python
tools/Vitis-AI-Quantizer/vai_q_pytorch/pytorch_binding/pytorch_nndct/utils/tensor_utils.py
Carles-Figuerola/Vitis-AI
fc043ea4aca1f9fe4e18962e6a6ae397812bb34b
[ "Apache-2.0" ]
1
2021-04-01T06:38:48.000Z
2021-04-01T06:38:48.000Z
tools/Vitis-AI-Quantizer/vai_q_pytorch/pytorch_binding/pytorch_nndct/utils/tensor_utils.py
cy333/Vitis-AI
611b82cfc32ea2fe04491432bf8feed1f378c9de
[ "Apache-2.0" ]
null
null
null
tools/Vitis-AI-Quantizer/vai_q_pytorch/pytorch_binding/pytorch_nndct/utils/tensor_utils.py
cy333/Vitis-AI
611b82cfc32ea2fe04491432bf8feed1f378c9de
[ "Apache-2.0" ]
null
null
null
# # Copyright 2019 Xilinx Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import nndct_shared.utils.tensor_util as tensor_util from nndct_shared.base import FrameworkType def param_to_nndct_format(tensor): tensor_util.convert_parameter_tensor_format( tensor, FrameworkType.TORCH, FrameworkType.NNDCT) def param_to_torch_format(tensor): tensor_util.convert_parameter_tensor_format( tensor, FrameworkType.NNDCT, FrameworkType.TORCH) def blob_to_nndct_format(tensor): tensor_util.convert_blob_tensor_format(tensor, FrameworkType.TORCH, FrameworkType.NNDCT) def blob_to_torch_format(tensor): tensor_util.convert_blob_tensor_format(tensor, FrameworkType.NNDCT, FrameworkType.TORCH)
29.266667
74
0.735004
4a14f0112628b9ffcedba24ec2f43b61580280aa
2,230
py
Python
core/domain/audit_validators.py
sf11047/oppia
dc7410cf9f36e1f357a847d744ba2241a670cb15
[ "Apache-2.0" ]
null
null
null
core/domain/audit_validators.py
sf11047/oppia
dc7410cf9f36e1f357a847d744ba2241a670cb15
[ "Apache-2.0" ]
null
null
null
core/domain/audit_validators.py
sf11047/oppia
dc7410cf9f36e1f357a847d744ba2241a670cb15
[ "Apache-2.0" ]
null
null
null
# coding: utf-8 # # Copyright 2020 The Oppia Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS-IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Validators for audit models.""" from __future__ import absolute_import # pylint: disable=import-only-modules from __future__ import unicode_literals # pylint: disable=import-only-modules from core.domain import base_model_validators from core.platform import models user_models = models.Registry.import_models([models.NAMES.user]) class RoleQueryAuditModelValidator(base_model_validators.BaseModelValidator): """Class for validating RoleQueryAuditModels.""" @classmethod def _get_model_id_regex(cls, item): # Valid id: [user_id].[timestamp_in_sec].[intent].[random_number] regex_string = '^%s\\.\\d+\\.%s\\.\\d+$' % (item.user_id, item.intent) return regex_string @classmethod def _get_external_id_relationships(cls, item): return [ base_model_validators.ExternalModelFetcherDetails( 'user_ids', user_models.UserSettingsModel, [item.user_id])] class UsernameChangeAuditModelValidator( base_model_validators.BaseModelValidator): """Class for validating UsernameChangeAuditModels.""" @classmethod def _get_model_id_regex(cls, item): # Valid id: [committer_id].[timestamp_in_sec] # committer_id refers to the user that is making the change. 
regex_string = '^%s\\.\\d+$' % item.committer_id return regex_string @classmethod def _get_external_id_relationships(cls, item): return [ base_model_validators.ExternalModelFetcherDetails( 'committer_ids', user_models.UserSettingsModel, [item.committer_id])]
35.396825
78
0.720628
4a14f18a0f66914e8f51bf8c104d432803b7d0ba
4,436
py
Python
lib/jnpr/healthbot/swagger/models/devicegroup_schema_authentication_password.py
Juniper/healthbot-py-client
49f0884b5d01ac8430aa7ed4c9acb4e7a2b717a6
[ "Apache-2.0" ]
10
2019-10-23T12:54:37.000Z
2022-02-07T19:24:30.000Z
lib/jnpr/healthbot/swagger/models/devicegroup_schema_authentication_password.py
Juniper/healthbot-py-client
49f0884b5d01ac8430aa7ed4c9acb4e7a2b717a6
[ "Apache-2.0" ]
5
2019-09-30T04:29:25.000Z
2022-02-16T12:21:06.000Z
docs/jnpr_healthbot_swagger/swagger_client/models/devicegroup_schema_authentication_password.py
Juniper/healthbot-py-client
49f0884b5d01ac8430aa7ed4c9acb4e7a2b717a6
[ "Apache-2.0" ]
4
2019-09-30T01:17:48.000Z
2020-08-25T07:27:54.000Z
# coding: utf-8 """ Paragon Insights APIs API interface for PI application # noqa: E501 OpenAPI spec version: 4.0.0 Contact: healthbot-feedback@juniper.net Generated by: https://github.com/swagger-api/swagger-codegen.git """ import pprint import re # noqa: F401 import six class DevicegroupSchemaAuthenticationPassword(object): """NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'password': 'str', 'username': 'str' } attribute_map = { 'password': 'password', 'username': 'username' } def __init__(self, password=None, username=None): # noqa: E501 """DevicegroupSchemaAuthenticationPassword - a model defined in Swagger""" # noqa: E501 self._password = None self._username = None self.discriminator = None self.password = password self.username = username @property def password(self): """Gets the password of this DevicegroupSchemaAuthenticationPassword. # noqa: E501 Password for authentication # noqa: E501 :return: The password of this DevicegroupSchemaAuthenticationPassword. # noqa: E501 :rtype: str """ return self._password @password.setter def password(self, password): """Sets the password of this DevicegroupSchemaAuthenticationPassword. Password for authentication # noqa: E501 :param password: The password of this DevicegroupSchemaAuthenticationPassword. # noqa: E501 :type: str """ if password is None: raise ValueError("Invalid value for `password`, must not be `None`") # noqa: E501 self._password = password @property def username(self): """Gets the username of this DevicegroupSchemaAuthenticationPassword. # noqa: E501 Username for authentication # noqa: E501 :return: The username of this DevicegroupSchemaAuthenticationPassword. 
# noqa: E501 :rtype: str """ return self._username @username.setter def username(self, username): """Sets the username of this DevicegroupSchemaAuthenticationPassword. Username for authentication # noqa: E501 :param username: The username of this DevicegroupSchemaAuthenticationPassword. # noqa: E501 :type: str """ if username is None: raise ValueError("Invalid value for `username`, must not be `None`") # noqa: E501 self._username = username def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value if issubclass(DevicegroupSchemaAuthenticationPassword, dict): for key, value in self.items(): result[key] = value return result def to_str(self): """Returns the string representation of the model""" return pprint.pformat(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, DevicegroupSchemaAuthenticationPassword): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """Returns true if both objects are not equal""" return not self == other
29.972973
100
0.599188
4a14f3c8e8e555794c85f9b4c0335bedcbc9ed67
17,339
py
Python
heat/tests/test_wsgi.py
NeCTAR-RC/heat
b152817f192a7b46514793633ddc968c1fe1ebf8
[ "Apache-2.0" ]
1
2015-02-26T03:23:23.000Z
2015-02-26T03:23:23.000Z
heat/tests/test_wsgi.py
NeCTAR-RC/heat
b152817f192a7b46514793633ddc968c1fe1ebf8
[ "Apache-2.0" ]
null
null
null
heat/tests/test_wsgi.py
NeCTAR-RC/heat
b152817f192a7b46514793633ddc968c1fe1ebf8
[ "Apache-2.0" ]
null
null
null
# Copyright 2010-2011 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import datetime import json from oslo.config import cfg import stubout import webob from heat.common import exception from heat.common import wsgi from heat.tests.common import HeatTestCase class RequestTest(HeatTestCase): def setUp(self): self.stubs = stubout.StubOutForTesting() super(RequestTest, self).setUp() def test_content_type_missing(self): request = wsgi.Request.blank('/tests/123') self.assertRaises(exception.InvalidContentType, request.get_content_type, ('application/xml')) def test_content_type_unsupported(self): request = wsgi.Request.blank('/tests/123') request.headers["Content-Type"] = "text/html" self.assertRaises(exception.InvalidContentType, request.get_content_type, ('application/xml')) def test_content_type_with_charset(self): request = wsgi.Request.blank('/tests/123') request.headers["Content-Type"] = "application/json; charset=UTF-8" result = request.get_content_type(('application/json')) self.assertEqual("application/json", result) def test_content_type_from_accept_xml(self): request = wsgi.Request.blank('/tests/123') request.headers["Accept"] = "application/xml" result = request.best_match_content_type() self.assertEqual("application/json", result) def test_content_type_from_accept_json(self): request = wsgi.Request.blank('/tests/123') request.headers["Accept"] = "application/json" result = request.best_match_content_type() 
self.assertEqual("application/json", result) def test_content_type_from_accept_xml_json(self): request = wsgi.Request.blank('/tests/123') request.headers["Accept"] = "application/xml, application/json" result = request.best_match_content_type() self.assertEqual("application/json", result) def test_content_type_from_accept_json_xml_quality(self): request = wsgi.Request.blank('/tests/123') request.headers["Accept"] = ("application/json; q=0.3, " "application/xml; q=0.9") result = request.best_match_content_type() self.assertEqual("application/json", result) def test_content_type_accept_default(self): request = wsgi.Request.blank('/tests/123.unsupported') request.headers["Accept"] = "application/unsupported1" result = request.best_match_content_type() self.assertEqual("application/json", result) def test_best_match_language(self): # Test that we are actually invoking language negotiation by webop request = wsgi.Request.blank('/') accepted = 'unknown-lang' request.headers = {'Accept-Language': accepted} def fake_best_match(self, offers, default_match=None): # Best match on an unknown locale returns None return None self.stubs.SmartSet(request.accept_language, 'best_match', fake_best_match) self.assertIsNone(request.best_match_language()) # If Accept-Language is missing or empty, match should be None request.headers = {'Accept-Language': ''} self.assertIsNone(request.best_match_language()) request.headers.pop('Accept-Language') self.assertIsNone(request.best_match_language()) class ResourceTest(HeatTestCase): def setUp(self): self.stubs = stubout.StubOutForTesting() super(ResourceTest, self).setUp() def test_get_action_args(self): env = { 'wsgiorg.routing_args': [ None, { 'controller': None, 'format': None, 'action': 'update', 'id': 12, }, ], } expected = {'action': 'update', 'id': 12} actual = wsgi.Resource(None, None, None).get_action_args(env) self.assertEqual(expected, actual) def test_get_action_args_invalid_index(self): env = {'wsgiorg.routing_args': []} expected 
= {} actual = wsgi.Resource(None, None, None).get_action_args(env) self.assertEqual(expected, actual) def test_get_action_args_del_controller_error(self): actions = {'format': None, 'action': 'update', 'id': 12} env = {'wsgiorg.routing_args': [None, actions]} expected = {'action': 'update', 'id': 12} actual = wsgi.Resource(None, None, None).get_action_args(env) self.assertEqual(expected, actual) def test_get_action_args_del_format_error(self): actions = {'action': 'update', 'id': 12} env = {'wsgiorg.routing_args': [None, actions]} expected = {'action': 'update', 'id': 12} actual = wsgi.Resource(None, None, None).get_action_args(env) self.assertEqual(expected, actual) def test_dispatch(self): class Controller(object): def index(self, shirt, pants=None): return (shirt, pants) resource = wsgi.Resource(None, None, None) actual = resource.dispatch(Controller(), 'index', 'on', pants='off') expected = ('on', 'off') self.assertEqual(expected, actual) def test_dispatch_default(self): class Controller(object): def default(self, shirt, pants=None): return (shirt, pants) resource = wsgi.Resource(None, None, None) actual = resource.dispatch(Controller(), 'index', 'on', pants='off') expected = ('on', 'off') self.assertEqual(expected, actual) def test_dispatch_no_default(self): class Controller(object): def show(self, shirt, pants=None): return (shirt, pants) resource = wsgi.Resource(None, None, None) self.assertRaises(AttributeError, resource.dispatch, Controller(), 'index', 'on', pants='off') def test_resource_call_error_handle(self): class Controller(object): def delete(self, req, identity): return (req, identity) actions = {'action': 'delete', 'id': 12, 'body': 'data'} env = {'wsgiorg.routing_args': [None, actions]} request = wsgi.Request.blank('/tests/123', environ=env) request.body = '{"foo" : "value"}' resource = wsgi.Resource(Controller(), wsgi.JSONRequestDeserializer(), None) # The Resource does not throw webob.HTTPExceptions, since they # would be considered responses 
by wsgi and the request flow would end, # instead they are wrapped so they can reach the fault application # where they are converted to a nice JSON/XML response e = self.assertRaises(exception.HTTPExceptionDisguise, resource, request) self.assertIsInstance(e.exc, webob.exc.HTTPBadRequest) def test_resource_call_error_handle_localized(self): class Controller(object): def delete(self, req, identity): return (req, identity) actions = {'action': 'delete', 'id': 12, 'body': 'data'} env = {'wsgiorg.routing_args': [None, actions]} request = wsgi.Request.blank('/tests/123', environ=env) request.body = '{"foo" : "value"}' message_es = "No Encontrado" translated_ex = webob.exc.HTTPBadRequest(message_es) resource = wsgi.Resource(Controller(), wsgi.JSONRequestDeserializer(), None) def fake_translate_exception(ex, locale): return translated_ex self.stubs.SmartSet(wsgi, 'translate_exception', fake_translate_exception) e = self.assertRaises(exception.HTTPExceptionDisguise, resource, request) self.assertEqual(message_es, str(e.exc)) self.m.VerifyAll() class ResourceExceptionHandlingTest(HeatTestCase): scenarios = [ ('client_exceptions', dict( exception=exception.StackResourceLimitExceeded, exception_catch=exception.StackResourceLimitExceeded)), ('webob_bad_request', dict( exception=webob.exc.HTTPBadRequest, exception_catch=exception.HTTPExceptionDisguise)), ('webob_not_found', dict( exception=webob.exc.HTTPNotFound, exception_catch=exception.HTTPExceptionDisguise)), ] def test_resource_client_exceptions_dont_log_error(self): class Controller(object): def __init__(self, excpetion_to_raise): self.excpetion_to_raise = excpetion_to_raise def raise_exception(self, req, body): raise self.excpetion_to_raise() actions = {'action': 'raise_exception', 'body': 'data'} env = {'wsgiorg.routing_args': [None, actions]} request = wsgi.Request.blank('/tests/123', environ=env) request.body = '{"foo" : "value"}' resource = wsgi.Resource(Controller(self.exception), wsgi.JSONRequestDeserializer(), 
None) e = self.assertRaises(self.exception_catch, resource, request) e = e.exc if hasattr(e, 'exc') else e self.assertNotIn(str(e), self.logger.output) class JSONResponseSerializerTest(HeatTestCase): def test_to_json(self): fixture = {"key": "value"} expected = '{"key": "value"}' actual = wsgi.JSONResponseSerializer().to_json(fixture) self.assertEqual(expected, actual) def test_to_json_with_date_format_value(self): fixture = {"date": datetime.datetime(1, 3, 8, 2)} expected = '{"date": "0001-03-08T02:00:00"}' actual = wsgi.JSONResponseSerializer().to_json(fixture) self.assertEqual(expected, actual) def test_to_json_with_more_deep_format(self): fixture = {"is_public": True, "name": [{"name1": "test"}]} expected = '{"is_public": true, "name": [{"name1": "test"}]}' actual = wsgi.JSONResponseSerializer().to_json(fixture) self.assertEqual(expected, actual) def test_default(self): fixture = {"key": "value"} response = webob.Response() wsgi.JSONResponseSerializer().default(response, fixture) self.assertEqual(200, response.status_int) content_types = filter(lambda h: h[0] == 'Content-Type', response.headerlist) self.assertEqual(1, len(content_types)) self.assertEqual('application/json', response.content_type) self.assertEqual('{"key": "value"}', response.body) class JSONRequestDeserializerTest(HeatTestCase): def test_has_body_no_content_length(self): request = wsgi.Request.blank('/') request.method = 'POST' request.body = 'asdf' request.headers.pop('Content-Length') request.headers['Content-Type'] = 'application/json' self.assertFalse(wsgi.JSONRequestDeserializer().has_body(request)) def test_has_body_zero_content_length(self): request = wsgi.Request.blank('/') request.method = 'POST' request.body = 'asdf' request.headers['Content-Length'] = 0 request.headers['Content-Type'] = 'application/json' self.assertFalse(wsgi.JSONRequestDeserializer().has_body(request)) def test_has_body_has_content_length_no_content_type(self): request = wsgi.Request.blank('/') request.method = 
'POST' request.body = '{"key": "value"}' self.assertIn('Content-Length', request.headers) self.assertTrue(wsgi.JSONRequestDeserializer().has_body(request)) def test_has_body_has_content_length_plain_content_type(self): request = wsgi.Request.blank('/') request.method = 'POST' request.body = '{"key": "value"}' self.assertIn('Content-Length', request.headers) request.headers['Content-Type'] = 'text/plain' self.assertTrue(wsgi.JSONRequestDeserializer().has_body(request)) def test_has_body_has_content_type_malformed(self): request = wsgi.Request.blank('/') request.method = 'POST' request.body = 'asdf' self.assertIn('Content-Length', request.headers) request.headers['Content-Type'] = 'application/json' self.assertFalse(wsgi.JSONRequestDeserializer().has_body(request)) def test_has_body_has_content_type(self): request = wsgi.Request.blank('/') request.method = 'POST' request.body = '{"key": "value"}' self.assertIn('Content-Length', request.headers) request.headers['Content-Type'] = 'application/json' self.assertTrue(wsgi.JSONRequestDeserializer().has_body(request)) def test_has_body_has_wrong_content_type(self): request = wsgi.Request.blank('/') request.method = 'POST' request.body = '{"key": "value"}' self.assertIn('Content-Length', request.headers) request.headers['Content-Type'] = 'application/xml' self.assertFalse(wsgi.JSONRequestDeserializer().has_body(request)) def test_has_body_has_aws_content_type_only(self): request = wsgi.Request.blank('/?ContentType=JSON') request.method = 'GET' request.body = '{"key": "value"}' self.assertIn('Content-Length', request.headers) self.assertTrue(wsgi.JSONRequestDeserializer().has_body(request)) def test_has_body_respect_aws_content_type(self): request = wsgi.Request.blank('/?ContentType=JSON') request.method = 'GET' request.body = '{"key": "value"}' self.assertIn('Content-Length', request.headers) request.headers['Content-Type'] = 'application/xml' self.assertTrue(wsgi.JSONRequestDeserializer().has_body(request)) def 
test_has_body_content_type_with_get(self): request = wsgi.Request.blank('/') request.method = 'GET' request.body = '{"key": "value"}' self.assertIn('Content-Length', request.headers) self.assertTrue(wsgi.JSONRequestDeserializer().has_body(request)) def test_no_body_no_content_length(self): request = wsgi.Request.blank('/') self.assertFalse(wsgi.JSONRequestDeserializer().has_body(request)) def test_from_json(self): fixture = '{"key": "value"}' expected = {"key": "value"} actual = wsgi.JSONRequestDeserializer().from_json(fixture) self.assertEqual(expected, actual) def test_from_json_malformed(self): fixture = 'kjasdklfjsklajf' self.assertRaises(webob.exc.HTTPBadRequest, wsgi.JSONRequestDeserializer().from_json, fixture) def test_default_no_body(self): request = wsgi.Request.blank('/') actual = wsgi.JSONRequestDeserializer().default(request) expected = {} self.assertEqual(expected, actual) def test_default_with_body(self): request = wsgi.Request.blank('/') request.method = 'POST' request.body = '{"key": "value"}' actual = wsgi.JSONRequestDeserializer().default(request) expected = {"body": {"key": "value"}} self.assertEqual(expected, actual) def test_default_with_get_with_body(self): request = wsgi.Request.blank('/') request.method = 'GET' request.body = '{"key": "value"}' actual = wsgi.JSONRequestDeserializer().default(request) expected = {"body": {"key": "value"}} self.assertEqual(expected, actual) def test_default_with_get_with_body_with_aws(self): request = wsgi.Request.blank('/?ContentType=JSON') request.method = 'GET' request.body = '{"key": "value"}' actual = wsgi.JSONRequestDeserializer().default(request) expected = {"body": {"key": "value"}} self.assertEqual(expected, actual) def test_from_json_exceeds_max_json_mb(self): cfg.CONF.set_override('max_json_body_size', 10) body = json.dumps(['a'] * cfg.CONF.max_json_body_size) self.assertTrue(len(body) > cfg.CONF.max_json_body_size) error = self.assertRaises(exception.RequestLimitExceeded, 
wsgi.JSONRequestDeserializer().from_json, body) msg = 'Request limit exceeded: JSON body size ' + \ '(%s bytes) exceeds maximum allowed size (%s bytes).' % \ (len(body), cfg.CONF.max_json_body_size) self.assertEqual(msg, str(error))
40.511682
79
0.630602
4a14f4e575b2ec54aa133ce3b2fca0d8dd934262
1,302
py
Python
app/modules/social/video.py
Eastwu5788/Heron
646eeaacea77e293c6eccc6dad82a04ece9294a3
[ "Apache-2.0" ]
7
2018-01-29T02:46:31.000Z
2018-03-25T11:15:10.000Z
app/modules/social/video.py
Eastwu5788/Heron
646eeaacea77e293c6eccc6dad82a04ece9294a3
[ "Apache-2.0" ]
4
2021-06-08T19:38:03.000Z
2022-03-11T23:18:46.000Z
app/modules/social/video.py
Eastwu5788/Heron
646eeaacea77e293c6eccc6dad82a04ece9294a3
[ "Apache-2.0" ]
1
2021-06-12T14:14:35.000Z
2021-06-12T14:14:35.000Z
from . import social from app import db from app.modules.base.base_handler import BaseHandler from app.models.social.image import ImageModel from app.models.social.video import VideoModel from app.helper.auth import login_required from app.helper.upload import UploadImage, UploadVideo from app.helper.response import json_success_response, json_fail_response class UploadChatVideoHandler(BaseHandler): @login_required def post(self): upload_video = UploadVideo() upload_image = UploadImage() if not upload_video.videos: return json_fail_response(2202) if not upload_image.images: return json_fail_response(2203) upload_video.save_videos() upload_image.save_images() img_model = upload_image.images[0]["image"] video_model = upload_video.videos[0]["video"] video_model.cover_id = img_model.image_id db.session.commit() video_info = VideoModel.format_video_info(video_model) video_info["video_id"] = video_model.video_id video_info["tiny_img"] = ImageModel.generate_image_url(img_model, "e") return json_success_response(video_info) social.add_url_rule("/video/uploadchatvideo", view_func=UploadChatVideoHandler.as_view("video_upload_chat_video"))
32.55
114
0.738095
4a14f4f6d2d8263d7730a1217c9daefadb3efe45
387
py
Python
main_file.py
das-soham/PortfolioBuilder
891017ced2eedf9b5514c448d779b2622868422e
[ "MIT" ]
null
null
null
main_file.py
das-soham/PortfolioBuilder
891017ced2eedf9b5514c448d779b2622868422e
[ "MIT" ]
null
null
null
main_file.py
das-soham/PortfolioBuilder
891017ced2eedf9b5514c448d779b2622868422e
[ "MIT" ]
null
null
null
import TradeBook import Portfolio from Quotes import * import sqlite3 import os #portfolio = Portfolio.Portfolio() #portfolio.build('TradeBook',db_path='K:\/',db_name='Portfolio.db') #portfolio.write_db(db_path='K:\/',db_name='Portfolio.db') #print(portfolio.portfolio) connexion = sqlite3.connect(os.path.join('K:\/','Portfolio.db')) distinct_positions(connexion,connexion.cursor())
25.8
67
0.764858
4a14f52fc781a0aa82067b792f633978f65f53de
1,591
py
Python
M7/Projeto_Final.py
DouglasCarvalhoPereira/Interact-OS-PYTHON
2405d34a0624f1e1277399c936d87d444cf0a7d6
[ "MIT" ]
null
null
null
M7/Projeto_Final.py
DouglasCarvalhoPereira/Interact-OS-PYTHON
2405d34a0624f1e1277399c936d87d444cf0a7d6
[ "MIT" ]
null
null
null
M7/Projeto_Final.py
DouglasCarvalhoPereira/Interact-OS-PYTHON
2405d34a0624f1e1277399c936d87d444cf0a7d6
[ "MIT" ]
null
null
null
#Projeto_Final # >>> Escrevendo um roteiro do Zero # 1° - Importante entender completamente o problema # 2° - Identificar as entradas e saídas necessárias # 3° - Realizar uma pesquisa para saber se já existe uma ferramenta ou módulo pronto para isso # 4° - Planejamento, saber quais dados são úteis para a solução, ordem de operações que preciso executar # e como todas as peças se uniem. # É comum escrever um documento detalhado, a declaração do problema as ferramentas que serão usadas # para resolve-lo e o plano de ataque a uma solução. # Pedir feedback de outros programadores garante novos pontos de vistas e soluções possíveis. # 5° - Escrita real do Script, verificando erros e possíveis melhoras. # 1 - Encontrar linhas de registros específicas em um arquivo de log. - ==> Expressões Regulares | regex101.com # 2 - Contar quantos erros são do mesmo tipo - ==> 1 Dicionário para... # Classificar os dados no dicionários por critérios diferentes # 3 - Quantas informações e mensagens de erro existem para um determinado usuário - ==> Outro Dicionário para... # Classificar os dados no dicionários por critérios diferentes # 4 - A saída do script deve ser um par de arquivos CSV, cada um deles contendo os nomes das colunas e dados na ordem que precisam ser apresentados. # 5 - Em seguida chamar o script CSV html.py para criar arquivos HTML com base em dados CSV # O segredo é passar dois parametros para ele! # 1° - O nome do arquivo CSV a ser lido e o # 2° - Nome do HTML gerado # Pode ser feito usando Python ou Bash
56.821429
148
0.736015
4a14f552c46151954d4ad3daf584c8154ad12442
2,386
py
Python
custom_components/hasl/__init__.py
Ziqqo/hasl-platform
27386314bf58626538d59c38d89249b07ed9256a
[ "Apache-2.0" ]
null
null
null
custom_components/hasl/__init__.py
Ziqqo/hasl-platform
27386314bf58626538d59c38d89249b07ed9256a
[ "Apache-2.0" ]
null
null
null
custom_components/hasl/__init__.py
Ziqqo/hasl-platform
27386314bf58626538d59c38d89249b07ed9256a
[ "Apache-2.0" ]
null
null
null
"""HomeAssistant Sensor for SL (Storstockholms Lokaltrafik)""" import datetime import json import logging from datetime import timedelta import homeassistant.helpers.config_validation as cv import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.helpers.event import async_track_time_interval from homeassistant.util.dt import now from hasl import (haslapi, pu1api, tp3api, HASL_Error, HASL_API_Error, HASL_HTTP_Error) __version__ = '2.2.0' _LOGGER = logging.getLogger(__name__) DOMAIN = "hasl" VERSION = __version__ CONF_PU1_KEY = 'pu1key' CONF_TP3_KEY = 'tp3key' # Schema to validate the configured MQTT topic PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Optional(CONF_PU1_KEY): cv.string, vol.Optional(CONF_TP3_KEY): cv.string }, extra=vol.ALLOW_EXTRA) def setup(hass, config): """Setup our communication platform.""" try: _pu1api = pu1api(config[DOMAIN][CONF_PU1_KEY]) except: _pu1api = pu1api('') try: _tp3api = tp3api(config[DOMAIN][CONF_TP3_KEY]) except: _tp3api = tp3api('') def clear_cache(call): for sensor in hass.data[DOMAIN]: hass.data[DOMAIN][sensor] = '' jsonFile = open(hass.config.path('.storage/haslcache.json'), "w") jsonFile.write(json.dumps({})) jsonFile.close() return "{ 'result': true }" def find_location(call): search_string = call.data.get('search_string') return _pu1api.request(search_string) def find_trip_id(call): origin = call.data.get('org') destination = call.data.get('dest') return _tp3api.request(origin, destination, '', '', '', '') def find_trip_pos(call): olat = call.data.get('orig_lat') olon = call.data.get('orig_long') dlat = call.data.get('dest_lat') dlon = call.data.get('dest_long') return _tp3api.request('', '', olat, olon, dlat, dlon) # track_time_interval(hass, FUNC, INTERVALL). 
hass.services.register(DOMAIN, 'clear_cache', clear_cache) hass.services.register(DOMAIN, 'find_location', find_location) hass.services.register(DOMAIN, 'find_trip_id', find_trip_id) hass.services.register(DOMAIN, 'find_trip_pos', find_trip_pos) # Return boolean to indicate that initialization was successfully. return True
29.097561
73
0.685247
4a14f66f3d509dd4a3744a03188e653f1212b505
6,068
py
Python
mirobot/__init__.py
mirobot/mirobot-py
c745ec309c3911eeb67bf961dcd5205addd83db1
[ "MIT" ]
4
2017-08-07T15:27:53.000Z
2021-03-07T13:41:59.000Z
mirobot/__init__.py
mirobot/mirobot-py
c745ec309c3911eeb67bf961dcd5205addd83db1
[ "MIT" ]
2
2015-12-28T16:53:18.000Z
2018-05-04T15:59:26.000Z
mirobot/__init__.py
mirobot/mirobot-py
c745ec309c3911eeb67bf961dcd5205addd83db1
[ "MIT" ]
11
2015-11-07T05:41:35.000Z
2021-12-29T12:42:50.000Z
"""Client library for the Mirobot drawing robot.

Commands are sent over a socket (via SocketHandler) as JSON messages.
Every outgoing command carries a unique ``id``; the robot replies with
messages carrying the same id and a status of ``accepted``, ``notify``
or ``complete``.  Unsolicited ``collide``/``follow`` events may arrive
interleaved with command replies and are dispatched to user callbacks.
"""

try:
    from queue import Queue          # Python 3
except ImportError:
    from Queue import Queue          # Python 2

from mirobot.socket_handler import SocketHandler
import time
import string
import random
import sys
import json

try:
    import urllib.request as request  # Python 3
except ImportError:
    import urllib2 as request         # Python 2

# Unique object placed on the send queue to tell the SocketHandler thread
# to shut down (see disconnect()).
_sentinel = object()


class Mirobot:

    def __init__(self, address = None, debug = False):
        # Initialisation for the id field: a random 4-char nonce plus a
        # per-instance wrapping counter makes message ids unique.
        self.nonce = ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(4))
        self.n = 0
        self.debug = debug
        # callbacks (all optional; set via *Notify() methods)
        self.__on_error = None
        self.__on_collide = None
        self.__on_follow = None
        if address:
            self.connect(address)

    def connect(self, address):
        """Open the socket connection to *address* and query the firmware version."""
        # Set up the socket handling
        self.__send_q = Queue()
        self.recv_q = Queue()
        self.socket = SocketHandler(address, self.__send_q, self.recv_q, debug=self.debug, sentinel = _sentinel)
        self.socket.start()
        # get the version once connected
        self.version = self.__send('version')

    def connectMenu(self, devices):
        """Interactively ask which discovered Mirobot to use.

        Returns the user's (1-based) choice as entered, i.e. a string.
        """
        print("Select the Mirobot to connect to:")
        for i, device in enumerate(devices):
            print(" %i: %s" % (i+1, device['name']))
        # raw_input only exists on Python 2; fall back to input() on Python 3.
        try:
            choice = raw_input("Select a number:")
        except:
            choice = input("Select a number: ")
        return choice

    def autoConnect(self, id = None, interactive = False):
        """Discover Mirobots on the LAN and connect to one.

        Queries the local discovery service; with *interactive* set, a menu is
        shown, otherwise *id* selects the device (and an unambiguous single
        device is auto-selected when no id is given).
        """
        try:
            res = request.urlopen("http://local.mirobot.io/devices.json").read()
        except:
            raise Exception("Could not connect to discovery server")
        # Python 3 returns bytes (decode explicitly); Python 2 returns str.
        try:
            devices = json.loads(str(res, 'utf-8'))
        except TypeError:
            devices = json.loads(res)
        print(devices)
        if interactive:
            choice = self.connectMenu(devices['devices'])
            print("Connecting to: %s" % devices['devices'][int(choice)-1]['name'])
            self.connect(devices['devices'][int(choice)-1]['address'])
        else:
            if id:
                filtered = [item for item in devices['devices'] if item['name'] == id]
                if len(filtered) == 0:
                    raise Exception("No Mirobots found with id: %s" % id)
                elif len(filtered) == 1:
                    # Connect to the only device we've found
                    self.connect(filtered[0]['address'])
                else:
                    raise Exception("Multiple Mirobots found with id: %s" % id)
            else:
                if len(devices['devices']) == 0:
                    raise Exception("No Mirobots found")
                elif len(devices['devices']) == 1:
                    # Connect to the only device we've found
                    self.connect(devices['devices'][0]['address'])
                else:
                    raise Exception("Too many Mirobots found to auto connect without specifying an ID")

    def errorNotify(self, on_error):
        """Register a callback invoked instead of raising when a command fails."""
        self.__on_error = on_error

    def collideNotify(self, on_collide):
        """Register a collision callback; a falsy value disables notifications."""
        enabled = bool(on_collide)
        self.__on_collide = on_collide
        self.__send('collideNotify', ('false','true')[enabled])

    def followNotify(self, on_follow):
        """Register a line-follow callback; a falsy value disables notifications."""
        enabled = bool(on_follow)
        self.__on_follow = on_follow
        self.__send('followNotify', ('false','true')[enabled])

    def ping(self):
        return self.__send('ping')

    def uptime(self):
        return self.__send('uptime')

    # Movement commands: the extra argument is the command timeout, which is
    # scaled from the distance/angle -- presumably ~20 units per second;
    # TODO confirm against firmware speed.
    def forward(self, distance):
        return self.__send('forward', distance, distance/20)

    def back(self, distance):
        return self.__send('back', distance, distance/20)

    def left(self, degrees):
        return self.__send('left', degrees, degrees/20)

    def right(self, degrees):
        return self.__send('right', degrees, degrees/20)

    def penup(self):
        return self.__send('penup')

    def pendown(self):
        return self.__send('pendown')

    def beep(self, milliseconds = 500):
        # Timeout scales with beep duration.
        return self.__send('beep', milliseconds, milliseconds / 500)

    def collideState(self):
        return self.__send('collideState')

    def followState(self):
        return self.__send('followState')

    def disconnect(self):
        # Pushing the sentinel makes the SocketHandler thread shut down.
        self.__send_q.put(_sentinel)

    def __send(self, cmd, arg = None, timeout = 1):
        """Send *cmd* (with optional *arg*) and wait for its completion.

        Returns the 'msg' payload of the completion reply.  If an error
        callback was registered via errorNotify(), failures are routed to it
        (and its return value is returned) instead of being raised.
        """
        # Assemble the message
        msg = {'cmd': cmd}
        if (arg is not None):
            msg['arg'] = str(arg)
        # Send the message and handle exceptions
        try:
            return self.__send_or_raise(msg, timeout)
        except Exception as x:
            if not self.__on_error:
                raise
            return self.__on_error(x, msg, timeout, self)

    def __send_or_raise(self, msg, timeout):
        """Queue *msg* and pump the receive queue until its 'complete' reply.

        Implements the accepted -> (notify)* -> complete handshake; collide
        and follow events arriving in between are dispatched to callbacks.
        Raises IOError on timeout or protocol violations.
        """
        msg_id = msg['id'] = self.generate_id()
        self.__send_q.put(msg)
        deadline = timeout + time.time()
        accepted = False
        while True:
            try:
                # Waits are floored at 1 second even past the deadline.
                timeout = max(1, deadline - time.time())
                incoming = self.recv_q.get(block = True, timeout = timeout)
            except KeyboardInterrupt as e:
                self.disconnect()
                raise e
            except:
                # .get raises "Empty"
                if (accepted):
                    raise IOError("Mirobot timed out awaiting completion of %r" % (msg,))
                raise IOError("Mirobot timed out awaiting acceptance of %r" % (msg,))
            try:
                rx_id = incoming.get('id','???')
                if rx_id != msg_id:
                    # Unsolicited event messages may interleave with replies.
                    if (rx_id == 'collide'):
                        self.__collide(incoming)
                        continue
                    if (rx_id == 'follow'):
                        self.__follow(incoming)
                        continue
                    raise IOError("Received message ID (%s) does not match expected (%s)" % (rx_id, msg_id))
                rx_status = incoming.get('status','???')
                if rx_status == 'accepted':
                    accepted = True
                elif rx_status == 'complete':
                    return incoming.get('msg',None)
                elif rx_status == 'notify':
                    pass
                else:
                    raise IOError("Received message status (%s) unexpected" % (rx_status,))
            finally:
                self.recv_q.task_done()

    def __collide(self, msg):
        # Dispatch a collision event; payload is 'left', 'right' or 'both'.
        if self.__on_collide:
            left = msg['msg'] in ('both','left')
            right = msg['msg'] in ('both','right')
            self.__on_collide(left, right, msg, self)

    def __follow(self, msg):
        # Dispatch a line-follow event; payload is an integer state.
        if self.__on_follow:
            state = int(msg['msg'])
            self.__on_follow(state, msg, self)

    def generate_id(self):
        # Counter wraps at 0x10000 so ids stay exactly 4 hex digits wide.
        self.n = (self.n + 1) % 0x10000
        return '%s%04x' % (self.nonce, self.n)
29.891626
108
0.623599
4a14f82b5e611777a6a63f8b615dfc52398ba19e
621
py
Python
asn1tools/codecs/permitted_alphabet.py
cromulencellc/asn1tools
30eb88e287cc1616903858aa96ee8791a4d7bf1c
[ "MIT" ]
198
2017-08-04T21:49:15.000Z
2022-03-26T10:11:21.000Z
asn1tools/codecs/permitted_alphabet.py
cromulencellc/asn1tools
30eb88e287cc1616903858aa96ee8791a4d7bf1c
[ "MIT" ]
144
2017-09-29T12:06:51.000Z
2022-03-29T13:04:44.000Z
asn1tools/codecs/permitted_alphabet.py
cromulencellc/asn1tools
30eb88e287cc1616903858aa96ee8791a4d7bf1c
[ "MIT" ]
73
2017-10-09T13:33:28.000Z
2022-03-11T01:35:22.000Z
"""Predefined character alphabets used for restricted string types."""

import string

# ``unichr`` only exists on Python 2; alias it to ``chr`` on Python 3 so the
# BMP alphabet below can be built identically on both.
try:
    unichr
except NameError:
    unichr = chr

# NumericString: space plus the decimal digits.
NUMERIC_STRING = ' ' + string.digits

# PrintableString: upper case, lower case, digits and a small punctuation set
# (order matters to callers indexing into the alphabet, so it is preserved).
PRINTABLE_STRING = (string.ascii_uppercase
                    + string.ascii_lowercase
                    + string.digits
                    + " '()+,-./:=?")

# IA5String: the full 7-bit ASCII range.
IA5_STRING = ''.join(map(chr, range(0x80)))

# BMPString: every code point in the Basic Multilingual Plane except the
# UTF-16 surrogate range U+D800..U+DFFF (reserved code points; code points in
# supplementary planes are not supported at this point).
BMP_STRING = ''.join(unichr(cp) for cp in range(0x10000)
                     if not 0xd800 <= cp <= 0xdfff)

# VisibleString: printable ASCII, space through tilde.
VISIBLE_STRING = ''.join(map(chr, range(0x20, 0x7f)))
23
83
0.618357
4a14f838b8f9d2561def3277b097198e6b4bc19b
2,169
py
Python
pdftextpos.py
pr4u4t/pdf-text-position
2be53194f745a1b74ef7f638a0ea4d592ab6a805
[ "MIT" ]
null
null
null
pdftextpos.py
pr4u4t/pdf-text-position
2be53194f745a1b74ef7f638a0ea4d592ab6a805
[ "MIT" ]
null
null
null
pdftextpos.py
pr4u4t/pdf-text-position
2be53194f745a1b74ef7f638a0ea4d592ab6a805
[ "MIT" ]
null
null
null
"""Print the position (bounding box) and text of every text item in a PDF.

Usage: pdftextpos.py input.pdf
"""

from pathlib import Path
from typing import Iterable, Any
from pdfminer.high_level import extract_pages
import sys
import os

# Global page counter, incremented each time an LTPage element is labelled.
page = 1

# pdfminer layout classes that are neither printed nor descended into.
# (LTChar is skipped so individual characters are not reported; remove it
# from this set to get per-character positions.)
_IGNORED_CLASSES = frozenset({
    "LTChar", "LTAnno", "LTCurve", "LTFigure", "LTRect", "LTLine",
})


def show_ltitem_hierarchy(o: Any, depth=0):
    """Show location and text of LTItem and all its descendants.

    Args:
        o: a pdfminer layout item (LTPage, LTTextLine, ...) or iterable of them.
        depth: recursion depth, used for indentation; 0 prints the header.
    """
    if depth == 0:
        print('element x1 y1 x2 y2 text')
        print('------------------------------ --- --- --- ---- -----')

    kind = o.__class__.__name__
    if kind in _IGNORED_CLASSES:
        return

    # Text boxes themselves are not printed -- only their line children are,
    # via the recursion below.
    if kind != "LTTextBoxHorizontal":
        name = f'{get_indented_name(o, depth):<30.30s} '
        pos = f'{get_optional_bbox(o)} '
        text = f'{get_optional_text(o)}'
        if text != '':
            print(name, pos, text)

    # Containers (pages, text boxes, lines) are iterable; recurse into them.
    if isinstance(o, Iterable):
        for child in o:
            show_ltitem_hierarchy(child, depth=depth + 1)


def get_indented_name(o: Any, depth: int) -> str:
    """Indented class name of LTItem; pages get a running page number."""
    if o.__class__.__name__ == "LTPage":
        global page
        ctrl = ' ' + str(page)
        page += 1
    else:
        ctrl = ''
    return ' ' * depth + o.__class__.__name__ + ctrl


def get_optional_bbox(o: Any) -> str:
    """Bounding box of LTItem if available, otherwise empty string."""
    if hasattr(o, 'bbox'):
        # Four coordinates (x1 y1 x2 y2), each left-aligned in 4 columns.
        return ''.join(f'{i:<4.0f}' for i in o.bbox)
    return ''


def get_optional_text(o: Any) -> str:
    """Text of LTItem if available, otherwise empty string."""
    if hasattr(o, 'get_text'):
        return o.get_text().strip()
    return ''


def main() -> None:
    """Command-line entry point: validate argv and print the hierarchy."""
    if len(sys.argv) != 2:
        print("Input pdf file must be specified")
        print('e.g.:', sys.argv[0], 'input.pdf')
        sys.exit(1)

    path = Path(sys.argv[1]).expanduser()
    if not os.path.isfile(path):
        print('Specified file does not exist!')
        sys.exit(1)

    show_ltitem_hierarchy(extract_pages(path))


# Guarded so the helpers above are importable without side effects
# (the original ran the CLI logic, including sys.exit, at import time).
if __name__ == '__main__':
    main()
25.22093
73
0.596127
4a14f97534816b3e84ce8c199c94b1522fb7d23c
9,026
py
Python
test/functional/feature_signet.py
widecoin-project/widecoin
143b190a61f95a4b7d40c5da484cdde8f0c5ac3f
[ "MIT" ]
8
2021-04-17T16:11:50.000Z
2021-06-23T05:30:39.000Z
test/functional/feature_signet.py
widecoin-project/widecoin
143b190a61f95a4b7d40c5da484cdde8f0c5ac3f
[ "MIT" ]
1
2021-04-18T11:57:59.000Z
2021-04-18T11:57:59.000Z
test/functional/feature_signet.py
widecoin-project/widecoin
143b190a61f95a4b7d40c5da484cdde8f0c5ac3f
[ "MIT" ]
7
2021-04-17T16:04:12.000Z
2021-06-10T00:54:53.000Z
#!/usr/bin/env python3
# Copyright (c) 2019-2020 The Widecoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test basic signet functionality"""

from decimal import Decimal

from test_framework.test_framework import WidecoinTestFramework
from test_framework.util import assert_equal

# Ten pre-mined signet block submissions (raw hex, including the signet
# solution for the default challenge).  These must stay byte-identical:
# they are consumed by submitblock below and any change invalidates the
# embedded signatures.
signet_blocks = [
    '00000020f61eee3b63a380a477a063af32b2bbc97c9ff9f01f2c4225e973988108000000f575c83235984e7dc4afc1f30944c170462e84437ab6f2d52e16878a79e4678bd1914d5fae77031eccf4070001010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff025151feffffff0200f2052a010000001600149243f727dd5343293eb83174324019ec16c2630f0000000000000000776a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf94c4fecc7daa2490047304402205e423a8754336ca99dbe16509b877ef1bf98d008836c725005b3c787c41ebe46022047246e4467ad7cc7f1ad98662afcaf14c115e0095a227c7b05c5182591c23e7e01000120000000000000000000000000000000000000000000000000000000000000000000000000',
    '00000020533b53ded9bff4adc94101d32400a144c54edc5ed492a3b26c63b2d686000000b38fef50592017cfafbcab88eb3d9cf50b2c801711cad8299495d26df5e54812e7914d5fae77031ecfdd0b0001010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff025251feffffff0200f2052a01000000160014fd09839740f0e0b4fc6d5e2527e4022aa9b89dfa0000000000000000776a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf94c4fecc7daa24900473044022031d64a1692cdad1fc0ced69838169fe19ae01be524d831b95fcf5ea4e6541c3c02204f9dea0801df8b4d0cd0857c62ab35c6c25cc47c930630dc7fe723531daa3e9b01000120000000000000000000000000000000000000000000000000000000000000000000000000',
    '000000202960f3752f0bfa8858a3e333294aedc7808025e868c9dc03e71d88bb320000007765fcd3d5b4966beb338bba2675dc2cf2ad28d4ad1d83bdb6f286e7e27ac1f807924d5fae77031e81d60b0001010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff025351feffffff0200f2052a010000001600141e5fb426042692ae0e87c070e78c39307a5661c20000000000000000776a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf94c4fecc7daa2490047304402205de93694763a42954865bcf1540cb82958bc62d0ec4eee02070fb7937cd037f4022067f333753bce47b10bc25eb6e1f311482e994c862a7e0b2d41ab1c8679fd1b1101000120000000000000000000000000000000000000000000000000000000000000000000000000',
    '00000020b06443a13ae1d3d50faef5ecad38c6818194dc46abca3e972e2aacdae800000069a5829097e80fee00ac49a56ea9f82d741a6af84d32b3bc455cf31871e2a8ac27924d5fae77031e9c91050001010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff025451feffffff0200f2052a0100000016001430db2f8225dcf7751361ab38735de08190318cb70000000000000000776a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf94c4fecc7daa2490047304402200936f5f9872f6df5dd242026ad52241a68423f7f682e79169a8d85a374eab9b802202cd2979c48b321b3453e65e8f92460db3fca93cbea8539b450c959f4fbe630c601000120000000000000000000000000000000000000000000000000000000000000000000000000',
    '000000207ed403758a4f228a1939418a155e2ebd4ae6b26e5ffd0ae433123f7694010000542e80b609c5bc58af5bdf492e26d4f60cd43a3966c2e063c50444c29b3757a636924d5fae77031ee8601d0001010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff025551feffffff0200f2052a01000000160014edc207e014df34fa3885dff97d1129d356e1186a0000000000000000776a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf94c4fecc7daa24900473044022021a3656609f85a66a2c5672ed9322c2158d57251040d2716ed202a1fe14f0c12022057d68bc6611f7a9424a7e00bbf3e27e6ae6b096f60bac624a094bc97a59aa1ff01000120000000000000000000000000000000000000000000000000000000000000000000000000',
    '000000205bea0a88d1422c3df08d766ad72df95084d0700e6f873b75dd4e986c7703000002b57516d33ed60c2bdd9f93d6d5614083324c837e68e5ba6e04287a7285633585924d5fae77031ed171960001010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff025651feffffff0200f2052a010000001600143ae612599cf96f2442ce572633e0251116eaa52f0000000000000000776a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf94c4fecc7daa24900473044022059a7c54de76bfdbb1dd44c78ea2dbd2bb4e97f4abad38965f41e76433e56423c022054bf17f04fe17415c0141f60eebd2b839200f574d8ad8d55a0917b92b0eb913401000120000000000000000000000000000000000000000000000000000000000000000000000000',
    '00000020daf3b60d374b19476461f97540498dcfa2eb7016238ec6b1d022f82fb60100007a7ae65b53cb988c2ec92d2384996713821d5645ffe61c9acea60da75cd5edfa1a944d5fae77031e9dbb050001010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff025751feffffff0200f2052a01000000160014ef2dceae02e35f8137de76768ae3345d99ca68860000000000000000776a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf94c4fecc7daa2490047304402202b3f946d6447f9bf17d00f3696cede7ee70b785495e5498274ee682a493befd5022045fc0bcf9332243168b5d35507175f9f374a8eba2336873885d12aada67ea5f601000120000000000000000000000000000000000000000000000000000000000000000000000000',
    '00000020457cc5f3c2e1a5655bc20e20e48d33e1b7ea68786c614032b5c518f0b6000000541f36942d82c6e7248275ff15c8933487fbe1819c67a9ecc0f4b70bb7e6cf672a944d5fae77031e8f39860001010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff025851feffffff0200f2052a0100000016001472a27906947c06d034b38ba2fa13c6391a4832790000000000000000776a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf94c4fecc7daa2490047304402202d62805ce60cbd60591f97f949b5ea5bd7e2307bcde343e6ea8394da92758e72022053a25370b0aa20da100189b7899a8f8675a0fdc60e38ece6b8a4f98edd94569e01000120000000000000000000000000000000000000000000000000000000000000000000000000',
    '00000020a2eb61eb4f3831baa3a3363e1b42db4462663f756f07423e81ed30322102000077224de7dea0f8d0ec22b1d2e2e255f0a987b96fe7200e1a2e6373f48a2f5b7894954d5fae77031e36867e0001010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff025951feffffff0200f2052a01000000160014aa0ad9f26801258382e0734dceec03a4a75f60240000000000000000776a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf94c4fecc7daa2490047304402206fa0d59990eed369bd7375767c9a6c9369fae209152b8674e520da270605528c0220749eed3b12dbe3f583f505d21803e4aef59c8e24c5831951eafa4f15a8f92c4e01000120000000000000000000000000000000000000000000000000000000000000000000000000',
    '00000020a868e8514be5e46dabd6a122132f423f36a43b716a40c394e2a8d063e1010000f4c6c717e99d800c699c25a2006a75a0c5c09f432a936f385e6fce139cdbd1a5e9964d5fae77031e7d026e0001010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff025a51feffffff0200f2052a01000000160014aaa671c82b138e3b8f510cd801e5f2bd0aa305940000000000000000776a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf94c4fecc7daa24900473044022042309f4c3c7a1a2ac8c24f890f962df1c0086cec10be0868087cfc427520cb2702201dafee8911c269b7e786e242045bb57cef3f5b0f177010c6159abae42f646cc501000120000000000000000000000000000000000000000000000000000000000000000000000000',
]


class SignetBasicTest(WidecoinTestFramework):
    def set_test_params(self):
        self.chain = "signet"
        self.num_nodes = 6
        self.setup_clean_chain = True
        # Three node pairs, each pair sharing a signet challenge:
        shared_args1 = ["-signetchallenge=51"]  # OP_TRUE
        shared_args2 = []  # default challenge
        # we use the exact same challenge except we do it as a 2-of-2, which means it should fail
        shared_args3 = ["-signetchallenge=522103ad5e0edad18cb1f0fc0d28a3d4f1f3e445640337489abb10404f2d1e086be430210359ef5021964fe22d6f8e05b2463c9540ce96883fe3b278760f048f5189f2e6c452ae"]

        self.extra_args = [
            shared_args1, shared_args1,
            shared_args2, shared_args2,
            shared_args3, shared_args3,
        ]

    def run_test(self):
        self.log.info("basic tests using OP_TRUE challenge")

        self.log.info('getmininginfo')
        # On a fresh signet chain, mining info reports zero blocks/hashrate
        # and omits the current-block fields.
        mining_info = self.nodes[0].getmininginfo()
        assert_equal(mining_info['blocks'], 0)
        assert_equal(mining_info['chain'], 'signet')
        assert 'currentblocktx' not in mining_info
        assert 'currentblockweight' not in mining_info
        assert_equal(mining_info['networkhashps'], Decimal('0'))
        assert_equal(mining_info['pooledtx'], 0)

        self.nodes[0].generate(1)

        self.log.info("pregenerated signet blocks check")

        # Node 2 runs the default challenge; every canned block must be
        # accepted (submitblock returns None on success).
        height = 0
        for block in signet_blocks:
            assert_equal(self.nodes[2].submitblock(block), None)
            height += 1
            assert_equal(self.nodes[2].getblockcount(), height)

        self.log.info("pregenerated signet blocks check (incompatible solution)")

        # Node 4 runs the 2-of-2 challenge, so the same block's signet
        # solution must be rejected.
        assert_equal(self.nodes[4].submitblock(signet_blocks[0]), 'bad-signet-blksig')

        self.log.info("test that signet logs the network magic on node start")
        with self.nodes[0].assert_debug_log(["Signet derived magic (message start)"]):
            self.restart_node(0)


if __name__ == '__main__':
    SignetBasicTest().main()
120.346667
665
0.910259
4a14f9fcb9385eee54747669194429b759fe55ec
4,460
py
Python
authdata/tests/test_models.py
mpassid/MPASSid-data
1885721366165c768f2b66513f9c124b8aeaadb5
[ "MIT" ]
null
null
null
authdata/tests/test_models.py
mpassid/MPASSid-data
1885721366165c768f2b66513f9c124b8aeaadb5
[ "MIT" ]
8
2018-05-03T05:39:38.000Z
2021-03-19T21:59:25.000Z
authdata/tests/test_models.py
mpassid/MPASSid-data
1885721366165c768f2b66513f9c124b8aeaadb5
[ "MIT" ]
1
2019-09-25T05:33:42.000Z
2019-09-25T05:33:42.000Z
# -*- coding: utf-8 -*-
# The MIT License (MIT)
#
# Copyright (c) 2014-2015 Haltu Oy, http://haltu.fi
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

# Model smoke tests for the authdata app: each test builds an instance via a
# factory and checks basic invariants (string representation, timestamps,
# non-ASCII handling).  This is Python 2 code: ``unicode`` is the builtin.

# pylint: disable=locally-disabled, no-member

from django.test import TestCase

from authdata.tests import factories as f
from authdata import models


class TestUser(TestCase):

  def test_user(self):
    # __unicode__ of a user is its username.
    o = f.UserFactory()
    self.assertTrue(o)
    self.assertTrue(unicode(o) == o.username)
    u = models.User.objects.create(username='foo')
    u.email = 'foo@bar.com'
    u.save()
    self.assertTrue(u.email)

  def test_user_timestamps(self):
    # created/modified are populated automatically on save.
    u = f.UserFactory()
    self.assertTrue(u.created)
    self.assertTrue(u.modified)

  def test_user_last_login(self):
    u = f.UserFactory()
    self.assertTrue(u.last_login)

  def test_user_external_fields(self):
    # External source/id default to empty for factory users.
    u = f.UserFactory()
    self.assertFalse(u.external_source)
    self.assertFalse(u.external_id)


class TestSource(TestCase):

  def test_source(self):
    # Non-ASCII names must round-trip through unicode()/str().
    o = f.SourceFactory(name=u'Ääkköstesti')
    self.assertTrue(o)
    self.assertTrue(unicode(o), o.__unicode__())
    self.assertTrue(str(o))
    self.assertTrue(o.name)
    self.assertTrue(o.created)
    self.assertTrue(o.modified)


class TestMunicipality(TestCase):

  def test_municipality(self):
    o = f.MunicipalityFactory(name=u'Ääkköstesti')
    self.assertTrue(o)
    self.assertTrue(o.name)
    self.assertTrue(unicode(o) == o.name)
    self.assertTrue(o.created)
    self.assertTrue(o.modified)


class TestSchool(TestCase):

  def test_school(self):
    # The school representation includes both its own and its
    # municipality's name.
    o = f.SchoolFactory(name=u'Ääkköstesti', municipality__name=u'Ääkköskunta')
    self.assertTrue(o)
    self.assertTrue(o.name)
    self.assertIn(o.name, unicode(o))
    self.assertIn(o.municipality.name, unicode(o))
    self.assertTrue(o.created)
    self.assertTrue(o.modified)


class TestAttribute(TestCase):

  def test_attribute(self):
    o = f.AttributeFactory(name=u'Ääkköstesti')
    self.assertTrue(o)
    self.assertTrue(o.name)
    self.assertTrue(unicode(o) == o.name)
    self.assertTrue(o.created)
    self.assertTrue(o.modified)


class TestUserAttribute(TestCase):

  def test_userattribute(self):
    # Representation contains both the attribute name and its value.
    o = f.UserAttributeFactory(attribute__name=u'Ääkkösattribute', value=u'Ääkkösvalue')
    self.assertTrue(o)
    self.assertTrue(o.value)
    self.assertTrue(o.created)
    self.assertTrue(o.modified)
    self.assertIn(u'Ääkkösattribute', unicode(o))
    self.assertIn(u'Ääkkösvalue', unicode(o))


class TestRole(TestCase):

  def test_role(self):
    o = f.RoleFactory(name=u'Ääkkösrooli')
    self.assertTrue(o)
    self.assertTrue(o.name)
    self.assertTrue(unicode(o) == o.name)
    self.assertTrue(o.created)
    self.assertTrue(o.modified)


class TestAttendance(TestCase):

  def test_attendance(self):
    # Attendance representation combines school, role and municipality.
    o = f.AttendanceFactory(school__name=u'Ääkkösschool',
        school__municipality__name=u'Ääkkösmunicipality',
        role__name=u'Ääkkösrole',
        group=u'Ääkkösgroup')
    self.assertTrue(o)
    self.assertTrue(o.created)
    self.assertTrue(o.modified)
    self.assertIn(u'Ääkkösschool', unicode(o))
    self.assertIn(u'Ääkkösrole', unicode(o))
    self.assertIn(u'Ääkkösmunicipality', unicode(o))


class TestTimeStampedModel(TestCase):

  def test_timestampedmodel(self):
    # An unsaved abstract-base instance has no timestamps yet.
    o = models.TimeStampedModel()
    self.assertTrue(o.created is None)
    self.assertTrue(o.modified is None)

# vim: tabstop=2 expandtab shiftwidth=2 softtabstop=2
31.188811
79
0.724664
4a14fc0e09525c1aed1ac6aaca26a703cb03cbb3
3,667
py
Python
basicsr/utils/options.py
yuangan/Simple-SR
630d2f9441b116620af88ff882eca4673dedc047
[ "MIT" ]
null
null
null
basicsr/utils/options.py
yuangan/Simple-SR
630d2f9441b116620af88ff882eca4673dedc047
[ "MIT" ]
null
null
null
basicsr/utils/options.py
yuangan/Simple-SR
630d2f9441b116620af88ff882eca4673dedc047
[ "MIT" ]
null
null
null
import yaml
from collections import OrderedDict
from os import path as osp


def ordered_yaml():
    """Support OrderedDict for yaml.

    Returns:
        yaml Loader and Dumper.
    """
    # Prefer the LibYAML C implementations when available.
    try:
        from yaml import CDumper as Dumper
        from yaml import CLoader as Loader
    except ImportError:
        from yaml import Dumper, Loader

    _mapping_tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG

    def dict_representer(dumper, data):
        # Serialize OrderedDict as a plain YAML mapping.
        return dumper.represent_dict(data.items())

    def dict_constructor(loader, node):
        # Construct every YAML mapping as an OrderedDict so key order
        # from the option file is preserved.
        return OrderedDict(loader.construct_pairs(node))

    Dumper.add_representer(OrderedDict, dict_representer)
    Loader.add_constructor(_mapping_tag, dict_constructor)
    return Loader, Dumper


def parse(opt_path, is_train=True):
    """Parse option file.

    The loaded options are normalized in place: dataset phases are derived
    from their key names, user paths are expanded, and experiment/result
    directories are filled in depending on *is_train*.

    Args:
        opt_path (str): Option file path.
        is_train (str): Indicate whether in training or not. Default: True.

    Returns:
        (dict): Options.
    """
    with open(opt_path, mode='r') as f:
        Loader, _ = ordered_yaml()
        opt = yaml.load(f, Loader=Loader)

    opt['is_train'] = is_train

    # datasets
    for phase, dataset in opt['datasets'].items():
        # for several datasets, e.g., test_1, test_2
        # e.g. key 'test_1' -> phase 'test'
        phase = phase.split('_')[0]
        dataset['phase'] = phase
        # Propagate the global scale into each dataset config.
        if 'scale' in opt:
            dataset['scale'] = opt['scale']
        if dataset.get('dataroot_gt') is not None:
            dataset['dataroot_gt'] = osp.expanduser(dataset['dataroot_gt'])
        if dataset.get('dataroot_lq') is not None:
            dataset['dataroot_lq'] = osp.expanduser(dataset['dataroot_lq'])

    # paths
    for key, path in opt['path'].items():
        if path and key != 'strict_load':
            opt['path'][key] = osp.expanduser(path)
    # Repo root: three levels up from this file.
    opt['path']['root'] = osp.abspath(
        osp.join(__file__, osp.pardir, osp.pardir, osp.pardir))
    if is_train:
        experiments_root = osp.join(opt['path']['root'], 'experiments',
                                    opt['name'])
        opt['path']['experiments_root'] = experiments_root
        opt['path']['models'] = osp.join(experiments_root, 'models')
        opt['path']['training_states'] = osp.join(experiments_root,
                                                  'training_states')
        opt['path']['log'] = experiments_root
        opt['path']['visualization'] = osp.join(experiments_root,
                                                'visualization')

        # change some options for debug mode
        # ('debug' anywhere in the experiment name shortens the val/log
        # intervals so a debug run produces output quickly)
        if 'debug' in opt['name']:
            opt['val']['val_freq'] = 8
            opt['logger']['print_freq'] = 1
            opt['logger']['save_checkpoint_freq'] = 8
    else:  # test
        results_root = osp.join(opt['path']['root'], 'results', opt['name'])
        opt['path']['results_root'] = results_root
        opt['path']['log'] = results_root
        opt['path']['visualization'] = osp.join(results_root, 'visualization')

    return opt


def dict2str(opt, indent_level=1):
    """dict to string for printing options.

    Args:
        opt (dict): Option dict.
        indent_level (int): Indent level. Default: 1.

    Return:
        (str): Option string for printing.
    """
    msg = '\n'
    for k, v in opt.items():
        if isinstance(v, dict):
            # Nested dicts are rendered recursively inside [ ... ].
            msg += ' ' * (indent_level * 2) + k + ':['
            msg += dict2str(v, indent_level + 1)
            msg += ' ' * (indent_level * 2) + ']\n'
        else:
            msg += ' ' * (indent_level * 2) + k + ': ' + str(v) + '\n'
    return msg
33.642202
79
0.557404
4a14fccbadbc5e4b5a7a504ae1b7530f099382c0
36,342
py
Python
notebooks/shared/utils/post_html.py
deeplook/debuggingbook
ff1b50fc6759f268e457bfa46dd73196b138b985
[ "MIT" ]
null
null
null
notebooks/shared/utils/post_html.py
deeplook/debuggingbook
ff1b50fc6759f268e457bfa46dd73196b138b985
[ "MIT" ]
null
null
null
notebooks/shared/utils/post_html.py
deeplook/debuggingbook
ff1b50fc6759f268e457bfa46dd73196b138b985
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 # Expand elements in generated HTML # Usage: post-html.py CHAPTER_NAME CHAPTER_1 CHAPTER_2 ... # Note: I suppose this could also be done using Jinja2 templates and ipypublish, # but this thing here works pretty well. # If you'd like to convert this into some more elegant framework, # implement it and send me a pull request -- AZ import argparse import os.path import time import datetime import re import sys import io import html import urllib import shelve try: import nbformat have_nbformat = True except: have_nbformat = False # Process arguments parser = argparse.ArgumentParser() parser.add_argument("--home", help="omit links to notebook, code, and slides", action='store_true') parser.add_argument("--clear-link-cache", help="Clear link cache", action='store_true') parser.add_argument("--include-ready", help="include ready chapters", action='store_true') parser.add_argument("--include-todo", help="include work-in-progress chapters", action='store_true') parser.add_argument("--project", help="project name", default="fuzzingbook") parser.add_argument("--title", help="book title", default="The Fuzzing Book") parser.add_argument("--authors", help="list of authors", default="A. 
Zeller et al.") parser.add_argument("--twitter", help="twitter handle", default="@FuzzingBook") parser.add_argument("--menu-prefix", help="prefix to html files in menu") parser.add_argument("--all-chapters", help="List of all chapters") parser.add_argument("--public-chapters", help="List of public chapters") parser.add_argument("--ready-chapters", help="List of ready chapters") parser.add_argument("--todo-chapters", help="List of work-in-progress chapters") parser.add_argument("--new-chapters", help="List of new chapters") parser.add_argument("chapter", nargs=1) args = parser.parse_args() # Some fixed strings project = args.project booktitle = args.title authors = args.authors twitter = args.twitter site_html = f"https://www.{project}.org/" github_html = f"https://github.com/uds-se/{project}/" notebook_html = f"https://mybinder.org/v2/gh/uds-se/{project}/master?filepath=docs/" # Menus # For icons, see https://fontawesome.com/cheatsheet menu_start = r""" <nav> <div id="cssmenu"> <ul> <li class="has-sub"><a href="#"><span title="__BOOKTITLE__"><i class="fa fa-fw fa-bars"></i> </span><span class="menu_1">__BOOKTITLE_BETA__</span></a> <ol> <__STRUCTURED_ALL_CHAPTERS_MENU__> <li><a href="__SITE_HTML__html/00_Index.html">Index (beta)</a></i></li> </ol> </li> <li class="has-sub"><a href="#"><span title="__CHAPTER_TITLE__"><i class="fa fa-fw fa-list-ul"></i></span> <span class="menu_2">__CHAPTER_TITLE_BETA__</span></a> <__ALL_SECTIONS_MENU__> </li> """ menu_end = r""" <li class="has-sub"><a href="#"><span title="Share"><i class="fa fa-fw fa-comments"></i> </span> <span class="menu_4">Share</span></a> <ul> <li><a href="__SHARE_TWITTER__" target="popup" __TWITTER_ONCLICK__><i class="fa fa-fw fa-twitter"></i> Share on Twitter</a> <li><a href="__SHARE_FACEBOOK__" target="popup" __FACEBOOK_ONCLICK__><i class="fa fa-fw fa-facebook"></i> Share on Facebook</a> <li><a href="__SHARE_MAIL__"><i class="fa fa-fw fa-envelope"></i> Share by Email</a> <li><a href="#citation" id="cite" 
onclick="revealCitation()"><i class="fa fa-fw fa-mortar-board"></i> Cite</a> </ul> </li> <li class="has-sub"><a href="#"><span title="Help"><i class="fa fa-fw fa-question-circle"></i></span> <span class="menu_5">Help</span></a> <ul> <li><a href="__SITE_HTML__#Troubleshooting"><i class="fa fa-fw fa-wrench"></i> Troubleshooting</a></li> <li><a href="https://docs.python.org/3/tutorial/" target=_blank><i class="fa fa-fw fa-question-circle"></i> Python Tutorial</a> <li><a href="https://www.dataquest.io/blog/jupyter-notebook-tutorial/" target=_blank><i class="fa fa-fw fa-question-circle"></i> Jupyter Notebook Tutorial</a> <li><a href="__GITHUB_HTML__issues/" target="_blank"><i class="fa fa-fw fa-commenting"></i> Report an Issue</a></li> </ul> </li> </ul> </div> </nav> """ site_header_template = menu_start + r""" <li class="has-sub"><a href="#"><span title="Resources"><i class="fa fa-fw fa-cube"></i> </span><span class="menu_3">Resources</span></a> <ul> <li><a href="__CHAPTER_NOTEBOOK_IPYNB__" target="_blank" class="edit_as_notebook"><i class="fa fa-fw fa-edit"></i> Edit Notebooks</a></li> <li><a href="__SITE_HTML__dist/__PROJECT__-code.zip"><i class="fa fa-fw fa-cube"></i> All Code (.zip)</a></li> <li><a href="__SITE_HTML__dist/__PROJECT__-notebooks.zip"><i class="fa fa-fw fa-cube"></i> All Notebooks (.zip)</a></li> <li><a href="__GITHUB_HTML__" target="_blank"><i class="fa fa-fw fa-github"></i> GitHub Repo</a></li> <li><a href="html/ReleaseNotes.html" target="_blank"><i class="fa fa-fw fa-calendar"></i> Release Notes</a></li> </ul> </li> """ + menu_end # Chapters chapter_header_template = menu_start + r""" <li class="has-sub"><a href="#"><span title="Resources"><i class="fa fa-fw fa-cube"></i> </span><span class="menu_3">Resources</span></a> <ul> <li><a href="__CHAPTER_NOTEBOOK_IPYNB__" target="_blank" class="edit_as_notebook"><i class="fa fa-fw fa-edit"></i> Edit as Notebook</a></li> <li><a href="__SITE_HTML__slides/__CHAPTER__.slides.html" target="_blank"><i class="fa 
fa-fw fa-video-camera"></i> View Slides</a></li> <li><a href="__SITE_HTML__code/__CHAPTER__.py"><i class="fa fa-fw fa-download"></i> Download Code (.py)</a></li> <li><a href="__SITE_HTML__notebooks/__CHAPTER__.ipynb"><i class="fa fa-fw fa-download"></i> Download Notebook (.ipynb)</a></li> <li><a href="__SITE_HTML__dist/__PROJECT__-code.zip"><i class="fa fa-fw fa-cube"></i> All Code (.zip)</a></li> <li><a href="__SITE_HTML__dist/__PROJECT__-notebooks.zip"><i class="fa fa-fw fa-cube"></i> All Notebooks (.zip)</a></li> <li><a href="__GITHUB_HTML__" target="_blank"><i class="fa fa-fw fa-github"></i> Project Page</a></li> <li><a href="ReleaseNotes.html" target="_blank"><i class="fa fa-fw fa-calendar"></i> Release Notes</a></li> </ul> </li> """ + menu_end # Footers site_citation_template = r""" <div id="citation" class="citation" style="display: none;"> <a name="citation"></a> <h2>How to Cite this Work</h2> <p> __AUTHORS__: "<a href="__SITE_HTML__">__BOOKTITLE__</a>". Retrieved __DATE__. </p> <pre> @book{__BIBTEX_KEY__, author = {__AUTHORS_BIBTEX__}, title = {__BOOKTITLE__}, year = {__YEAR__}, publisher = {CISPA Helmholtz Center for Information Security}, howpublished = {\url{__SITE_HTML__}}, note = {Retrieved __DATE__}, url = {__SITE_HTML__}, urldate = {__DATE__} } </pre> </div> """ chapter_citation_template = r""" <div id="citation" class="citation" style="display: none;"> <a name="citation"></a> <h2>How to Cite this Work</h2> <p> __AUTHORS__: "<a href="__CHAPTER_HTML__">__CHAPTER_TITLE__</a>". In __AUTHORS__, "<a href="__SITE_HTML__">__BOOKTITLE__</a>", <a href="__CHAPTER_HTML__">__CHAPTER_HTML__</a>. Retrieved __DATE__. 
</p> <pre> @incollection{__BIBTEX_KEY__:__CHAPTER__, author = {__AUTHORS_BIBTEX__}, booktitle = {__BOOKTITLE__}, title = {__CHAPTER_TITLE__}, year = {__YEAR__}, publisher = {CISPA Helmholtz Center for Information Security}, howpublished = {\url{__CHAPTER_HTML__}}, note = {Retrieved __DATE__}, url = {__CHAPTER_HTML__}, urldate = {__DATE__} } </pre> </div> """ common_footer_template = r""" <p class="imprint"> <img style="float:right" src="https://i.creativecommons.org/l/by-nc-sa/4.0/88x31.png" alt="Creative Commons License"> The content of this project is licensed under the <a href="https://creativecommons.org/licenses/by-nc-sa/4.0/" target=_blank>Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International License</a>. The source code that is part of the content, as well as the source code used to format and display that content is licensed under the <a href="https://github.com/uds-se/__PROJECT__/blob/master/LICENSE.md#mit-license">MIT License</a>. <a href="__GITHUB_HTML__commits/master/notebooks/__CHAPTER__.ipynb" target=_blank)>Last change: __DATE__</a> &bull; <a href="#citation" id="cite" onclick="revealCitation()">Cite</a> &bull; <a href="https://cispa.de/en/impressum" target=_blank>Imprint</a> </p> <script> function revealCitation() { var c = document.getElementById("citation"); c.style.display = "block"; } </script> """ chapter_footer_template = common_footer_template + chapter_citation_template site_footer_template = common_footer_template + site_citation_template from nbdepend import get_text_contents, get_title def get_description(notebook): """Return the first 2-4 sentences from a notebook file, after the title""" contents = get_text_contents(notebook) match = re.search(r'^# .*$([^#]*)^#', contents, re.MULTILINE) if match is None: desc = contents else: desc = match.group(1).replace(r'\n', '').replace('\n', '') desc = re.sub(r"\]\([^)]*\)", "]", desc).replace('[', '').replace(']', '') desc = re.sub(r"[_*]", "", desc) # print("Description", 
desc.encode('utf-8')) return desc def get_sections(notebook): """Return the section titles from a notebook file""" contents = get_text_contents(notebook) matches = re.findall(r'^(# .*)', contents, re.MULTILINE) if len(matches) >= 5: # Multiple top sections (book?) - use these pass else: # Use sections and subsections instead matches = re.findall(r'^(###? .*)', contents, re.MULTILINE) sections = [match.replace(r'\n', '') for match in matches] # print("Sections", repr(sections).encode('utf-8')) # Filter out second synopsis section if '## Synopsis' in sections: sections = ['## Synopsis'] + [sec for sec in sections if sec != '## Synopsis'] # Filter out "End of Excursion" titles sections = [sec for sec in sections if sec != '## End of Excursion' and sec != '### End of Excursion'] return sections def anchor(title): """Return an anchor '#a-title' for a title 'A title'""" return '#' + title.replace(' ', '-') def decorate(section, depth): if depth != 2: return section if section == "Synopsis": section = '<i class="fa fa-fw fa-map"></i> ' + section elif section == "Lessons Learned": section = '<i class="fa fa-fw fa-trophy"></i> ' + section elif section == "Next Steps": section = '<i class="fa fa-fw fa-arrows"></i> ' + section elif section == "Background": section = '<i class="fa fa-fw fa-mortar-board"></i> ' + section elif section == "Exercises": section = '<i class="fa fa-fw fa-edit"></i> ' + section else: section = '&nbsp;&bull;&nbsp;&nbsp; ' + section return section # Authors def bibtex_escape(authors): """Return list of authors in BibTeX-friendly form""" tex_escape_table = { "ä": r'{\"a}', "ö": r'{\"o}', "ü": r'{\"u}', "Ä": r'{\"A}', "Ö": r'{\"O}', "Ü": r'{\"U}', "ß": r'{\ss}' } return "".join(tex_escape_table.get(c,c) for c in authors) assert bibtex_escape("Böhme") == r'B{\"o}hme' authors_bibtex = bibtex_escape(authors).replace(", and ", " and ").replace(", ", " and ") # The other way round # Use "grep '\\' BIBFILE" to see accents currently in use def 
bibtex_unescape(contents): """Fix TeX escapes introduced by BibTeX""" tex_unescape_table = { r'{\"a}': "ä", r'{\"o}': "ö", r'{\"u}': "ü", r'{\"i}': "ï", r'{\"e}': "ë", r'{\"A}': "Ä", r'{\"O}': "Ö", r'{\"U}': "Ü", r'{\ss}': "ß", r'{\`e}': "è", r'{\'e}': "é", r'{\`a}': "à", r'{\'a}': "á", r'{\`i}': "ì", r'{\'i}': "í", r'{\`o}': "ò", r'{\'o}': "ó", r'{\`u}': "ù", r'{\'u}': "ú", r'{\d{s}}': "ṣ", r'{\d{n}}': "ṇ", r'{\d{t}}': "ṭ", r'{\=a}': "ā", r'{\=i}': "ī" } for key in tex_unescape_table: contents = contents.replace(key, tex_unescape_table[key]) return contents assert bibtex_unescape(r"B{\"o}hme") == 'Böhme' assert bibtex_unescape(r"P{\`e}zze") == 'Pèzze' LINKS_DB = 'links' links_db = shelve.open(LINKS_DB) if args.clear_link_cache: for link in links_db.keys(): del links_db[link] def link_exists(link): """Return True if http/https `link` exists""" if link in links_db: # Seen before return True try: urllib.request.urlopen(link, timeout=5) except urllib.error.HTTPError as exc: if exc.code == 403: # We get this when accessing readthedocs.io pass else: print(f"Cannot open {link}: {exc}", file=sys.stderr) link = None except urllib.error.URLError as exc: print(f"Cannot open {link}: {exc}", file=sys.stderr) link = None except UnicodeError as exc: # We get this when accessing readthedocs.io pass if not link: return False links_db[link] = True return True # Imports are in <span class="nn">NAME</span> RE_IMPORT = re.compile(r'<span class="nn">([^<]+)</span>') # Add links to imports def add_links_to_imports(contents, html_file): imports = re.findall(RE_IMPORT, contents) for module in imports: link = None if module.startswith("bookutils"): link = f"{github_html}/tree/master/notebooks/shared/bookutils" elif module == "requests": link = "http://docs.python-requests.org/en/master/" elif module.startswith("IPython"): # Point to IPython doc link = f"https://ipython.readthedocs.io/en/stable/api/generated/{module}.html" elif module.startswith("selenium"): # Point to Selenium doc link = 
"https://selenium-python.readthedocs.io/" elif module.startswith(project): # Point to notebook link = module[module.find('.') + 1:] + '.html' elif module in ['debuggingbook', 'fuzzingbook']: link = f"https://www.{module}.org/" elif (module.startswith('debuggingbook') or module.startswith('fuzzingbook')): base = module[:module.find('.')] submodule = module[module.find('.') + 1:] link = f"https://www.{base}.org/html/{submodule}.html" elif module.startswith('astor'): link = f'https://astor.readthedocs.io/' elif module.startswith('pydriller'): link = f'https://pydriller.readthedocs.io/' elif module.startswith('ipywidgets'): link = f'https://ipywidgets.readthedocs.io/' elif module.startswith('graphviz'): link = f'https://graphviz.readthedocs.io/' elif module in ['git', 'git.exc']: link = f'https://gitpython.readthedocs.io/' elif module in ['enforce', 'showast']: link = f'https://pypi.org/project/{module}/' elif module == 'magic': link = 'https://pypi.org/project/python-magic/' elif module == 'diff_match_patch': link = 'https://github.com/google/diff-match-patch' elif module == 'easyplotly': link = 'https://mwouts.github.io/easyplotly/' elif module == 'numpy': link = 'https://numpy.org/' elif module.startswith('matplotlib'): link = 'https://matplotlib.org/' elif module.startswith('plotly'): link = 'https://plotly.com/python/' elif module.startswith('sklearn'): link = 'https://scikit-learn.org/' elif module in ['ep', 'go', 'plt', 'np']: link = None # aliases elif module == 'cProfile': link = 'https://docs.python.org/3/library/profile.html' elif module[0].islower(): # Point to Python doc link = f'https://docs.python.org/3/library/{module}.html' else: # Point to (local) notebook link = f'{module}.html' # print(f'{module} -> ', repr(link)) if link: if link.startswith('http') and not link_exists(link): print(f"{html_file}: Cannot find link {link} for {repr(module)}", file=sys.stderr) else: contents = contents.replace(r'<span class="nn">' + module + r'</span>', r'<span 
class="nn"><a href="' + link + r'" class="import" target="_blank">' + module + r"</a>" + r'</span>') return contents # Remove cells that start with `# ignore`, `# docassert`, or only contain # a quiz() or a display() call. Keep the output. RE_IGNORE = re.compile(r''' <div class="input_code"> <div class="cell border-box-sizing code_cell rendered"> <div class="input"> <div class="inner_cell"> <div class="input_area"> <div class=" highlight hl-ipython3"><pre><span></span>(<span class="n">(quiz|display)</span>|<span class="c1">#\s*(ignore|docassert)[^<]*</span>).*? </div> </div></div> </div> </div> ''', re.DOTALL) def remove_ignored_code(text): return RE_IGNORE.sub('', text) assert remove_ignored_code(''' <div class="input_code"> <div class="cell border-box-sizing code_cell rendered"> <div class="input"> <div class="inner_cell"> <div class="input_area"> <div class=" highlight hl-ipython3"><pre><span></span><span class="n">quiz</span><span class="p">(</span><span class="s2">&quot;From the difference between success and failure, we can already devise some observations about what&#39;s wrong with the output. 
Which of these can we turn into general hypotheses?&quot;</span><span class="p">,</span> <span class="p">[</span><span class="s2">&quot;Double quotes are stripped from the tagged input.&quot;</span><span class="p">,</span> <span class="s2">&quot;Tags in double quotes are not stripped.&quot;</span><span class="p">,</span> <span class="s2">&quot;The tag &#39;&amp;lt;b&amp;gt;&#39; is always stripped from the input.&quot;</span><span class="p">,</span> <span class="s2">&quot;Four-letter words are stripped.&quot;</span><span class="p">],</span> <span class="p">[</span><span class="mi">298</span> <span class="o">%</span> <span class="mi">33</span><span class="p">,</span> <span class="mi">1234</span> <span class="o">%</span> <span class="mi">616</span><span class="p">])</span> </pre></div> </div> </div></div> </div> </div> ''') == '' # Remove `# type: ignore` comments RE_TYPE_IGNORE = re.compile(r' <span class="c1"># type: ignore</span>') def remove_type_ignore(text): return RE_TYPE_IGNORE.sub('', text) # Sharing def cgi_escape(text): """Produce entities within text.""" cgi_escape_table = { " ": r"%20", "&": r"%26", '"': r"%22", "'": r"%27", ">": r"%3e", "<": r"%3c", ":": r"%3a", "/": r"%2f", "?": r"%3f", "=": r"%3d", } return "".join(cgi_escape_table.get(c,c) for c in text) # Highlight Synopsis def highlight_synopsis(text): synopsis_start = text.find('<h2 id="Synopsis">') if synopsis_start < 0: return text # No synopsis synopsis_end = text.find('<div class="input_markdown">', synopsis_start + 1) if synopsis_end < 0: return text # No synopsis text = (text[:synopsis_start] + '<div class="synopsis">' + text[synopsis_start:synopsis_end] + '</div>\n\n' + text[synopsis_end:]) # Strip original synopsis orig_synopsis_start = text.find('<h2 id="Synopsis">', synopsis_end + 1) orig_synopsis_end = text.find('<h2 ', orig_synopsis_start + 1) text = (text[:orig_synopsis_start] + text[orig_synopsis_end:]) return text # Fix CSS def fix_css(text): # Avoid forcing text color to black when 
printing return text.replace('color: #000 !important;', '') # Inline our SVG graphics RE_IMG_SVG = re.compile(r'<img src="(PICS/[^"]*.svg)"[^>]*>') def inline_svg_graphics(text, chapter_html_file): while True: match = RE_IMG_SVG.search(text) if not match: break src = match.group(1) svg_file = os.path.join(os.path.dirname(chapter_html_file), src) svg_data = open(svg_file).read() text = text[:match.start()] + svg_data + text[match.end():] return text # Handle Excursions # Cells with "Excursion: <summary>" and "End of Excursion" are translated to # HTML <details> regions RE_BEGIN_EXCURSION = re.compile(r''' <div[^>]*?>[^<]*? # four divs <div[^>]*?>[^<]*? <div[^>]*?>[^<]*? <div[^>]*?>[^<]*? <h[0-9]\s*?(id="(?P<id>[^"]*)")[^>]*>Excursion:\s*\s(?P<title>[^\n]*?)(<a[^\n]*?>[^\n]*?</a>)?</h[0-9]> </div>[^<]*? # four closing divs </div>[^<]*? </div>[^<]*? </div>''', re.DOTALL | re.VERBOSE) RE_END_EXCURSION = re.compile(r''' <div[^>]*?>[^<]*? # four divs <div[^>]*?>[^<]*? <div[^>]*?>[^<]*? <div[^>]*?>[^<]*? <h[0-9][^<>]*?>[eE]nd[^\n]*[eE]xcursion[^\n]*</h[0-9]> </div>[^<]*? # four closing divs </div>[^<]*? </div>[^<]*? 
</div>''', re.DOTALL | re.VERBOSE) def add_excursion_switchers(text): text = RE_BEGIN_EXCURSION.sub( r'<details id="\g<id>">\n<summary>\g<title></summary>', text) text = RE_END_EXCURSION.sub( '</details>', text) return text text1 = ''' Some stuff to begin with <div class="input_markdown"> <div class="cell border-box-sizing text_cell rendered"> <div class="inner_cell"> <div class="text_cell_render border-box-sizing rendered_html"><h4 id="Excursion:-Implementing-display_tree()">Excursion: Implementing <code>display_tree()</code><a class="anchor-link" href="#Excursion:-Implementing-display_tree()">&#182;</a></h4></div> </div> </div> </div> <div class="input_markdown"> <div class="cell border-box-sizing text_cell rendered"> <div class="inner_cell"> <div class="text_cell_render border-box-sizing rendered_html"><p>We use the <code>dot</code> drawing program from the <code>graphviz</code> package algorithmically, traversing the above structure. (Unless you're deeply interested in tree visualization, you can directly skip to the example below.)</p> </div> </div> </div> </div> <div class="input_markdown"> <div class="cell border-box-sizing text_cell rendered"> <div class="inner_cell"> <div class="text_cell_render border-box-sizing rendered_html"><h4 id="End-of-Excursion">End of Excursion<a class="anchor-link" href="#End-of-Excursion">&#182;</a></h4></div> </div> </div> </div> <div class="input_markdown"> <div class="cell border-box-sizing text_cell rendered"> <div class="inner_cell"> <div class="text_cell_render border-box-sizing rendered_html"><h4 id="Excursion:-Implementing-display_tree()">Excursion: Implementing <code>display_tree()</code> again<a class="anchor-link" href="#Excursion:-Implementing-display_tree()">&#182;</a></h4></div> </div> </div> </div> Some standard stuff <div class="input_markdown"> <div class="cell border-box-sizing text_cell rendered"> <div class="inner_cell"> <div class="text_cell_render border-box-sizing rendered_html"><h4 
id="End-of-Excursion">End of Excursion<a class="anchor-link" href="#End-of-Excursion">&#182;</a></h4></div> </div> </div> </div> Some other stuff ''' # print(add_excursion_switchers(text1)) # sys.exit(0) # Get template elements chapter_html_file = args.chapter[0] chapter = os.path.splitext(os.path.basename(chapter_html_file))[0] chapter_notebook_file = os.path.join("notebooks", chapter + ".ipynb") notebook_modification_time = os.path.getmtime(chapter_notebook_file) notebook_modification_datetime = datetime.datetime.fromtimestamp(notebook_modification_time) \ .astimezone().isoformat(sep=' ', timespec='seconds') notebook_modification_year = repr(datetime.datetime.fromtimestamp(notebook_modification_time).year) # Get list of chapters if args.public_chapters is not None: public_chapters = args.public_chapters.split() else: public_chapters = [] if args.all_chapters is not None: all_chapters = args.all_chapters.split() else: all_chapters = [] if args.include_ready and args.ready_chapters is not None: ready_chapters = args.ready_chapters.split() else: ready_chapters = [] if args.include_todo and args.todo_chapters is not None: todo_chapters = args.todo_chapters.split() else: todo_chapters = [] new_chapters = args.new_chapters.split() beta_chapters = ready_chapters + todo_chapters include_beta = args.include_ready or args.include_todo new_suffix = ' <strong class="new_chapter">&bull;</strong>' todo_suffix = '<i class="fa fa-fw fa-wrench"></i>' ready_suffix = '<i class="fa fa-fw fa-warning"></i>' booktitle_beta = booktitle if include_beta: booktitle_beta += "&nbsp;" + todo_suffix menu_prefix = args.menu_prefix if menu_prefix is None: menu_prefix = "" if args.home: header_template = site_header_template footer_template = site_footer_template else: header_template = chapter_header_template footer_template = chapter_footer_template # Popup menus twitter_onclick = r""" onclick="window.open('__SHARE_TWITTER__','popup','width=600,height=600'); return false;" """ facebook_onclick 
= r""" onclick="window.open('__SHARE_FACEBOOK__','popup','width=600,height=600'); return false;" """ if args.home: # Including the Twitter timeline already creates a popup twitter_onclick = "" # Set base names if include_beta: site_html += "beta/" # Book image bookimage = site_html + "html/PICS/wordcloud.png" # Binder if include_beta: notebook_html += "beta/" notebook_html += "notebooks/" # Construct sections menu basename = os.path.splitext(os.path.basename(chapter_html_file))[0] chapter_ipynb_file = os.path.join("notebooks", basename + ".ipynb") all_sections_menu = "" sections = get_sections(chapter_ipynb_file) current_depth = 1 for section in sections: depth = section.count('#') while section.startswith('#') or section.startswith(' '): section = section[1:] if section.startswith('['): section = section[1:section.find(']')] if depth == current_depth: all_sections_menu += '</li>' if depth > current_depth: all_sections_menu += "<ul>" * (depth - current_depth) if depth < current_depth: all_sections_menu += "</ul></li>" * (current_depth - depth) all_sections_menu += '<li class="has-sub"><a href="%s">%s</a>\n' % (anchor(section), decorate(section, depth)) current_depth = depth while current_depth > 1: all_sections_menu += '</ul></li>' current_depth -= 1 # Construct chapter menu if args.home: chapter_html = site_html chapter_notebook_ipynb = notebook_html + "00_Table_of_Contents.ipynb" else: chapter_html = site_html + "html/" + basename + ".html" chapter_notebook_ipynb = notebook_html + basename + ".ipynb" chapter_title = get_title(chapter_ipynb_file) # if chapter_ipynb_file in new_chapters: # chapter_title += " " + new_suffix chapter_title_beta = chapter_title is_todo_chapter = include_beta and chapter_ipynb_file in todo_chapters is_ready_chapter = include_beta and chapter_ipynb_file in ready_chapters if is_todo_chapter: chapter_title_beta += " " + todo_suffix # if is_ready_chapter: # chapter_title_beta += " " + ready_suffix if args.home: link_class = ' 
class="this_page"' else: link_class = '' all_chapters_menu = ''' <li><a href="%s"%s><span class="part_number"><i class="fa fa-fw fa-home"></i></span> About this book</a></li> <li><a href="__SITE_HTML__html/00_Table_of_Contents.html"><i class="fa fa-fw fa-sitemap"></i></span> Sitemap</a></li> ''' % (site_html, link_class) structured_all_chapters_menu = all_chapters_menu this_chapter_counter = 1 for counter, menu_ipynb_file in enumerate(all_chapters): if menu_ipynb_file == chapter_ipynb_file: this_chapter_counter = counter in_sublist = False for counter, menu_ipynb_file in enumerate(all_chapters): basename = os.path.splitext(os.path.basename(menu_ipynb_file))[0] structured_title = '' # '<span class="chnum">' + repr(counter + 1) + '</span> ' title = "" is_public = menu_ipynb_file in public_chapters if menu_ipynb_file == chapter_ipynb_file: link_class = ' class="this_page"' elif not is_public: link_class = ' class="not_public"' else: link_class = '' file_title = get_title(menu_ipynb_file) if menu_ipynb_file in new_chapters: file_title += new_suffix is_part = file_title.startswith("Part ") or file_title.startswith("Append") if file_title.startswith("Part "): file_title = '<span class="part_number">' + \ file_title.replace("Part ", "") \ .replace(":", '</span>') # .replace("I:", '&#x2160;') \ # .replace("II:", '&#x2161;') \ # .replace("III:", '&#x2162;') \ # .replace("IV:", '&#x2163;') \ # .replace("V:", '&#x2164;') \ # .replace("VI:", '&#x2165;') \ # .replace("VII:", '&#x2166;') \ # .replace("VIII:", '&#x2167;') \ # .replace("IX:", '&#x2168;') \ # .replace("X:", '&#x2169;') \ # .replace("XI:", '&#x216a;') \ # .replace("XII:", '&#x216b;') \ # .replace(';', ';</span>') \ title += file_title structured_title += file_title beta_indicator = '' # if menu_ipynb_file in ready_chapters: # beta_indicator = "&nbsp;" + ready_suffix if menu_ipynb_file in todo_chapters: beta_indicator = "&nbsp;" + todo_suffix menu_html_file = menu_prefix + basename + ".html" if is_part: # New part if 
in_sublist: structured_all_chapters_menu += "</ul>" in_sublist = False structured_all_chapters_menu += \ '<li class="has-sub"><a href="%s" class="chapters">%s%s' \ % (menu_html_file, file_title, beta_indicator) structured_all_chapters_menu += ' <i class="fa fa-fw fa-caret-right"></i></a>\n<ul>\n' in_sublist = True else: # New chapter menu_link = menu_html_file if is_public else "#" structured_item = '<li><a href="%s"%s>%s%s</a></li>\n' % \ (menu_link, link_class, structured_title, beta_indicator) structured_all_chapters_menu += structured_item item = '<li><a href="%s"%s>%s%s</a></li>\n' % \ (menu_link, link_class, title, beta_indicator) all_chapters_menu += item if in_sublist: structured_all_chapters_menu += "</ul>" in_sublist = False # Description description = html.escape(get_description(chapter_ipynb_file)) # Exercises end_of_exercise = ''' <p><div class="solution_link"><a href="__CHAPTER_NOTEBOOK_IPYNB__#Exercises" target=_blank>Use the notebook</a> to work on the exercises and see solutions.</div></p> ''' if args.home: share_message = (r'I just read "' + booktitle + rf'" ({twitter}) at ' + site_html) share_title = booktitle else: share_message = (r'I just read "' + chapter_title + rf'" (part of {twitter}) at ' + chapter_html) share_title = chapter_title share_twitter = "https://twitter.com/intent/tweet?text=" + cgi_escape(share_message) share_facebook = "https://www.facebook.com/sharer/sharer.php?u=" + cgi_escape(chapter_html) share_mail = ("mailto:?subject=" + cgi_escape(share_title) + "&body=" + cgi_escape(share_message)) # Page title if args.home: page_title = booktitle else: page_title = chapter_title + " - " + booktitle # sys.exit(0) # Read it in print("post_html.py: Reading", chapter_html_file) chapter_contents = open(chapter_html_file, encoding="utf-8").read() # Replacement orgy # 1. Replace all markdown links to .ipynb by .html, such that cross-chapter links work # 2. Fix extra newlines in cell output produced by ipypublish # 3. 
Insert the menus and templates as defined above chapter_contents = chapter_contents \ .replace("\n\n</pre>", "\n</pre>") \ .replace("<__HEADER__>", header_template) \ .replace("<__FOOTER__>", footer_template) \ .replace("<__ALL_CHAPTERS_MENU__>", all_chapters_menu) \ .replace("<__STRUCTURED_ALL_CHAPTERS_MENU__>", structured_all_chapters_menu) \ .replace("<__ALL_SECTIONS_MENU__>", all_sections_menu) \ .replace("<__END_OF_EXERCISE__>", end_of_exercise) \ .replace("__PROJECT__", project) \ .replace("__PAGE_TITLE__", page_title) \ .replace("__BOOKTITLE_BETA__", booktitle_beta) \ .replace("__BOOKTITLE__", booktitle) \ .replace("__BOOKIMAGE__", bookimage) \ .replace("__DESCRIPTION__", description) \ .replace("__AUTHORS__", authors) \ .replace("__CHAPTER__", chapter) \ .replace("__CHAPTER_TITLE__", chapter_title) \ .replace("__CHAPTER_TITLE_BETA__", chapter_title_beta) \ .replace("__CHAPTER_HTML__", chapter_html) \ .replace("__SITE_HTML__", site_html) \ .replace("__NOTEBOOK_HTML__", notebook_html) \ .replace("__CHAPTER_NOTEBOOK_IPYNB__", chapter_notebook_ipynb) \ .replace("__GITHUB_HTML__", github_html) \ .replace("__TWITTER_ONCLICK__", twitter_onclick) \ .replace("__FACEBOOK_ONCLICK__", facebook_onclick) \ .replace("__SHARE_TWITTER__", share_twitter) \ .replace("__SHARE_FACEBOOK__", share_facebook) \ .replace("__SHARE_MAIL__", share_mail) \ .replace("__DATE__", notebook_modification_datetime) \ .replace("__YEAR__", notebook_modification_year) \ .replace("__BIBTEX_KEY__", project + notebook_modification_year) # Remove code cells that only display graphics or start with `#ignore` chapter_contents = remove_ignored_code(chapter_contents) # Remove `# type: ignore` comments chapter_contents = remove_type_ignore(chapter_contents) # Add links to imports chapter_contents = add_links_to_imports(chapter_contents, chapter_html_file) # Inline SVG graphics (preserving style and tooltips) chapter_contents = inline_svg_graphics(chapter_contents, chapter_html_file) # Fix simple .ipynb 
links within text and XML if args.home: chapter_contents = re.sub(r'<a (xlink:href|href)="([a-zA-Z0-9_]*)\.ipynb', r'<a \1="html/\2.html', chapter_contents) else: chapter_contents = re.sub(r'<a (xlink:href|href)="([a-zA-Z0-9_]*)\.ipynb', r'<a \1="\2.html', chapter_contents) # Recode TeX accents imported from .bib chapter_contents = bibtex_unescape(chapter_contents) # Expand BibTeX authors at the end, because Marcel needs his Umlaut encoded chapter_contents = \ chapter_contents.replace("__AUTHORS_BIBTEX__", authors_bibtex) # Highlight details switchers chapter_contents = add_excursion_switchers(chapter_contents) # Fix CSS chapter_contents = fix_css(chapter_contents) # Handle the (first) synopsis chapter_contents = highlight_synopsis(chapter_contents) # Get proper links for CSS and Favicon if args.home: chapter_contents = chapter_contents.replace("custom.css", menu_prefix + "custom.css") chapter_contents = chapter_contents.replace("favicon/", menu_prefix + "favicon/") # Get a title # The official way is to set a title in document metadata, # but a) Jupyter Lab can't edit it, and b) the title conflicts with the chapter header - AZ chapter_contents = re.sub(r"<title>.*</title>", "<title>" + page_title + "</title>", chapter_contents, 1) beta_warning = None if is_todo_chapter: beta_warning = '<p><em class="beta">' + todo_suffix + '&nbsp;This chapter is work in progress ("beta"). It is incomplete and may change at any time.</em></p>' elif is_ready_chapter: beta_warning = '<p><em class="beta">' + ready_suffix + '&nbsp;This chapter is still under review ("beta"). It may change at any time.</em></p>' if beta_warning is not None: chapter_contents = chapter_contents.replace("</h1>", "</h1>" + beta_warning) # And write it out again print("post_html.py: Writing", chapter_html_file) open(chapter_html_file, mode="w", encoding="utf-8").write(chapter_contents) links_db.close()
37.121553
346
0.642397
4a14fcd1d64c2a907afe2851b6a4be1b98fc06db
5,387
py
Python
tests/generator/test_rom.py
wallentx/chia-blockchain
6bc8a3e204a4f49813093e5b3a99740fddc54a93
[ "Apache-2.0" ]
2
2022-01-01T18:52:46.000Z
2022-01-06T17:24:02.000Z
tests/generator/test_rom.py
wallentx/chia-blockchain
6bc8a3e204a4f49813093e5b3a99740fddc54a93
[ "Apache-2.0" ]
33
2021-09-28T10:17:59.000Z
2022-03-29T10:13:18.000Z
tests/generator/test_rom.py
rob-opsi/chia-blockchain
536d90f3b2d3be6ea78ce64528d04dcaba08fcb4
[ "Apache-2.0" ]
null
null
null
from clvm_tools import binutils from clvm_tools.clvmc import compile_clvm_text from chia.full_node.generator import run_generator_unsafe from chia.full_node.mempool_check_conditions import get_name_puzzle_conditions from chia.types.blockchain_format.program import Program, SerializedProgram from chia.types.blockchain_format.sized_bytes import bytes32 from chia.types.condition_with_args import ConditionWithArgs from chia.types.name_puzzle_condition import NPC from chia.types.generator_types import BlockGenerator, GeneratorArg from chia.util.clvm import int_to_bytes from chia.util.condition_tools import ConditionOpcode from chia.util.ints import uint32 from chia.wallet.puzzles.load_clvm import load_clvm MAX_COST = int(1e15) COST_PER_BYTE = int(12000) DESERIALIZE_MOD = load_clvm("chialisp_deserialisation.clvm", package_or_requirement="chia.wallet.puzzles") GENERATOR_CODE = """ (mod (deserialize-mod historical-generators) (defun first-block (deserialize-mod historical-generators) (a deserialize-mod (list (f historical-generators)))) (defun second-block (deserialize-mod historical-generators) (a deserialize-mod (r historical-generators))) (defun go (deserialize-mod historical-generators) (c (first-block deserialize-mod historical-generators) (second-block deserialize-mod historical-generators) )) (go deserialize-mod historical-generators) ) """ COMPILED_GENERATOR_CODE = bytes.fromhex( "ff02ffff01ff04ffff02ff04ffff04ff02ffff04ff05ffff04ff0bff8080808080ffff02" "ff06ffff04ff02ffff04ff05ffff04ff0bff808080808080ffff04ffff01ffff02ff05ff" "1380ff02ff05ff2b80ff018080" ) COMPILED_GENERATOR_CODE = bytes(Program.to(compile_clvm_text(GENERATOR_CODE, []))) FIRST_GENERATOR = Program.to( binutils.assemble('((parent_id (c 1 (q "puzzle blob")) 50000 "solution is here" extra data for coin))') ).as_bin() SECOND_GENERATOR = Program.to(binutils.assemble("(extra data for block)")).as_bin() FIRST_GENERATOR = Program.to( binutils.assemble( """ 
((0x0000000000000000000000000000000000000000000000000000000000000000 1 50000 ((51 0x0000000000000000000000000000000000000000000000000000000000000001 500)) "extra" "data" "for" "coin" ))""" ) ).as_bin() SECOND_GENERATOR = Program.to(binutils.assemble("(extra data for block)")).as_bin() def to_sp(sexp) -> SerializedProgram: return SerializedProgram.from_bytes(bytes(sexp)) def block_generator() -> BlockGenerator: generator_args = [GeneratorArg(uint32(0), to_sp(FIRST_GENERATOR)), GeneratorArg(uint32(1), to_sp(SECOND_GENERATOR))] return BlockGenerator(to_sp(COMPILED_GENERATOR_CODE), generator_args) EXPECTED_ABBREVIATED_COST = 108379 EXPECTED_COST = 113415 EXPECTED_OUTPUT = ( "ffffffa00000000000000000000000000000000000000000000000000000000000000000" "ff01ff8300c350ffffff33ffa00000000000000000000000000000000000000000000000" "000000000000000001ff8201f48080ff856578747261ff8464617461ff83666f72ff8463" "6f696e8080ff856578747261ff8464617461ff83666f72ff85626c6f636b80" ) class TestROM: def test_rom_inputs(self): # this test checks that the generator just works # It's useful for debugging the generator prior to having the ROM invoke it. 
args = Program.to([DESERIALIZE_MOD, [FIRST_GENERATOR, SECOND_GENERATOR]]) sp = to_sp(COMPILED_GENERATOR_CODE) cost, r = sp.run_with_cost(MAX_COST, args) assert cost == EXPECTED_ABBREVIATED_COST assert r.as_bin().hex() == EXPECTED_OUTPUT def test_get_name_puzzle_conditions(self): # this tests that extra block or coin data doesn't confuse `get_name_puzzle_conditions` gen = block_generator() cost, r = run_generator_unsafe(gen, max_cost=MAX_COST) print(r) npc_result = get_name_puzzle_conditions(gen, max_cost=MAX_COST, cost_per_byte=COST_PER_BYTE, mempool_mode=False) assert npc_result.error is None assert npc_result.clvm_cost == EXPECTED_COST cond_1 = ConditionWithArgs(ConditionOpcode.CREATE_COIN, [bytes([0] * 31 + [1]), int_to_bytes(500), b""]) CONDITIONS = [ (ConditionOpcode.CREATE_COIN, [cond_1]), ] npc = NPC( coin_name=bytes32.fromhex("e8538c2d14f2a7defae65c5c97f5d4fae7ee64acef7fec9d28ad847a0880fd03"), puzzle_hash=bytes32.fromhex("9dcf97a184f32623d11a73124ceb99a5709b083721e878a16d78f596718ba7b2"), conditions=CONDITIONS, ) assert npc_result.npc_list == [npc] def test_coin_extras(self): # the ROM supports extra data after a coin. This test checks that it actually gets passed through gen = block_generator() cost, r = run_generator_unsafe(gen, max_cost=MAX_COST) coin_spends = r.first() for coin_spend in coin_spends.as_iter(): extra_data = coin_spend.rest().rest().rest().rest() assert extra_data.as_atom_list() == b"extra data for coin".split() def test_block_extras(self): # the ROM supports extra data after the coin spend list. This test checks that it actually gets passed through gen = block_generator() cost, r = run_generator_unsafe(gen, max_cost=MAX_COST) extra_block_data = r.rest() assert extra_block_data.as_atom_list() == b"extra data for block".split()
39.610294
120
0.745684
4a14fdd7ac81cd25b40164be031f30fa4c2b4fea
2,785
py
Python
src/budget_corpus.py
j-carson/nlp-budget
82f135c6f86efc603e3e5d94eb9f6e92fc8595dd
[ "MIT" ]
1
2022-01-17T20:25:06.000Z
2022-01-17T20:25:06.000Z
src/budget_corpus.py
j-carson/nlp-budget
82f135c6f86efc603e3e5d94eb9f6e92fc8595dd
[ "MIT" ]
null
null
null
src/budget_corpus.py
j-carson/nlp-budget
82f135c6f86efc603e3e5d94eb9f6e92fc8595dd
[ "MIT" ]
null
null
null
# coding: utf-8 # # Common code to read and process the corpus # # To make sure all models are preprocessing the data in a similar way import pandas as pd import numpy as np import pdb import json import re import pickle from pathlib import Path import gensim from gensim.utils import simple_preprocess from gensim.utils import lemmatize from gensim.parsing.preprocessing import STOPWORDS as gs_stopwords docdir = Path('../data/docs') datadir = Path('../data') def read_raw_corpus(): raw_corpus = [] files = list(docdir.glob('*.body')) for body in files: with open(body, 'r') as fp: doc = fp.read() fp.close() raw_corpus.append(doc) return raw_corpus def tokenize_raw_budget(raw_corpus): # -- stemming would reduce some duplication here (e.g. section and sections) # -- but that results in a lot of non-words that don't work with word2vec # -- (Actually the lemmatizer may be fixing some of these, could go back # -- and check. simple_precprocess was leaving the plurals when I wrote this # -- list.) stopwords = set( """section sections subsection subsections chapter part amended state federal local district grant amount amounts fund funding cost expense expend expenditure purchase account fiscal pay payment payments provide provision proviso administration administrative administrator budget purposes united states national general government office regulations act acts title code specified provided available further including herein enactment program service operation operations activity appropriation appropriated provision provisions department agency those aaa bbb ccc ddd eee""".split() ) stopwords.update(gs_stopwords) # tla = three (or more) letter all-capitialized acronymns tla = re.compile(r'[A-Z]{3,}\S*?\b') corpus = [] for doc in raw_corpus: body = re.sub(tla, '', doc) # from this lemmatization tutorial # https://www.machinelearningplus.com/nlp/lemmatization-examples-python/ lemmatized = [wd.decode('utf-8').split('/')[0] for wd in lemmatize(body, min_length=3)] tokens = [ 
t for t in lemmatized if t not in stopwords ] corpus.append(tokens) return corpus def read_documents(): doc_pickle = datadir / 'corpus.pkl' if doc_pickle.exists(): with open(doc_pickle, 'rb') as fp: corpus = pickle.load(fp) else: raw_corpus = read_raw_corpus() corpus = tokenize_raw_budget(raw_corpus) # put it in a pickle file with open(doc_pickle, 'wb') as fp: pickle.dump(corpus, fp) return corpus
30.604396
96
0.668223
4a14fe6b97a7b16dc332d0f6b226980ee0b33a1e
146
py
Python
server.py
afktrust2/flask-template-files
2045068e9b0f82c02082a0a3ea9f34ed138a1d2d
[ "MIT" ]
null
null
null
server.py
afktrust2/flask-template-files
2045068e9b0f82c02082a0a3ea9f34ed138a1d2d
[ "MIT" ]
null
null
null
server.py
afktrust2/flask-template-files
2045068e9b0f82c02082a0a3ea9f34ed138a1d2d
[ "MIT" ]
null
null
null
from flask_app import app from flask_app.controllers import users if __name__ == "__main__": app.run(debug=True, host='localhost', port=8000)
29.2
52
0.760274
4a14fe7ab5ad92ce093d1aa58e0c4bc0dc3c553e
157
py
Python
tf_rnn/layers/__init__.py
ffrankies/tf-rnn
23400d4deb775841a1b8aae2831c09cc043b8263
[ "MIT" ]
1
2018-07-16T19:31:49.000Z
2018-07-16T19:31:49.000Z
tf_rnn/layers/__init__.py
ffrankies/tf-rnn
23400d4deb775841a1b8aae2831c09cc043b8263
[ "MIT" ]
25
2018-01-27T14:18:05.000Z
2018-05-21T16:21:09.000Z
tf_rnn/layers/__init__.py
ffrankies/tf-rnn
23400d4deb775841a1b8aae2831c09cc043b8263
[ "MIT" ]
null
null
null
"""Contains functions for simplifying RNN layers written with tensorflow 1.3 @since 0.6.1 """ from .performance_layer import PerformancePlaceholders
22.428571
77
0.770701
4a14fe9ea4f459c4854ab30e632128c22bc74c81
7,787
py
Python
Libraries/Python/CommonBoost/v1.0/CommonBoost/BoostBuildImpl.py
davidbrownell/Common_cpp_boost_Common
79bdf1c5337c67ca153733e937e96a492d3a82f0
[ "BSL-1.0" ]
null
null
null
Libraries/Python/CommonBoost/v1.0/CommonBoost/BoostBuildImpl.py
davidbrownell/Common_cpp_boost_Common
79bdf1c5337c67ca153733e937e96a492d3a82f0
[ "BSL-1.0" ]
null
null
null
Libraries/Python/CommonBoost/v1.0/CommonBoost/BoostBuildImpl.py
davidbrownell/Common_cpp_boost_Common
79bdf1c5337c67ca153733e937e96a492d3a82f0
[ "BSL-1.0" ]
null
null
null
# ---------------------------------------------------------------------- # | # | BoostBuildImpl.py # | # | David Brownell <db@DavidBrownell.com> # | 2019-04-17 15:14:35 # | # ---------------------------------------------------------------------- # | # | Copyright David Brownell 2019-21 # | Distributed under the Boost Software License, Version 1.0. See # | accompanying file LICENSE_1_0.txt or copy at # | http://www.boost.org/LICENSE_1_0.txt. # | # ---------------------------------------------------------------------- """Contains Build and Clean methods that build boost""" import os import sys import CommonEnvironment from CommonEnvironment.CallOnExit import CallOnExit from CommonEnvironment import CommandLine from CommonEnvironment import FileSystem from CommonEnvironment import Process from CommonEnvironment.Shell.All import CurrentShell from CommonEnvironment.StreamDecorator import StreamDecorator # ---------------------------------------------------------------------- _script_fullpath = CommonEnvironment.ThisFullpath() _script_dir, _script_name = os.path.split(_script_fullpath) # ---------------------------------------------------------------------- # ---------------------------------------------------------------------- def CreateBuild(boost_root, is_standard_configuration): boost_libs = ["iostreams", "regex", "serialization"] # ---------------------------------------------------------------------- @CommandLine.EntryPoint @CommandLine.Constraints( output_stream=None, ) def Build( output_stream=sys.stdout, ): with StreamDecorator(output_stream).DoneManager( line_prefix="", prefix="\nResults: ", suffix="\n", ) as dm: if is_standard_configuration: dm.stream.write( "This build is not active with the 'standard' configuration.\n", ) return dm.result # Build b2 (if necessary) dm.stream.write("Checking for 'b2'...") with dm.stream.DoneManager( suffix="\n", ) as this_dm: b2_filename = os.path.join( boost_root, CurrentShell.CreateExecutableName("b2"), ) if not 
os.path.isfile(b2_filename): this_dm.stream.write("Building 'b2'...") with this_dm.stream.DoneManager() as build_dm: prev_dir = os.getcwd() os.chdir(boost_root) with CallOnExit(lambda: os.chdir(prev_dir)): if CurrentShell.CategoryName == "Windows": bootstrap_name = "bootstrap.bat" command_line = bootstrap_name else: bootstrap_name = "bootstrap.sh" # Manually set the toolset compiler_name = os.getenv( "DEVELOPMENT_ENVIRONMENT_CPP_COMPILER_NAME", ).lower() if "clang" in compiler_name: toolset = "clang" else: build_dm.stream.write( "ERROR: '{}' is not a recognized compiler.\n".format( compiler_name, ), ) build_dm.result = -1 return build_dm.result command_line = "./{} --with-toolset={}".format( bootstrap_name, toolset, ) for filename in [ bootstrap_name, os.path.join("tools", "build", "bootstrap.sh"), os.path.join( "tools", "build", "src", "engine", "build.sh", ), ]: assert os.path.isfile(filename), filename CurrentShell.MakeFileExecutable(filename) build_dm.result, output = Process.Execute(command_line) if build_dm.result != 0: build_dm.stream.write(output) return build_dm.result # Build boost (if necessary) dm.stream.write("Building boost...") with dm.stream.DoneManager() as build_dm: prev_dir = os.getcwd() os.chdir(boost_root) architecture = os.getenv("DEVELOPMENT_ENVIRONMENT_CPP_ARCHITECTURE") with CallOnExit(lambda: os.chdir(prev_dir)): command_line = "b2 --build-type=complete --layout=versioned --build-dir=build/{architecture} --hash stage address-model={architecture} {libs}".format( architecture="64" if architecture == "x64" else "32", libs=" ".join( ["--with-{}".format(lib_name) for lib_name in boost_libs], ), ) if CurrentShell.CategoryName != "Windows": # TODO: Enable ASLR # command_line = './{} variant=release cxxflags="-fPIC -fpie" linkflags="-pie"'.format(command_line) command_line = "./{} ".format(command_line) build_dm.result = Process.Execute(command_line, build_dm.stream) if build_dm.result != 0: return build_dm.result return dm.result # 
---------------------------------------------------------------------- return Build # ---------------------------------------------------------------------- def CreateClean(boost_root): # ---------------------------------------------------------------------- @CommandLine.EntryPoint @CommandLine.Constraints( output_stream=None, ) def Clean( force=False, output_stream=sys.stdout, ): with StreamDecorator(output_stream).DoneManager( line_prefix="", prefix="\nResults: ", suffix="\n", ) as dm: for subdir in ["stage", "build"]: this_dir = os.path.join(boost_root, subdir) if not os.path.isdir(this_dir): continue if not force: dm.stream.write( "Call this method with the '/force' flag to remove '{}'.\n".format( this_dir, ), ) continue dm.stream.write("Removing '{}'...".format(this_dir)) with dm.stream.DoneManager(): FileSystem.RemoveTree(this_dir) return dm.result # ---------------------------------------------------------------------- return Clean
40.34715
171
0.402851
4a14ff6c910fe067f0fccc141bd3e5c1dc5795ae
2,763
py
Python
src/fuzzingtool/factories/wordlist_factory.py
retr0-13/FuzzingTool
2fe34911abd583838b7859f83201cd474a2beefc
[ "MIT" ]
null
null
null
src/fuzzingtool/factories/wordlist_factory.py
retr0-13/FuzzingTool
2fe34911abd583838b7859f83201cd474a2beefc
[ "MIT" ]
null
null
null
src/fuzzingtool/factories/wordlist_factory.py
retr0-13/FuzzingTool
2fe34911abd583838b7859f83201cd474a2beefc
[ "MIT" ]
null
null
null
# Copyright (c) 2020 - present Vitor Oriel <https://github.com/VitorOriel> # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
from .base_factories import BaseWordlistFactory from .plugin_factory import PluginFactory from ..core.bases.base_wordlist import BaseWordlist from ..utils.http_utils import get_host, get_pure_url from ..conn.requesters.requester import Requester from ..core.defaults.wordlists import ListWordlist, FileWordlist from ..exceptions.main_exceptions import WordlistCreationError from ..exceptions.plugin_exceptions import InvalidPlugin, PluginCreationError class WordlistFactory(BaseWordlistFactory): @staticmethod def creator(name: str, params: str, requester: Requester) -> BaseWordlist: try: wordlist_cls = PluginFactory.class_creator(name, 'wordlists') except InvalidPlugin: if name.startswith('[') and name.endswith(']'): wordlist_obj = ListWordlist(name) else: # For default, read the wordlist from a file wordlist_obj = FileWordlist(name) else: if (not params and requester and wordlist_cls.__params__['metavar'] in ["TARGET_HOST", "TARGET_URL"]): if "TARGET_HOST" in wordlist_cls.__params__['metavar']: params = get_host(get_pure_url(requester.get_url())) elif "TARGET_URL" in wordlist_cls.__params__['metavar']: params = get_pure_url(requester.get_url()) try: wordlist_obj = PluginFactory.object_creator(name, 'wordlists', params) except PluginCreationError as e: raise WordlistCreationError(str(e)) return wordlist_obj
50.236364
86
0.709736
4a14ff82ea8833f45db27a3dbb77066c92653948
13,645
py
Python
tests/test_hpo.py
sbonner0/pykeen
df5beab80772a08d595ace1b39c7fd4df7164a39
[ "MIT" ]
750
2020-06-23T08:36:34.000Z
2022-03-30T22:53:18.000Z
tests/test_hpo.py
sbonner0/pykeen
df5beab80772a08d595ace1b39c7fd4df7164a39
[ "MIT" ]
691
2020-06-22T11:56:28.000Z
2022-03-31T16:07:32.000Z
tests/test_hpo.py
sbonner0/pykeen
df5beab80772a08d595ace1b39c7fd4df7164a39
[ "MIT" ]
122
2020-06-26T14:37:56.000Z
2022-03-23T08:25:22.000Z
# -*- coding: utf-8 -*- """Test hyper-parameter optimization.""" import tempfile import unittest from unittest.mock import MagicMock import optuna import pytest from optuna.trial import TrialState from pykeen.datasets.nations import ( NATIONS_TEST_PATH, NATIONS_TRAIN_PATH, NATIONS_VALIDATE_PATH, Nations, NationsLiteral, ) from pykeen.hpo import hpo_pipeline from pykeen.hpo.hpo import ExtraKeysError, suggest_kwargs from pykeen.trackers import ResultTracker, tracker_resolver from pykeen.triples import TriplesFactory class TestInvalidConfigurations(unittest.TestCase): """Tests of invalid HPO configurations.""" def test_earl_stopping_with_optimize_epochs(self): """Assert that the pipeline raises a value error.""" with self.assertRaises(ValueError): hpo_pipeline( dataset="kinships", model="transe", stopper="early", training_kwargs_ranges=dict(epochs=...), ) @pytest.mark.slow class TestHyperparameterOptimization(unittest.TestCase): """Test hyper-parameter optimization.""" def test_run(self): """Test simply making a study.""" hpo_pipeline_result = hpo_pipeline( dataset="nations", model="TransE", training_kwargs=dict(num_epochs=5, use_tqdm=False), n_trials=2, ) df = hpo_pipeline_result.study.trials_dataframe(multi_index=True) # Check a model param is optimized self.assertIn(("params", "model.embedding_dim"), df.columns) # Check a loss param is optimized self.assertIn(("params", "loss.margin"), df.columns) self.assertNotIn(("params", "training.num_epochs"), df.columns) def test_fail_invalid_kwarg_ranges(self): """Test that an exception is thrown if an incorrect argument is passed.""" with self.assertRaises(ExtraKeysError) as e: hpo_pipeline( dataset="Nations", model="TransE", n_trials=1, training_loop="sLCWA", training_kwargs=dict(num_epochs=5, use_tqdm=False), negative_sampler_kwargs_ranges=dict( garbage_key=dict(type=int, low=1, high=100), ), ) self.assertEqual(["garbage_key"], e.exception.args[0]) def test_specified_model_hyperparameter(self): """Test making a study that 
has a specified model hyper-parameter.""" target_embedding_dim = 50 hpo_pipeline_result = hpo_pipeline( dataset="nations", model="TransE", model_kwargs=dict(embedding_dim=target_embedding_dim), training_kwargs=dict(num_epochs=5, use_tqdm=False), n_trials=2, ) df = hpo_pipeline_result.study.trials_dataframe(multi_index=True) # Check a model param is NOT optimized self.assertNotIn(("params", "model.embedding_dim"), df.columns) # Check a loss param is optimized self.assertIn(("params", "loss.margin"), df.columns) def test_specified_loss_hyperparameter(self): """Test making a study that has a specified loss hyper-parameter.""" hpo_pipeline_result = hpo_pipeline( dataset="nations", model="TransE", loss_kwargs=dict(margin=1.0), training_kwargs=dict(num_epochs=5, use_tqdm=False), n_trials=2, ) df = hpo_pipeline_result.study.trials_dataframe(multi_index=True) # Check a model param is optimized self.assertIn(("params", "model.embedding_dim"), df.columns) # Check a loss param is NOT optimized self.assertNotIn(("params", "loss.margin"), df.columns) def test_specified_loss_and_model_hyperparameter(self): """Test making a study that has a specified loss hyper-parameter.""" target_embedding_dim = 50 hpo_pipeline_result = hpo_pipeline( dataset="nations", model="TransE", model_kwargs=dict(embedding_dim=target_embedding_dim), loss="MarginRankingLoss", loss_kwargs=dict(margin=1.0), training_kwargs=dict(num_epochs=5, use_tqdm=False), n_trials=2, ) df = hpo_pipeline_result.study.trials_dataframe(multi_index=True) # Check a model param is NOT optimized self.assertNotIn(("params", "model.embedding_dim"), df.columns) # Check a loss param is NOT optimized self.assertNotIn(("params", "loss.margin"), df.columns) def test_specified_range(self): """Test making a study that has a specified hyper-parameter.""" hpo_pipeline_result = hpo_pipeline( dataset="nations", model="TransE", model_kwargs_ranges=dict( embedding_dim=dict(type=int, low=60, high=80, q=10), ), loss_kwargs_ranges=dict( 
margin=dict(type=int, low=1, high=2), ), training_kwargs=dict(num_epochs=5, use_tqdm=False), n_trials=2, ) df = hpo_pipeline_result.study.trials_dataframe(multi_index=True) self.assertIn(("params", "model.embedding_dim"), df.columns) self.assertTrue(df[("params", "model.embedding_dim")].isin({60.0, 70.0, 80.0}).all()) self.assertIn(("params", "loss.margin"), df.columns) self.assertTrue(df[("params", "loss.margin")].isin({1, 2}).all()) def test_sampling_values_from_2_power_x(self): """Test making a study that has a range defined by f(x) = 2^x.""" model_kwargs_ranges = dict( embedding_dim=dict(type=int, low=0, high=4, scale="power_two"), ) objective = _test_suggest(model_kwargs_ranges) study = optuna.create_study() study.optimize(objective, n_trials=2) df = study.trials_dataframe(multi_index=True) self.assertIn(("params", "model.embedding_dim"), df.columns) self.assertTrue(df[("params", "model.embedding_dim")].isin({1, 2, 4, 8, 16}).all()) objective = _test_suggest(model_kwargs_ranges) with self.assertRaises(Exception) as context: study = optuna.create_study() study.optimize(objective, n_trials=2) self.assertIn("Upper bound 4 is not greater than lower bound 4.", context.exception) def test_sampling_values_from_power_x(self): """Test making a study that has a range defined by f(x) = base^x.""" kwargs_ranges = dict( embedding_dim=dict(type=int, low=0, high=2, scale="power", base=10), ) objective = _test_suggest(kwargs_ranges) study = optuna.create_study() study.optimize(objective, n_trials=2) df = study.trials_dataframe(multi_index=True) self.assertIn(("params", "model.embedding_dim"), df.columns) values = df[("params", "model.embedding_dim")] self.assertTrue(values.isin({1, 10, 100}).all(), msg=f"Got values: {values}") def test_failing_trials(self): """Test whether failing trials are correctly reported.""" class MockResultTracker(MagicMock, ResultTracker): """A mock result tracker.""" tracker_resolver.register(cls=MockResultTracker) mock_result_tracker = 
MockResultTracker() mock_result_tracker.end_run = MagicMock() result = hpo_pipeline( dataset="nations", model="distmult", model_kwargs_ranges=dict( embedding_dim=dict( type=int, low=-10, high=-1, # will fail ), ), n_trials=1, result_tracker=mock_result_tracker, ) # verify failure assert all(t.state == TrialState.FAIL for t in result.study.trials) assert all(ca[1]["success"] is False for ca in mock_result_tracker.end_run.call_args_list) def _test_suggest(kwargs_ranges): def objective(trial): suggest_kwargs(prefix="model", trial=trial, kwargs_ranges=kwargs_ranges) return 1.0 return objective @pytest.mark.slow class TestHPODatasets(unittest.TestCase): """Test different ways of loading data in HPO.""" def test_custom_dataset_instance(self): """Test passing a pre-instantiated dataset to HPO.""" hpo_pipeline_result = self._help_test_hpo( study_name="HPO with custom dataset instance", dataset=Nations(), # mock a "custom" dataset by using one already available ) # Since custom data was passed, we can't store any of this self.assertNotIn("dataset", hpo_pipeline_result.study.user_attrs) self.assertNotIn("training", hpo_pipeline_result.study.user_attrs) self.assertNotIn("testing", hpo_pipeline_result.study.user_attrs) self.assertNotIn("validation", hpo_pipeline_result.study.user_attrs) def test_custom_dataset_cls(self): """Test passing a dataset class to HPO.""" hpo_pipeline_result = self._help_test_hpo( study_name="HPO with custom dataset class", dataset=Nations, ) # currently, any custom data doesn't get stored. 
self.assertNotIn("dataset", hpo_pipeline_result.study.user_attrs) # self.assertEqual(Nations.get_normalized_name(), hpo_pipeline_result.study.user_attrs['dataset']) self.assertNotIn("training", hpo_pipeline_result.study.user_attrs) self.assertNotIn("testing", hpo_pipeline_result.study.user_attrs) self.assertNotIn("validation", hpo_pipeline_result.study.user_attrs) def test_custom_dataset_path(self): """Test passing a dataset class to HPO.""" hpo_pipeline_result = self._help_test_hpo( study_name="HPO with custom dataset path", dataset=NATIONS_TRAIN_PATH, ) self.assertIn("dataset", hpo_pipeline_result.study.user_attrs) self.assertEqual(str(NATIONS_TRAIN_PATH), hpo_pipeline_result.study.user_attrs["dataset"]) self.assertNotIn("training", hpo_pipeline_result.study.user_attrs) self.assertNotIn("testing", hpo_pipeline_result.study.user_attrs) self.assertNotIn("validation", hpo_pipeline_result.study.user_attrs) def test_custom_tf_object(self): """Test using a custom triples factories with HPO. .. 
seealso:: https://github.com/pykeen/pykeen/issues/230 """ tf = TriplesFactory.from_path(path=NATIONS_TRAIN_PATH) training, testing, validation = tf.split([0.8, 0.1, 0.1], random_state=0) hpo_pipeline_result = self._help_test_hpo( study_name="HPO with custom triples factories", training=training, testing=testing, validation=validation, ) self.assertNotIn("dataset", hpo_pipeline_result.study.user_attrs) # Since there's no source path information, these shouldn't be # added, even if it might be possible to infer path information # from the triples factories self.assertNotIn("training", hpo_pipeline_result.study.user_attrs) self.assertNotIn("testing", hpo_pipeline_result.study.user_attrs) self.assertNotIn("validation", hpo_pipeline_result.study.user_attrs) def test_custom_paths(self): """Test using a custom triples paths with HPO.""" hpo_pipeline_result = self._help_test_hpo( study_name="HPO with custom triples paths", training=NATIONS_TRAIN_PATH, # mock "custom" paths testing=NATIONS_TEST_PATH, validation=NATIONS_VALIDATE_PATH, ) self.assertNotIn("dataset", hpo_pipeline_result.study.user_attrs) # Since paths were passed for training, testing, and validation, # they should be stored as study-level attributes self.assertIn("training", hpo_pipeline_result.study.user_attrs) self.assertEqual(str(NATIONS_TRAIN_PATH), hpo_pipeline_result.study.user_attrs["training"]) self.assertIn("testing", hpo_pipeline_result.study.user_attrs) self.assertEqual(str(NATIONS_TEST_PATH), hpo_pipeline_result.study.user_attrs["testing"]) self.assertIn("validation", hpo_pipeline_result.study.user_attrs) self.assertEqual(str(NATIONS_VALIDATE_PATH), hpo_pipeline_result.study.user_attrs["validation"]) def _help_test_hpo(self, **kwargs): hpo_pipeline_result = hpo_pipeline( **kwargs, model="TransE", n_trials=1, training_kwargs=dict(num_epochs=1, use_tqdm=False), evaluation_kwargs=dict(use_tqdm=False), ) with tempfile.TemporaryDirectory() as directory: hpo_pipeline_result.save_to_directory(directory) 
return hpo_pipeline_result @pytest.mark.slow class TestHyperparameterOptimizationLiterals(unittest.TestCase): """Test hyper-parameter optimization.""" def test_run(self): """Test simply making a study.""" hpo_pipeline_result = hpo_pipeline( dataset=NationsLiteral, model="DistMultLiteral", training_kwargs=dict(num_epochs=5, use_tqdm=False), n_trials=2, ) df = hpo_pipeline_result.study.trials_dataframe(multi_index=True) # Check a model param is optimized self.assertIn(("params", "model.embedding_dim"), df.columns) # Check a loss param is optimized self.assertIn(("params", "loss.margin"), df.columns) self.assertNotIn(("params", "training.num_epochs"), df.columns)
41.984615
106
0.651081
4a1500655e813cc40c29d84f42539014d1b68f63
177
py
Python
divisor list.py
svikash/python-basic-program
e0542d98c2985c71cfbaaf3f09ccfdb747a9e2af
[ "MIT" ]
null
null
null
divisor list.py
svikash/python-basic-program
e0542d98c2985c71cfbaaf3f09ccfdb747a9e2af
[ "MIT" ]
null
null
null
divisor list.py
svikash/python-basic-program
e0542d98c2985c71cfbaaf3f09ccfdb747a9e2af
[ "MIT" ]
1
2018-10-01T06:04:27.000Z
2018-10-01T06:04:27.000Z
a=int(input("enter the number to check divisor")) #add the divisor to list and print list=[] for i in range (1,a): if a%i == 0: list.append(int(i)) print(list)
25.285714
50
0.615819
4a150168a3f3841a22b7c68c1af6b551f83c785e
1,242
py
Python
tests/node/agg/test_abstract.py
alkemics/panda
39e50d9b2408480ebf70c1a2d1a9fa12c55907c5
[ "Apache-2.0" ]
13
2020-03-03T20:59:08.000Z
2022-02-16T09:51:40.000Z
tests/node/agg/test_abstract.py
alkemics/panda
39e50d9b2408480ebf70c1a2d1a9fa12c55907c5
[ "Apache-2.0" ]
57
2020-06-21T18:48:20.000Z
2022-03-16T09:52:39.000Z
tests/node/agg/test_abstract.py
alkemics/panda
39e50d9b2408480ebf70c1a2d1a9fa12c55907c5
[ "Apache-2.0" ]
2
2020-03-03T20:59:20.000Z
2020-05-10T20:31:53.000Z
from pandagg.node.aggs.abstract import AggClause def test_abstract_agg_node(): class CustomAgg(AggClause): KEY = "custom_type" VALUE_ATTRS = ["bucket_value_path"] # would mean this agg can be applied only on boolean fields WHITELISTED_MAPPING_TYPES = ["boolean"] BLACKLISTED_MAPPING_TYPES = None # example for unique bucket agg def extract_buckets(self, response_value): yield (None, response_value) node = CustomAgg(custom_body={"stuff": 2}) assert node.to_dict() == {"custom_type": {"custom_body": {"stuff": 2}}} node = CustomAgg(custom_body={"stuff": 2}, meta={"stuff": "meta_stuff"}) assert node.to_dict() == { "custom_type": {"custom_body": {"stuff": 2}}, "meta": {"stuff": "meta_stuff"}, } assert ( node.__str__() == '<CustomAgg, type=custom_type, body={"custom_body": {"stuff": 2}}>' ) # suppose this aggregation type provide buckets in the following format hypothetic_es_response_bucket = {"bucket_value_path": 43} assert node.extract_bucket_value(hypothetic_es_response_bucket) == 43 assert not node.valid_on_field_type("string") assert node.valid_on_field_type("boolean")
34.5
78
0.65781
4a15020d6dd2ba9ce9399f9b72318d726bb1b742
14,021
py
Python
pywinauto/unittests/test_taskbar.py
adolli/pywinauto
fd27458700026f283a23e176bcd7303b68bb5077
[ "BSD-3-Clause" ]
1
2019-02-17T22:46:15.000Z
2019-02-17T22:46:15.000Z
pywinauto/unittests/test_taskbar.py
adolli/pywinauto
fd27458700026f283a23e176bcd7303b68bb5077
[ "BSD-3-Clause" ]
5
2015-12-15T14:22:28.000Z
2019-04-14T09:57:32.000Z
pywinauto/unittests/test_taskbar.py
adolli/pywinauto
fd27458700026f283a23e176bcd7303b68bb5077
[ "BSD-3-Clause" ]
7
2015-12-03T11:08:05.000Z
2020-04-28T05:39:35.000Z
# GUI Application automation and testing library # Copyright (C) 2006-2018 Mark Mc Mahon and Contributors # https://github.com/pywinauto/pywinauto/graphs/contributors # http://pywinauto.readthedocs.io/en/latest/credits.html # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # * Neither the name of pywinauto nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
"""Tests for taskbar.py""" import unittest import sys import os sys.path.append(".") from pywinauto import taskbar # noqa: E402 from pywinauto import findwindows # noqa: E402 from pywinauto.application import Application # noqa: E402 from pywinauto.application import ProcessNotFoundError # noqa: E402 from pywinauto.application import WindowSpecification # noqa: E402 from pywinauto.sysinfo import is_x64_Python, is_x64_OS # noqa: E402 from pywinauto import win32defines # noqa: E402 from pywinauto.timings import wait_until # noqa: E402 import pywinauto.actionlogger # noqa: E402 from pywinauto.timings import Timings # noqa: E402 from pywinauto.controls.common_controls import ToolbarWrapper # noqa: E402 from pywinauto import mouse # noqa: E402 from pywinauto import Desktop # noqa: E402 #pywinauto.actionlogger.enable() mfc_samples_folder = os.path.join( os.path.dirname(__file__), r"..\..\apps\MFC_samples" ) if is_x64_Python(): mfc_samples_folder = os.path.join(mfc_samples_folder, 'x64') _ready_timeout = 60 _retry_interval = 0.5 def _toggle_notification_area_icons(show_all=True, debug_img=None): """ A helper function to change 'Show All Icons' settings. On a succesful execution the function returns an original state of 'Show All Icons' checkbox. 
The helper works only for an "English" version of Windows, on non-english versions of Windows the 'Notification Area Icons' window should be accessed with a localized title" """ Application().start(r'explorer.exe') class_name = 'CabinetWClass' def _cabinetwclass_exist(): "Verify if at least one active 'CabinetWClass' window is created" l = findwindows.find_elements(active_only=True, class_name=class_name) return (len(l) > 0) wait_until(_ready_timeout, _retry_interval, _cabinetwclass_exist) handle = findwindows.find_elements(active_only=True, class_name=class_name)[-1].handle window = WindowSpecification({'handle': handle, 'backend': 'win32', }) explorer = Application().Connect(process=window.process_id()) cur_state = None try: # Go to "Control Panel -> Notification Area Icons" cmd_str = r'control /name Microsoft.NotificationAreaIcons' for _ in range(3): window.Wait("ready", timeout=_ready_timeout) window.AddressBandRoot.click_input() explorer.WaitCPUUsageLower(threshold=2, timeout=_ready_timeout) window.type_keys(cmd_str, with_spaces=True, set_foreground=True) # verfiy the text in the address combobox after type_keys finished cmbx_spec = window.AddressBandRoot.ComboBoxEx if cmbx_spec.exists(timeout=_ready_timeout, retry_interval=_retry_interval): texts = cmbx_spec.texts() if texts and texts[0] == cmd_str: break # Send ESCs to remove the invalid text window.type_keys("{ESC}" * 3) # Send 'ENTER' separately, this is to make sure # the window focus hasn't accidentally been lost window.type_keys( '{ENTER}', with_spaces=True, set_foreground=True ) explorer.WaitCPUUsageLower(threshold=5, timeout=_ready_timeout) # Get the new opened applet notif_area = Desktop().window(title="Notification Area Icons", class_name=class_name) notif_area.Wait("ready", timeout=_ready_timeout) cur_state = notif_area.CheckBox.GetCheckState() # toggle the checkbox if it differs and close the applet if bool(cur_state) != show_all: notif_area.CheckBox.click_input() notif_area.Ok.click_input() 
explorer.WaitCPUUsageLower(threshold=5, timeout=_ready_timeout) except Exception as e: if debug_img: from PIL import ImageGrab ImageGrab.grab().save("%s.jpg" % (debug_img), "JPEG") l = pywinauto.actionlogger.ActionLogger() l.log("RuntimeError in _toggle_notification_area_icons") raise e finally: # close the explorer window window.Close() return cur_state def _wait_minimized(dlg): """A helper function to verify that the specified dialog is minimized Basically, WaitNot('visible', timeout=30) would work too, just wanted to make sure the dlg is really got to the 'minimized' state because we test hiding the window to the tray. """ wait_until( timeout=_ready_timeout, retry_interval=_retry_interval, func=lambda: (dlg.GetShowState() == win32defines.SW_SHOWMINIMIZED) ) return True class TaskbarTestCases(unittest.TestCase): """Unit tests for the taskbar""" def setUp(self): """Set some data and ensure the application is in the state we want""" Timings.Defaults() self.tm = _ready_timeout app = Application(backend='win32') app.start(os.path.join(mfc_samples_folder, u"TrayMenu.exe"), wait_for_idle=False) self.app = app self.dlg = app.top_window() mouse.move((-500, 200)) # remove the mouse from the screen to avoid side effects self.dlg.Wait('ready', timeout=self.tm) def tearDown(self): """Close the application after tests""" self.dlg.SendMessage(win32defines.WM_CLOSE) self.dlg.WaitNot('ready') # cleanup additional unclosed sampleapps l = pywinauto.actionlogger.ActionLogger() try: for i in range(2): l.log("Look for unclosed sample apps") app = Application() app.connect(path="TrayMenu.exe") l.log("Forse closing a leftover app: {0}".format(app)) app.kill_() except(ProcessNotFoundError): l.log("No more leftovers. 
All good.") def testTaskbar(self): # just make sure it's found taskbar.TaskBar.Wait('visible', timeout=self.tm) ''' def testStartButton(self): # TODO: fix it for AppVeyor taskbar.StartButton.click_input() sample_app_exe = os.path.join(mfc_samples_folder, u"TrayMenu.exe") start_menu = taskbar.explorer_app.Window_(class_name='DV2ControlHost') start_menu.SearchEditBoxWrapperClass.click_input() start_menu.SearchEditBoxWrapperClass.type_keys( sample_app_exe() + '{ENTER}', with_spaces=True, set_foreground=False ) time.sleep(5) app = Application.connect(path=sample_app_exe()) dlg = app.top_window() Wait('ready', timeout=self.tm) ''' def testSystemTray(self): taskbar.SystemTray.Wait('visible', timeout=self.tm) # just make sure it's found def testClock(self): "Test opening/closing of a system clock applet" # Just hide a sample app as we don't use it in this test self.dlg.Minimize() _wait_minimized(self.dlg) # Launch Clock applet taskbar.Clock.click_input() ClockWindow = taskbar.explorer_app.Window_(class_name='ClockFlyoutWindow') ClockWindow.Wait('visible', timeout=self.tm) # Close the applet with Esc, we don't click again on it because # the second click sometimes doesn't hide a clock but just relaunch it taskbar.Clock.type_keys("{ESC}", set_foreground=False) ClockWindow.WaitNot('visible', timeout=self.tm) def testClickVisibleIcon(self): """ Test minimizing a sample app into the visible area of the tray and restoring the app back """ if is_x64_Python() != is_x64_OS(): # We don't run this test for mixed cases: # a 32-bit Python process can't interact with # a 64-bit explorer process (taskbar) and vice versa return # Make sure that the hidden icons area is disabled orig_hid_state = _toggle_notification_area_icons( show_all=True, debug_img="%s_01" % (self.id()) ) self.dlg.Minimize() _wait_minimized(self.dlg) menu_window = [None] # Click in the visible area and wait for a popup menu def _show_popup_menu(): taskbar.explorer_app.WaitCPUUsageLower(threshold=5, timeout=self.tm) 
taskbar.RightClickSystemTrayIcon('MFCTrayDemo') menu = self.app.top_window().children()[0] res = isinstance(menu, ToolbarWrapper) and menu.is_visible() menu_window[0] = menu return res wait_until(self.tm, _retry_interval, _show_popup_menu) menu_window[0].MenuBarClickInput("#2", self.app) popup_window = self.app.top_window() hdl = self.dlg.PopupWindow() self.assertEquals(popup_window.handle, hdl) taskbar.ClickSystemTrayIcon('MFCTrayDemo', double=True) self.dlg.Wait('active', timeout=self.tm) # Restore Notification Area settings _toggle_notification_area_icons(show_all=orig_hid_state, debug_img="%s_02" % (self.id())) def testClickHiddenIcon(self): """ Test minimizing a sample app into the hidden area of the tray and restoring the app back """ if is_x64_Python() != is_x64_OS(): # We don't run this test for mixed cases: # a 32-bit Python process can't interact with # a 64-bit explorer process (taskbar) and vice versa return # Make sure that the hidden icons area is enabled orig_hid_state = _toggle_notification_area_icons( show_all=False, debug_img="%s_01" % (self.id()) ) self.dlg.Minimize() _wait_minimized(self.dlg) # Run one more instance of the sample app # hopefully one of the icons moves into the hidden area app2 = Application() app2.start(os.path.join(mfc_samples_folder, u"TrayMenu.exe")) dlg2 = app2.top_window() dlg2.Wait('visible', timeout=self.tm) dlg2.Minimize() _wait_minimized(dlg2) # Click in the hidden area taskbar.explorer_app.WaitCPUUsageLower(threshold=5, timeout=40) taskbar.ClickHiddenSystemTrayIcon('MFCTrayDemo', double=True) self.dlg.Wait('visible', timeout=self.tm) # Restore Notification Area settings _toggle_notification_area_icons(show_all=orig_hid_state, debug_img="%s_02" % (self.id())) dlg2.SendMessage(win32defines.WM_CLOSE) def testClickCustomizeButton(self): "Test click on the 'show hidden icons' button" # Minimize to tray self.dlg.Minimize() _wait_minimized(self.dlg) # Make sure that the hidden icons area is enabled orig_hid_state = 
_toggle_notification_area_icons( show_all=False, debug_img="%s_01" % (self.id()) ) # Run one more instance of the sample app # hopefully one of the icons moves into the hidden area app2 = Application() app2.start(os.path.join(mfc_samples_folder, u"TrayMenu.exe")) dlg2 = app2.top_window() dlg2.Wait('visible', timeout=self.tm) dlg2.Minimize() _wait_minimized(dlg2) # Test click on "Show Hidden Icons" button taskbar.ShowHiddenIconsButton.click_input() niow_dlg = taskbar.explorer_app.Window_(class_name='NotifyIconOverflowWindow') niow_dlg.OverflowNotificationAreaToolbar.Wait('ready', timeout=self.tm) niow_dlg.SysLink.click_input() nai = Desktop().window( title="Notification Area Icons", class_name="CabinetWClass" ) nai.wait('ready') origAlwaysShow = nai.CheckBox.GetCheckState() if not origAlwaysShow: nai.CheckBox.click_input() nai.OK.Click() # Restore Notification Area settings _toggle_notification_area_icons(show_all=orig_hid_state, debug_img="%s_02" % (self.id())) # close the second sample app dlg2.SendMessage(win32defines.WM_CLOSE) if __name__ == "__main__": unittest.main()
38.308743
89
0.661579
4a15027e8519b4c2c43c0cceaa2eac6370f04690
46
py
Python
DSA_Snippets/algo_permute.py
mahnooranjum/Python_Programming
ba251e0e855842112efeb968d06458c60eaf1bd3
[ "MIT" ]
null
null
null
DSA_Snippets/algo_permute.py
mahnooranjum/Python_Programming
ba251e0e855842112efeb968d06458c60eaf1bd3
[ "MIT" ]
null
null
null
DSA_Snippets/algo_permute.py
mahnooranjum/Python_Programming
ba251e0e855842112efeb968d06458c60eaf1bd3
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """ @author: MAQ """
7.666667
23
0.413043
4a15032e4ea02c980f0b2f96bda17eb3f9d8390b
6,771
py
Python
backend/checklists/settings.py
Neo6666666/checklists
cc4eb1685b63f64620742843b444bad21e74214a
[ "MIT" ]
null
null
null
backend/checklists/settings.py
Neo6666666/checklists
cc4eb1685b63f64620742843b444bad21e74214a
[ "MIT" ]
null
null
null
backend/checklists/settings.py
Neo6666666/checklists
cc4eb1685b63f64620742843b444bad21e74214a
[ "MIT" ]
null
null
null
import os from os import environ def get_env(key, default=None): val = environ.get(key, default) if val == 'True': val = True elif val == 'False': val = False return val BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) DEBUG = get_env('DEBUG') SECRET_KEY = get_env('SECRET_KEY') ALLOWED_HOSTS = ["*"] # APP CONFIGURATION # ------------------------------------------------------------------------------ DJANGO_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', ] THIRD_PARTY_APPS = [ 'rest_framework', 'rest_framework.authtoken', 'djoser', 'import_export', 'django_q', 'post_office', ] if DEBUG: THIRD_PARTY_APPS = ['silk', 'drf_yasg', ] + THIRD_PARTY_APPS # Silk settings SILKY_PYTHON_PROFILER = True SILKY_PYTHON_PROFILER_BINARY = True SILKY_PYTHON_PROFILER_RESULT_PATH = os.path.join(BASE_DIR, 'profiles') SILKY_MAX_REQUEST_BODY_SIZE = -1 # Silk takes anything <0 as no limit SILKY_MAX_RESPONSE_BODY_SIZE = -1 SILKY_META = True # Swagger settings SWAGGER_SETTINGS = { 'USE_SESSION_AUTH': False, 'SECURITY_DEFINITIONS': { 'Token': { 'type': 'apiKey', 'name': 'Authorization', 'in': 'header' } }, 'VALIDATOR_URL': None, } LOCAL_APPS = [ 'user_profile.apps.UserProfileConfig', 'lists', 'notifications', ] # See: https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS MIDDLEWARE = [ 'checklists.slash_middleware.AppendSlashMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', ] if DEBUG: MIDDLEWARE = ['silk.middleware.SilkyMiddleware', ] + MIDDLEWARE ROOT_URLCONF = 'checklists.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 
'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, { 'BACKEND': 'post_office.template.backends.post_office.PostOfficeTemplates', 'APP_DIRS': True, 'DIRS': [], 'OPTIONS': { 'context_processors': [ 'django.contrib.auth.context_processors.auth', 'django.template.context_processors.debug', 'django.template.context_processors.i18n', 'django.template.context_processors.media', 'django.template.context_processors.static', 'django.template.context_processors.tz', 'django.template.context_processors.request', ] } } ] WSGI_APPLICATION = 'checklists.wsgi.application' DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': get_env('POSTGRES_DB'), 'USER': get_env('POSTGRES_USER'), 'PASSWORD': get_env('POSTGRES_PASSWORD'), 'HOST': get_env('POSTGRES_HOST'), 'PORT': get_env('POSTGRES_PORT'), } } AUTH_USER_MODEL = 'user_profile.UserProfile' AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] LANGUAGE_CODE = 'ru-ru' TIME_ZONE = 'Asia/Novokuznetsk' USE_I18N = True USE_L10N = True USE_TZ = True STATIC_URL = '/static/' STATIC_ROOT = os.path.join(BASE_DIR, 'static') MEDIA_URL = '/media/' MEDIA_ROOT = os.path.join(BASE_DIR, 'media') REST_FRAMEWORK = { 'DEFAULT_RENDERER_CLASSES': [ 'rest_framework.renderers.JSONRenderer', ], 'DEFAULT_AUTHENTICATION_CLASSES': ( 'rest_framework.authentication.TokenAuthentication', ), 'DEFAULT_PERMISSION_CLASSES': ( 'rest_framework.permissions.IsAuthenticated', ), } CHOICES_SEPARATOR = ';' APPEND_SLASH = True DJOSER = { 'SERIALIZERS': { 'activation': 
'djoser.serializers.ActivationSerializer', 'password_reset': 'djoser.serializers.SendEmailResetSerializer', 'password_reset_confirm': 'djoser.serializers.PasswordResetConfirmSerializer', 'password_reset_confirm_retype': 'djoser.serializers.PasswordResetConfirmRetypeSerializer', 'set_password': 'djoser.serializers.SetPasswordSerializer', 'set_password_retype': 'djoser.serializers.SetPasswordRetypeSerializer', 'set_username': 'djoser.serializers.SetUsernameSerializer', 'set_username_retype': 'djoser.serializers.SetUsernameRetypeSerializer', 'username_reset': 'djoser.serializers.SendEmailResetSerializer', 'username_reset_confirm': 'djoser.serializers.UsernameResetConfirmSerializer', 'username_reset_confirm_retype': 'djoser.serializers.UsernameResetConfirmRetypeSerializer', 'user_create': 'djoser.serializers.UserCreateSerializer', 'user_create_password_retype': 'djoser.serializers.UserCreatePasswordRetypeSerializer', 'user_delete': 'djoser.serializers.UserDeleteSerializer', 'user': 'lists.serializers.UserSerializer', 'current_user': 'lists.serializers.UserSerializer', 'token': 'djoser.serializers.TokenSerializer', 'token_create': 'djoser.serializers.TokenCreateSerializer', }, } Q_CLUSTER = { 'name': 'DjangORM', 'workers': 2, 'retry': 360, 'timeout': 300, 'queue_limit': 50, 'bulk': 10, 'ack_failures': True, 'orm': 'default' } EMAIL_BACKEND = 'post_office.EmailBackend' EMAIL_HOST = 'smtp.yandex.ru' EMAIL_PORT = '587' EMAIL_HOST_USER = get_env('EMAIL_USER') EMAIL_HOST_PASSWORD = get_env('EMAIL_PASSWORD') DEFAULT_FROM_EMAIL = get_env('EMAIL_USER') EMAIL_USE_TLS = True EMAIL_USE_SSL = False POST_OFFICE = { 'DEFAULT_PRIORITY': 'now', 'TEMPLATE_ENGINE': 'post_office', 'BACKENDS': { 'default': 'django_q_email.backends.DjangoQBackend', } } DADATA_KEY = get_env('VUE_APP_DADATA_KEY')
28.56962
99
0.652636
4a1505a02413b18b51702e5897cdfbf0e7a28d35
1,635
py
Python
02_Logistic_regression_and_multilayer_perceptron/nearest_neighbor_tf.py
jastarex/DL_Notes
4da8c5c90283d25655abde95263e44432aad343a
[ "Apache-2.0" ]
203
2017-11-19T08:45:03.000Z
2022-02-17T08:39:02.000Z
02_Logistic_regression_and_multilayer_perceptron/nearest_neighbor_tf.py
datianshi21/DeepLearningCourseCodes
4da8c5c90283d25655abde95263e44432aad343a
[ "Apache-2.0" ]
3
2017-09-19T17:18:46.000Z
2017-10-23T02:30:05.000Z
02_Logistic_regression_and_multilayer_perceptron/nearest_neighbor_tf.py
datianshi21/DeepLearningCourseCodes
4da8c5c90283d25655abde95263e44432aad343a
[ "Apache-2.0" ]
145
2017-11-19T17:21:23.000Z
2022-02-17T08:39:01.000Z
# coding: utf-8 # # K近邻算法, TensorFlow示例 # ### K Nearest Neighbor (KNN) Example # In[1]: from __future__ import print_function import numpy as np import tensorflow as tf # Import MNIST data, 准备MNIST数据输入 from tensorflow.examples.tutorials.mnist import input_data mnist = input_data.read_data_sets("/tmp/data/", one_hot=True) # In[2]: # In this example, we limit mnist data,限制候选比较数据的数目 Xtr, Ytr = mnist.train.next_batch(5000) #5000 for training (nn candidates) Xte, Yte = mnist.test.next_batch(200) #200 for testing # tf Graph Input,TensorFlow图模型的输入定义 xtr = tf.placeholder("float", [None, 784]) xte = tf.placeholder("float", [784]) # Nearest Neighbor calculation using L1 Distance,最小近邻算法的L1曼哈顿距离计算 # Calculate L1 Distance distance = tf.reduce_sum(tf.abs(tf.add(xtr, tf.negative(xte))), reduction_indices=1) # Prediction: Get min distance index (Nearest neighbor),最小距离近邻的预测 pred = tf.arg_min(distance, 0) accuracy = 0. # Initialize the variables (i.e. assign their default value),初始化所有参数 init = tf.global_variables_initializer() # In[3]: # Start training with tf.Session() as sess: sess.run(init) # loop over test data for i in range(len(Xte)): # Get nearest neighbor nn_index = sess.run(pred, feed_dict={xtr: Xtr, xte: Xte[i, :]}) # Get nearest neighbor class label and compare it to its true label print "Test", i, "Prediction:", np.argmax(Ytr[nn_index]), "True Class:", np.argmax(Yte[i]) # Calculate accuracy if np.argmax(Ytr[nn_index]) == np.argmax(Yte[i]): accuracy += 1./len(Xte) print("Done!") print("Accuracy:", accuracy)
28.189655
110
0.692355
4a1506337d799d076e1621b46f45c35214be33c3
398
py
Python
chainervr/__init__.py
otsubo/chainervr
92ab44ea68ed930342698e9da1809eca2ac064f7
[ "MIT" ]
7
2018-07-31T02:18:52.000Z
2021-01-22T05:14:35.000Z
chainervr/__init__.py
otsubo/chainervr
92ab44ea68ed930342698e9da1809eca2ac064f7
[ "MIT" ]
1
2018-08-20T13:37:17.000Z
2018-08-20T13:37:17.000Z
chainervr/__init__.py
otsubo/chainervr
92ab44ea68ed930342698e9da1809eca2ac064f7
[ "MIT" ]
1
2018-08-20T13:33:04.000Z
2018-08-20T13:33:04.000Z
#!/usr/bin/env python # -*- coding: utf-8 -*- # Author: Yuki Furuta <furushchev@jsk.imi.i.u-tokyo.ac.jp> import pkg_resources __dist__ = pkg_resources.get_distribution("chainer-video-representations") __version__ = __dist__.version from . import datasets from . import functions from . import links from . import models from . import training from . import utils from . import visualizations
19.9
74
0.758794
4a150697ff124cf8ffcfd0bcb232144af634319a
1,177
py
Python
Level 2/challenge2.2.py
esslushy/GoogleFoobarChallenges
68dc0b86feb4561de0a900389bc9e36a00608b2a
[ "MIT" ]
null
null
null
Level 2/challenge2.2.py
esslushy/GoogleFoobarChallenges
68dc0b86feb4561de0a900389bc9e36a00608b2a
[ "MIT" ]
null
null
null
Level 2/challenge2.2.py
esslushy/GoogleFoobarChallenges
68dc0b86feb4561de0a900389bc9e36a00608b2a
[ "MIT" ]
null
null
null
from functools import reduce def solution(xs): # If it is only one number return that value if len(xs) == 1: return str(xs[0]) # Get out positive and negative numbers. Ignore zeroes pos_nums = [] neg_nums = [] for num in xs: if num > 0: pos_nums.append(num) elif num < 0: neg_nums.append(num) # If negative array has an odd length, remove the smallest negative if len(neg_nums) % 2 == 1: neg_nums.remove(max(neg_nums)) # Uses max because smallest refers only to magnitude and -1 > -2 in terms of magnitude. # Multiply together positive array. if len(pos_nums) > 0: mult_pos_nums = reduce(lambda x, y: x*y, pos_nums) else: mult_pos_nums = 1 # Multiply together negative array if len(neg_nums) > 0: mult_neg_nums = reduce(lambda x, y: x*y, neg_nums) else: mult_neg_nums = 1 # Check if all 0 if len(neg_nums) == 0 and len(pos_nums) == 0: return "0" return str(mult_pos_nums * mult_neg_nums) print(solution([2, 0, 2, 2, 0])) print(solution([-2, -3, 4, -5])) print(solution([0, -0, 0])) print(solution([-1]))
30.179487
126
0.605777
4a15071a7debf69a51cd1a3ecb94793978a5b2de
1,790
py
Python
tests/gcp/operators/test_bigtable_system.py
suensummit/airflow
37a342d0e96a91ce2d34085e225a4e86f54c4e21
[ "Apache-2.0" ]
1
2017-06-25T14:18:15.000Z
2017-06-25T14:18:15.000Z
tests/gcp/operators/test_bigtable_system.py
suensummit/airflow
37a342d0e96a91ce2d34085e225a4e86f54c4e21
[ "Apache-2.0" ]
3
2020-07-07T20:39:24.000Z
2021-09-29T17:34:46.000Z
tests/gcp/operators/test_bigtable_system.py
suensummit/airflow
37a342d0e96a91ce2d34085e225a4e86f54c4e21
[ "Apache-2.0" ]
1
2020-11-04T03:17:51.000Z
2020-11-04T03:17:51.000Z
# -*- coding: utf-8 -*- # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import unittest from tests.gcp.operators.test_bigtable_system_helper import GCPBigtableTestHelper from tests.gcp.utils.base_gcp_system_test_case import SKIP_TEST_WARNING, TestDagGcpSystem from tests.gcp.utils.gcp_authenticator import GCP_BIGTABLE_KEY @unittest.skipIf(TestDagGcpSystem.skip_check(GCP_BIGTABLE_KEY), SKIP_TEST_WARNING) class BigTableExampleDagsSystemTest(TestDagGcpSystem): def __init__(self, method_name='runTest'): super().__init__( method_name, dag_id='example_gcp_bigtable_operators', require_local_executor=True, gcp_key=GCP_BIGTABLE_KEY) self.helper = GCPBigtableTestHelper() def test_run_example_dag_gcs_bigtable(self): self._run_dag() def tearDown(self): self.gcp_authenticator.gcp_authenticate() try: self.helper.delete_instance() finally: self.gcp_authenticator.gcp_revoke_authentication() super().tearDown()
38.913043
89
0.746927