body_hash
stringlengths
64
64
body
stringlengths
23
109k
docstring
stringlengths
1
57k
path
stringlengths
4
198
name
stringlengths
1
115
repository_name
stringlengths
7
111
repository_stars
float64
0
191k
lang
stringclasses
1 value
body_without_docstring
stringlengths
14
108k
unified
stringlengths
45
133k
31236c36d1ce6eb871c2a302fec1ca498bcecacee4364715e57cf305a1085608
@property def reference_voltage(self): 'The reference voltage for the pin' return 3.3
The reference voltage for the pin
adafruit_seesaw/analoginput.py
reference_voltage
mcauser/Adafruit_CircuitPython_seesaw
1
python
@property def reference_voltage(self): return 3.3
@property def reference_voltage(self): return 3.3<|docstring|>The reference voltage for the pin<|endoftext|>
185679afb3f8c0e6eab95e589cf758fb4c18d9f1c34b813bda83096684f88243
def __init__(self, zone_identifier): '\n :param zone_identifier: \n ' self.zone_identifier = zone_identifier self.value = None
:param zone_identifier:
jdcloud_sdk/services/starshield/apis/ChangeAutomaticHTTPSRewritesSettingRequest.py
__init__
jdcloud-apigateway/jdcloud-sdk-python
14
python
def __init__(self, zone_identifier): '\n \n ' self.zone_identifier = zone_identifier self.value = None
def __init__(self, zone_identifier): '\n \n ' self.zone_identifier = zone_identifier self.value = None<|docstring|>:param zone_identifier:<|endoftext|>
ccf3eb364257ab6dd3d90e3e857875512fe135791a708ccbfbf2197910618926
def setValue(self, value): '\n :param value: (Optional) on - 开启;off - 关闭\n ' self.value = value
:param value: (Optional) on - 开启;off - 关闭
jdcloud_sdk/services/starshield/apis/ChangeAutomaticHTTPSRewritesSettingRequest.py
setValue
jdcloud-apigateway/jdcloud-sdk-python
14
python
def setValue(self, value): '\n \n ' self.value = value
def setValue(self, value): '\n \n ' self.value = value<|docstring|>:param value: (Optional) on - 开启;off - 关闭<|endoftext|>
1c3939942466bf030e8ab54fc6fca453daf88a81c6ad5b0f6fc1d2415720ea9f
def run_gateway(gateway_factory: GatewayFactory) -> None: 'Run a gateway.' LOGGER.info('Starting gateway') try: asyncio.run(start_gateway(gateway_factory)) except KeyboardInterrupt: pass finally: LOGGER.info('Exiting CLI')
Run a gateway.
aiomysensors/cli/helper.py
run_gateway
MartinHjelmare/aiomysensors
4
python
def run_gateway(gateway_factory: GatewayFactory) -> None: LOGGER.info('Starting gateway') try: asyncio.run(start_gateway(gateway_factory)) except KeyboardInterrupt: pass finally: LOGGER.info('Exiting CLI')
def run_gateway(gateway_factory: GatewayFactory) -> None: LOGGER.info('Starting gateway') try: asyncio.run(start_gateway(gateway_factory)) except KeyboardInterrupt: pass finally: LOGGER.info('Exiting CLI')<|docstring|>Run a gateway.<|endoftext|>
99ff836132776377db15db2a6ab4f3d5d9b59e11dd4c8c396f1e07f307569220
async def start_gateway(gateway_factory: GatewayFactory) -> None: 'Start the gateway.' gateway = (await gateway_factory()) async with gateway: while True: try: (await handle_gateway(gateway)) except MissingNodeError as err: LOGGER.debug('Missing node: %s', err.node_id) except MissingChildError as err: LOGGER.debug('Missing child: %s', err.child_id) except UnsupportedMessageError as err: LOGGER.warning('Unsupported message: %s', err) except AIOMySensorsError as err: LOGGER.error("Error '%s'", err) break
Start the gateway.
aiomysensors/cli/helper.py
start_gateway
MartinHjelmare/aiomysensors
4
python
async def start_gateway(gateway_factory: GatewayFactory) -> None: gateway = (await gateway_factory()) async with gateway: while True: try: (await handle_gateway(gateway)) except MissingNodeError as err: LOGGER.debug('Missing node: %s', err.node_id) except MissingChildError as err: LOGGER.debug('Missing child: %s', err.child_id) except UnsupportedMessageError as err: LOGGER.warning('Unsupported message: %s', err) except AIOMySensorsError as err: LOGGER.error("Error '%s'", err) break
async def start_gateway(gateway_factory: GatewayFactory) -> None: gateway = (await gateway_factory()) async with gateway: while True: try: (await handle_gateway(gateway)) except MissingNodeError as err: LOGGER.debug('Missing node: %s', err.node_id) except MissingChildError as err: LOGGER.debug('Missing child: %s', err.child_id) except UnsupportedMessageError as err: LOGGER.warning('Unsupported message: %s', err) except AIOMySensorsError as err: LOGGER.error("Error '%s'", err) break<|docstring|>Start the gateway.<|endoftext|>
7c8a56f65999dfc4e8e7c80a00f72c95a02e70ffad1855e666ebb3585f6d2850
async def handle_gateway(gateway: Gateway) -> None: 'Handle the gateway calls.' async for msg in gateway.listen(): level = (logging.DEBUG if (msg.message_type == 9) else logging.INFO) LOGGER.log(level, 'Received: %s', msg)
Handle the gateway calls.
aiomysensors/cli/helper.py
handle_gateway
MartinHjelmare/aiomysensors
4
python
async def handle_gateway(gateway: Gateway) -> None: async for msg in gateway.listen(): level = (logging.DEBUG if (msg.message_type == 9) else logging.INFO) LOGGER.log(level, 'Received: %s', msg)
async def handle_gateway(gateway: Gateway) -> None: async for msg in gateway.listen(): level = (logging.DEBUG if (msg.message_type == 9) else logging.INFO) LOGGER.log(level, 'Received: %s', msg)<|docstring|>Handle the gateway calls.<|endoftext|>
176077067a57f2de4ad8a9d0a737a16de8cf2f0ded66586ab4bf441c91606898
def setup_platform(hass, config, add_entities, discovery_info=None): 'Set up the Verisure heatpump.' import jsonpath jsonpath = jsonpath.jsonpath global HEAT_PUMPS hub.update_overview() if int(hub.config.get(CONF_CLIMATE, 1)): HEAT_PUMPS = hub.get('$.heatPumps') if HEAT_PUMPS: for heat_pump in HEAT_PUMPS[0]: device_label = jsonpath(heat_pump, '$.deviceLabel')[0] add_entities([VerisureHeatPump(device_label)])
Set up the Verisure heatpump.
homeassistant/components/verisure/climate.py
setup_platform
jamiewalters/home-assistant
0
python
def setup_platform(hass, config, add_entities, discovery_info=None): import jsonpath jsonpath = jsonpath.jsonpath global HEAT_PUMPS hub.update_overview() if int(hub.config.get(CONF_CLIMATE, 1)): HEAT_PUMPS = hub.get('$.heatPumps') if HEAT_PUMPS: for heat_pump in HEAT_PUMPS[0]: device_label = jsonpath(heat_pump, '$.deviceLabel')[0] add_entities([VerisureHeatPump(device_label)])
def setup_platform(hass, config, add_entities, discovery_info=None): import jsonpath jsonpath = jsonpath.jsonpath global HEAT_PUMPS hub.update_overview() if int(hub.config.get(CONF_CLIMATE, 1)): HEAT_PUMPS = hub.get('$.heatPumps') if HEAT_PUMPS: for heat_pump in HEAT_PUMPS[0]: device_label = jsonpath(heat_pump, '$.deviceLabel')[0] add_entities([VerisureHeatPump(device_label)])<|docstring|>Set up the Verisure heatpump.<|endoftext|>
d28a0f418eb8d035b24d44736240e17fd362447f74b5218107a05d81346effb4
def __init__(self, heatpumpid): 'Initialize the climate device.' import jsonpath self.jsonpath = jsonpath.jsonpath self._target_temperature = None self._current_operation = None self._current_fan_mode = None self._current_swing_mode = None self._on = None self.heatpump_id = heatpumpid self._support_flags = ((((SUPPORT_TARGET_TEMPERATURE | SUPPORT_FAN_MODE) | SUPPORT_OPERATION_MODE) | SUPPORT_ON_OFF) | SUPPORT_SWING_MODE) self._unit_of_measurement = TEMP_CELSIUS self._fan_list = ['Auto', 'Low', 'Medium_Low', 'Medium', 'Medium_High', 'High'] self._operation_list = ['heat', 'cool', 'auto', 'dry', 'fan_only'] self._swing_list = ['Auto', '0_Degrees', '30_Degrees', '60_Degrees', '90_Degrees'] self._config_date = None self.sync_data()
Initialize the climate device.
homeassistant/components/verisure/climate.py
__init__
jamiewalters/home-assistant
0
python
def __init__(self, heatpumpid): import jsonpath self.jsonpath = jsonpath.jsonpath self._target_temperature = None self._current_operation = None self._current_fan_mode = None self._current_swing_mode = None self._on = None self.heatpump_id = heatpumpid self._support_flags = ((((SUPPORT_TARGET_TEMPERATURE | SUPPORT_FAN_MODE) | SUPPORT_OPERATION_MODE) | SUPPORT_ON_OFF) | SUPPORT_SWING_MODE) self._unit_of_measurement = TEMP_CELSIUS self._fan_list = ['Auto', 'Low', 'Medium_Low', 'Medium', 'Medium_High', 'High'] self._operation_list = ['heat', 'cool', 'auto', 'dry', 'fan_only'] self._swing_list = ['Auto', '0_Degrees', '30_Degrees', '60_Degrees', '90_Degrees'] self._config_date = None self.sync_data()
def __init__(self, heatpumpid): import jsonpath self.jsonpath = jsonpath.jsonpath self._target_temperature = None self._current_operation = None self._current_fan_mode = None self._current_swing_mode = None self._on = None self.heatpump_id = heatpumpid self._support_flags = ((((SUPPORT_TARGET_TEMPERATURE | SUPPORT_FAN_MODE) | SUPPORT_OPERATION_MODE) | SUPPORT_ON_OFF) | SUPPORT_SWING_MODE) self._unit_of_measurement = TEMP_CELSIUS self._fan_list = ['Auto', 'Low', 'Medium_Low', 'Medium', 'Medium_High', 'High'] self._operation_list = ['heat', 'cool', 'auto', 'dry', 'fan_only'] self._swing_list = ['Auto', '0_Degrees', '30_Degrees', '60_Degrees', '90_Degrees'] self._config_date = None self.sync_data()<|docstring|>Initialize the climate device.<|endoftext|>
54b137bdd8f4c954f37b4616342821092afacd06ae79abd5692860f6655fa6f9
def sync_data(self): 'Update data from Verisure.' import dateutil.parser global HEAT_PUMPS hub.update_overview() HEAT_PUMPS = hub.get('$.heatPumps')[0] self.heatpumpstate = self.jsonpath(HEAT_PUMPS, (("$.[?(@.deviceLabel == '" + self.heatpump_id) + "')]"))[0] self._name = self.jsonpath(self.heatpumpstate, '$.area')[0] sync_date = dateutil.parser.parse(self.jsonpath(self.heatpumpstate, '$.heatPumpConfig.changedTime')[0]) self._current_temperature = self.jsonpath(self.heatpumpstate, '$.latestClimateSample.temperature')[0] if ((self._config_date is None) or (self._config_date < sync_date)): self._target_temperature = self.jsonpath(self.heatpumpstate, '$.heatPumpConfig.targetTemperature')[0] current_operation = self.jsonpath(self.heatpumpstate, '$.heatPumpConfig.mode')[0] self._current_operation = VERISIRE_HASS_OP_MODE[current_operation] self._current_fan_mode = self.jsonpath(self.heatpumpstate, '$.heatPumpConfig.fanSpeed')[0].title() self._current_swing_mode = self.jsonpath(self.heatpumpstate, '$.heatPumpConfig.airSwingDirection.vertical')[0].title() self._on = bool((self.jsonpath(self.heatpumpstate, '$.heatPumpConfig.power')[0] == 'ON')) self._config_date = sync_date
Update data from Verisure.
homeassistant/components/verisure/climate.py
sync_data
jamiewalters/home-assistant
0
python
def sync_data(self): import dateutil.parser global HEAT_PUMPS hub.update_overview() HEAT_PUMPS = hub.get('$.heatPumps')[0] self.heatpumpstate = self.jsonpath(HEAT_PUMPS, (("$.[?(@.deviceLabel == '" + self.heatpump_id) + "')]"))[0] self._name = self.jsonpath(self.heatpumpstate, '$.area')[0] sync_date = dateutil.parser.parse(self.jsonpath(self.heatpumpstate, '$.heatPumpConfig.changedTime')[0]) self._current_temperature = self.jsonpath(self.heatpumpstate, '$.latestClimateSample.temperature')[0] if ((self._config_date is None) or (self._config_date < sync_date)): self._target_temperature = self.jsonpath(self.heatpumpstate, '$.heatPumpConfig.targetTemperature')[0] current_operation = self.jsonpath(self.heatpumpstate, '$.heatPumpConfig.mode')[0] self._current_operation = VERISIRE_HASS_OP_MODE[current_operation] self._current_fan_mode = self.jsonpath(self.heatpumpstate, '$.heatPumpConfig.fanSpeed')[0].title() self._current_swing_mode = self.jsonpath(self.heatpumpstate, '$.heatPumpConfig.airSwingDirection.vertical')[0].title() self._on = bool((self.jsonpath(self.heatpumpstate, '$.heatPumpConfig.power')[0] == 'ON')) self._config_date = sync_date
def sync_data(self): import dateutil.parser global HEAT_PUMPS hub.update_overview() HEAT_PUMPS = hub.get('$.heatPumps')[0] self.heatpumpstate = self.jsonpath(HEAT_PUMPS, (("$.[?(@.deviceLabel == '" + self.heatpump_id) + "')]"))[0] self._name = self.jsonpath(self.heatpumpstate, '$.area')[0] sync_date = dateutil.parser.parse(self.jsonpath(self.heatpumpstate, '$.heatPumpConfig.changedTime')[0]) self._current_temperature = self.jsonpath(self.heatpumpstate, '$.latestClimateSample.temperature')[0] if ((self._config_date is None) or (self._config_date < sync_date)): self._target_temperature = self.jsonpath(self.heatpumpstate, '$.heatPumpConfig.targetTemperature')[0] current_operation = self.jsonpath(self.heatpumpstate, '$.heatPumpConfig.mode')[0] self._current_operation = VERISIRE_HASS_OP_MODE[current_operation] self._current_fan_mode = self.jsonpath(self.heatpumpstate, '$.heatPumpConfig.fanSpeed')[0].title() self._current_swing_mode = self.jsonpath(self.heatpumpstate, '$.heatPumpConfig.airSwingDirection.vertical')[0].title() self._on = bool((self.jsonpath(self.heatpumpstate, '$.heatPumpConfig.power')[0] == 'ON')) self._config_date = sync_date<|docstring|>Update data from Verisure.<|endoftext|>
b3e0896f6a232c6f88ddfe999c7b9818831b513a6af47611b8ad226b118989ad
@property def supported_features(self): 'Return the list of supported features.' return self._support_flags
Return the list of supported features.
homeassistant/components/verisure/climate.py
supported_features
jamiewalters/home-assistant
0
python
@property def supported_features(self): return self._support_flags
@property def supported_features(self): return self._support_flags<|docstring|>Return the list of supported features.<|endoftext|>
18e5542644ee499b4eedf143299a968254a1471113cf65184f7cd7238be318d1
@property def should_poll(self): 'Return the polling state.' return True
Return the polling state.
homeassistant/components/verisure/climate.py
should_poll
jamiewalters/home-assistant
0
python
@property def should_poll(self): return True
@property def should_poll(self): return True<|docstring|>Return the polling state.<|endoftext|>
9760ef283a881aae619812db99cf36824040d181aa608fa1d2616cc10fe2266a
@property def name(self): 'Return the name of the climate device.' return self._name
Return the name of the climate device.
homeassistant/components/verisure/climate.py
name
jamiewalters/home-assistant
0
python
@property def name(self): return self._name
@property def name(self): return self._name<|docstring|>Return the name of the climate device.<|endoftext|>
34baa453ddfdda25a6d112e16c115878e4f38dee73c6864e774599663c22013f
@property def target_temperature_step(self): 'Representation target temperature step.' return 1.0
Representation target temperature step.
homeassistant/components/verisure/climate.py
target_temperature_step
jamiewalters/home-assistant
0
python
@property def target_temperature_step(self): return 1.0
@property def target_temperature_step(self): return 1.0<|docstring|>Representation target temperature step.<|endoftext|>
7862b525aaa2230593c785f2eca9d7305539768cf040a5988d5a0e351c536a0a
@property def temperature_unit(self): 'Return the unit of measurement.' return self._unit_of_measurement
Return the unit of measurement.
homeassistant/components/verisure/climate.py
temperature_unit
jamiewalters/home-assistant
0
python
@property def temperature_unit(self): return self._unit_of_measurement
@property def temperature_unit(self): return self._unit_of_measurement<|docstring|>Return the unit of measurement.<|endoftext|>
fd634be450fe80bff08e57558c891792b7f862c58a70b426b1b36c4d6e95c159
@property def current_temperature(self): 'Return the current temperature.' return self._current_temperature
Return the current temperature.
homeassistant/components/verisure/climate.py
current_temperature
jamiewalters/home-assistant
0
python
@property def current_temperature(self): return self._current_temperature
@property def current_temperature(self): return self._current_temperature<|docstring|>Return the current temperature.<|endoftext|>
8956becf511eeffd4700bce86969deeaddb186bea95107045dc3a33ab2acc600
@property def target_temperature(self): 'Return the temperature we try to reach.' return self._target_temperature
Return the temperature we try to reach.
homeassistant/components/verisure/climate.py
target_temperature
jamiewalters/home-assistant
0
python
@property def target_temperature(self): return self._target_temperature
@property def target_temperature(self): return self._target_temperature<|docstring|>Return the temperature we try to reach.<|endoftext|>
4e08b54443fc34d7eede67724ad3a97998479599933a75dcf56dfcc4e31578f2
@property def current_operation(self): 'Return current operation ie. heat, cool, idle.' return self._current_operation
Return current operation ie. heat, cool, idle.
homeassistant/components/verisure/climate.py
current_operation
jamiewalters/home-assistant
0
python
@property def current_operation(self): return self._current_operation
@property def current_operation(self): return self._current_operation<|docstring|>Return current operation ie. heat, cool, idle.<|endoftext|>
2d2e9421fd18aa71a69e6079424bf76b24bdfe44537e781f7140a88740011910
@property def operation_list(self): 'Return the list of available operation modes.' return self._operation_list
Return the list of available operation modes.
homeassistant/components/verisure/climate.py
operation_list
jamiewalters/home-assistant
0
python
@property def operation_list(self): return self._operation_list
@property def operation_list(self): return self._operation_list<|docstring|>Return the list of available operation modes.<|endoftext|>
271d478a0c6b415886713b36fd2e827f29e2ef16f85ec34404364f8d29c10da4
@property def is_on(self): 'Return true if the device is on.' return self._on
Return true if the device is on.
homeassistant/components/verisure/climate.py
is_on
jamiewalters/home-assistant
0
python
@property def is_on(self): return self._on
@property def is_on(self): return self._on<|docstring|>Return true if the device is on.<|endoftext|>
6b961a90a4e496467e3e080b00e111214f4bf4dca959931ed4b1679e1ca78939
@property def current_fan_mode(self): 'Return the fan setting.' return self._current_fan_mode
Return the fan setting.
homeassistant/components/verisure/climate.py
current_fan_mode
jamiewalters/home-assistant
0
python
@property def current_fan_mode(self): return self._current_fan_mode
@property def current_fan_mode(self): return self._current_fan_mode<|docstring|>Return the fan setting.<|endoftext|>
f714d8874cc56cd14b7207af54e0b735a703f8f53e68c8307fd20036e1cdf7bc
@property def fan_list(self): 'Return the list of available fan modes.' return self._fan_list
Return the list of available fan modes.
homeassistant/components/verisure/climate.py
fan_list
jamiewalters/home-assistant
0
python
@property def fan_list(self): return self._fan_list
@property def fan_list(self): return self._fan_list<|docstring|>Return the list of available fan modes.<|endoftext|>
75e044ff8e69bb1fdbf2913ca576b7d144db258c18f3b20410784282065ddd39
def set_temperature(self, **kwargs): 'Set new target temperatures.' if self._on: if (kwargs.get(ATTR_TEMPERATURE) is not None): self._target_temperature = kwargs.get(ATTR_TEMPERATURE) hub.session.set_heat_pump_target_temperature(self.heatpump_id, self._target_temperature) self.schedule_update_ha_state()
Set new target temperatures.
homeassistant/components/verisure/climate.py
set_temperature
jamiewalters/home-assistant
0
python
def set_temperature(self, **kwargs): if self._on: if (kwargs.get(ATTR_TEMPERATURE) is not None): self._target_temperature = kwargs.get(ATTR_TEMPERATURE) hub.session.set_heat_pump_target_temperature(self.heatpump_id, self._target_temperature) self.schedule_update_ha_state()
def set_temperature(self, **kwargs): if self._on: if (kwargs.get(ATTR_TEMPERATURE) is not None): self._target_temperature = kwargs.get(ATTR_TEMPERATURE) hub.session.set_heat_pump_target_temperature(self.heatpump_id, self._target_temperature) self.schedule_update_ha_state()<|docstring|>Set new target temperatures.<|endoftext|>
f38ce135b6c8a55e4fd18bc6ff811dd070a1df0809c7dcbcefc5e5f3a387a58f
def set_swing_mode(self, swing_mode): 'Set new swing setting.' if self._on: hub.session.set_heat_pump_airswingdirection(self.heatpump_id, swing_mode.upper()) self._current_swing_mode = swing_mode self.schedule_update_ha_state()
Set new swing setting.
homeassistant/components/verisure/climate.py
set_swing_mode
jamiewalters/home-assistant
0
python
def set_swing_mode(self, swing_mode): if self._on: hub.session.set_heat_pump_airswingdirection(self.heatpump_id, swing_mode.upper()) self._current_swing_mode = swing_mode self.schedule_update_ha_state()
def set_swing_mode(self, swing_mode): if self._on: hub.session.set_heat_pump_airswingdirection(self.heatpump_id, swing_mode.upper()) self._current_swing_mode = swing_mode self.schedule_update_ha_state()<|docstring|>Set new swing setting.<|endoftext|>
a6733cf1dcb3dc71fd4befad88d69021a1c92bd16f0edff28a9b9333583e914b
def set_fan_mode(self, fan_mode): 'Set new target temperature.' if self._on: hub.session.set_heat_pump_fan_speed(self.heatpump_id, fan_mode.upper()) self._current_fan_mode = fan_mode self.schedule_update_ha_state()
Set new target temperature.
homeassistant/components/verisure/climate.py
set_fan_mode
jamiewalters/home-assistant
0
python
def set_fan_mode(self, fan_mode): if self._on: hub.session.set_heat_pump_fan_speed(self.heatpump_id, fan_mode.upper()) self._current_fan_mode = fan_mode self.schedule_update_ha_state()
def set_fan_mode(self, fan_mode): if self._on: hub.session.set_heat_pump_fan_speed(self.heatpump_id, fan_mode.upper()) self._current_fan_mode = fan_mode self.schedule_update_ha_state()<|docstring|>Set new target temperature.<|endoftext|>
5c1e9b1f300aee2b45e0559b4ec081fba19d721e5994f6848cb7fb87f14627f1
def set_operation_mode(self, operation_mode): 'Set new target temperature.' if self._on: hub.session.set_heat_pump_mode(self.heatpump_id, HASS_VERISURE_OP_MODE[operation_mode]) self._current_operation = operation_mode self.schedule_update_ha_state()
Set new target temperature.
homeassistant/components/verisure/climate.py
set_operation_mode
jamiewalters/home-assistant
0
python
def set_operation_mode(self, operation_mode): if self._on: hub.session.set_heat_pump_mode(self.heatpump_id, HASS_VERISURE_OP_MODE[operation_mode]) self._current_operation = operation_mode self.schedule_update_ha_state()
def set_operation_mode(self, operation_mode): if self._on: hub.session.set_heat_pump_mode(self.heatpump_id, HASS_VERISURE_OP_MODE[operation_mode]) self._current_operation = operation_mode self.schedule_update_ha_state()<|docstring|>Set new target temperature.<|endoftext|>
6f255cc7298a709b91d16b54032e35c8471780de25a628c784e8fc52ed5bae43
@property def current_swing_mode(self): 'Return the swing setting.' return self._current_swing_mode
Return the swing setting.
homeassistant/components/verisure/climate.py
current_swing_mode
jamiewalters/home-assistant
0
python
@property def current_swing_mode(self): return self._current_swing_mode
@property def current_swing_mode(self): return self._current_swing_mode<|docstring|>Return the swing setting.<|endoftext|>
86053e8ebc75abe5a5f3503c9386f6f59146ef8ef76e9765d8711691e1e6a14a
@property def swing_list(self): 'List of available swing modes.' return self._swing_list
List of available swing modes.
homeassistant/components/verisure/climate.py
swing_list
jamiewalters/home-assistant
0
python
@property def swing_list(self): return self._swing_list
@property def swing_list(self): return self._swing_list<|docstring|>List of available swing modes.<|endoftext|>
57324563f7cba948967f6ebb6a7e87cbe238a46e2942e4f5a935a0575c7c72a6
def turn_on(self): 'Turn on.' hub.session.set_heat_pump_power(self.heatpump_id, 'ON') self._on = True self.schedule_update_ha_state()
Turn on.
homeassistant/components/verisure/climate.py
turn_on
jamiewalters/home-assistant
0
python
def turn_on(self): hub.session.set_heat_pump_power(self.heatpump_id, 'ON') self._on = True self.schedule_update_ha_state()
def turn_on(self): hub.session.set_heat_pump_power(self.heatpump_id, 'ON') self._on = True self.schedule_update_ha_state()<|docstring|>Turn on.<|endoftext|>
9bdd245778e29ac740ce91dd8717acaf18e93030e22b9e3e07a6e82137da2a66
def turn_off(self): 'Turn off.' hub.session.set_heat_pump_power(self.heatpump_id, 'OFF') self._on = False self.schedule_update_ha_state()
Turn off.
homeassistant/components/verisure/climate.py
turn_off
jamiewalters/home-assistant
0
python
def turn_off(self): hub.session.set_heat_pump_power(self.heatpump_id, 'OFF') self._on = False self.schedule_update_ha_state()
def turn_off(self): hub.session.set_heat_pump_power(self.heatpump_id, 'OFF') self._on = False self.schedule_update_ha_state()<|docstring|>Turn off.<|endoftext|>
0d9a5932cc4888a027a2ba68760ef392f9be3496a410656f6d69bf7cba4fcca3
def update(self): 'Update self.' self.sync_data()
Update self.
homeassistant/components/verisure/climate.py
update
jamiewalters/home-assistant
0
python
def update(self): self.sync_data()
def update(self): self.sync_data()<|docstring|>Update self.<|endoftext|>
f16becd7287bef0d05c271315b669c33ad7d19964900409ac43565b5e5d55eca
def __getattr__(cls, name): "Return the enum member matching `name`\n We use __getattr__ instead of descriptors or inserting into the enum\n class' __dict__ in order to support `name` and `value` being both\n properties for enum members (which live in the class' __dict__) and\n enum members themselves.\n " try: return cls._member_map_[name.upper()] except KeyError: raise AttributeError(name)
Return the enum member matching `name` We use __getattr__ instead of descriptors or inserting into the enum class' __dict__ in order to support `name` and `value` being both properties for enum members (which live in the class' __dict__) and enum members themselves.
sdk/redis/azure-mgmt-redis/azure/mgmt/redis/models/_redis_management_client_enums.py
__getattr__
JayDoubleu/azure-sdk-for-python
3
python
def __getattr__(cls, name): "Return the enum member matching `name`\n We use __getattr__ instead of descriptors or inserting into the enum\n class' __dict__ in order to support `name` and `value` being both\n properties for enum members (which live in the class' __dict__) and\n enum members themselves.\n " try: return cls._member_map_[name.upper()] except KeyError: raise AttributeError(name)
def __getattr__(cls, name): "Return the enum member matching `name`\n We use __getattr__ instead of descriptors or inserting into the enum\n class' __dict__ in order to support `name` and `value` being both\n properties for enum members (which live in the class' __dict__) and\n enum members themselves.\n " try: return cls._member_map_[name.upper()] except KeyError: raise AttributeError(name)<|docstring|>Return the enum member matching `name` We use __getattr__ instead of descriptors or inserting into the enum class' __dict__ in order to support `name` and `value` being both properties for enum members (which live in the class' __dict__) and enum members themselves.<|endoftext|>
869e3fdb25e6d2b8e8630f799999118aa29622b950e3a2a293f5b5d6b4c379a8
def current_commit(git_dir): '\n Retrieve the current HEAD commit hash from a git repository.\n\n Parameters:\n git_dir (str) : the location of the git repository .git directory.\n ' out = run(git_dir, 'rev-parse --short HEAD') if (out.returncode == SUCCESS): return out.stdout.strip('\n') else: print(('Git Error: ' + out.stderr.strip('\n'))) sys.exit(1)
Retrieve the current HEAD commit hash from a git repository. Parameters: git_dir (str) : the location of the git repository .git directory.
git.py
current_commit
autamus/merge-commits
2
python
def current_commit(git_dir): '\n Retrieve the current HEAD commit hash from a git repository.\n\n Parameters:\n git_dir (str) : the location of the git repository .git directory.\n ' out = run(git_dir, 'rev-parse --short HEAD') if (out.returncode == SUCCESS): return out.stdout.strip('\n') else: print(('Git Error: ' + out.stderr.strip('\n'))) sys.exit(1)
def current_commit(git_dir): '\n Retrieve the current HEAD commit hash from a git repository.\n\n Parameters:\n git_dir (str) : the location of the git repository .git directory.\n ' out = run(git_dir, 'rev-parse --short HEAD') if (out.returncode == SUCCESS): return out.stdout.strip('\n') else: print(('Git Error: ' + out.stderr.strip('\n'))) sys.exit(1)<|docstring|>Retrieve the current HEAD commit hash from a git repository. Parameters: git_dir (str) : the location of the git repository .git directory.<|endoftext|>
aa55580ae7474f70795c121018fcae010e76dcd81c495d433ca212de7e875681
def commit_parents(git_dir, current_commit): '\n Retrive a list of the parents for a given commit hash.\n\n Parameters:\n git_dir (str) : the location of the git repository .git directory.\n current_commit (str) : the hash of a commit from within the repository.\n ' out = run(git_dir, f'show -s --pretty=%P {current_commit}') if (out.returncode == SUCCESS): return out.stdout.strip('\n').split() else: print(('Git Error: ' + out.stderr.strip('\n'))) sys.exit(1)
Retrive a list of the parents for a given commit hash. Parameters: git_dir (str) : the location of the git repository .git directory. current_commit (str) : the hash of a commit from within the repository.
git.py
commit_parents
autamus/merge-commits
2
python
def commit_parents(git_dir, current_commit): '\n Retrive a list of the parents for a given commit hash.\n\n Parameters:\n git_dir (str) : the location of the git repository .git directory.\n current_commit (str) : the hash of a commit from within the repository.\n ' out = run(git_dir, f'show -s --pretty=%P {current_commit}') if (out.returncode == SUCCESS): return out.stdout.strip('\n').split() else: print(('Git Error: ' + out.stderr.strip('\n'))) sys.exit(1)
def commit_parents(git_dir, current_commit): '\n Retrive a list of the parents for a given commit hash.\n\n Parameters:\n git_dir (str) : the location of the git repository .git directory.\n current_commit (str) : the hash of a commit from within the repository.\n ' out = run(git_dir, f'show -s --pretty=%P {current_commit}') if (out.returncode == SUCCESS): return out.stdout.strip('\n').split() else: print(('Git Error: ' + out.stderr.strip('\n'))) sys.exit(1)<|docstring|>Retrive a list of the parents for a given commit hash. Parameters: git_dir (str) : the location of the git repository .git directory. current_commit (str) : the hash of a commit from within the repository.<|endoftext|>
d2087a6c298a539c94ee2471e4dc6fe05f59765447c0325f56fa0a41f3ad333f
def create_attn_model(in_ch=4): 'Constructs attention network for ViMON.\n ' encoder = Encoder(AttentionBlock, [in_ch, 32, 32, 32, 32], [32, 32, 32, 32, 32]) decoder = Decoder(AttentionBlock, [64, 64, 64, 64, 64], [32, 32, 32, 32, 32]) model = AttentionModule(encoder, decoder, in_ch, mlp_ch=32, n_ch=512, n_sp=4) return model
Constructs attention network for ViMON.
ocrb/vimon/networks/attention.py
create_attn_model
ecker-lab/object-centric-representation-benchmark
25
python
def create_attn_model(in_ch=4): '\n ' encoder = Encoder(AttentionBlock, [in_ch, 32, 32, 32, 32], [32, 32, 32, 32, 32]) decoder = Decoder(AttentionBlock, [64, 64, 64, 64, 64], [32, 32, 32, 32, 32]) model = AttentionModule(encoder, decoder, in_ch, mlp_ch=32, n_ch=512, n_sp=4) return model
def create_attn_model(in_ch=4): '\n ' encoder = Encoder(AttentionBlock, [in_ch, 32, 32, 32, 32], [32, 32, 32, 32, 32]) decoder = Decoder(AttentionBlock, [64, 64, 64, 64, 64], [32, 32, 32, 32, 32]) model = AttentionModule(encoder, decoder, in_ch, mlp_ch=32, n_ch=512, n_sp=4) return model<|docstring|>Constructs attention network for ViMON.<|endoftext|>
b9d27fff6c92e0c42bf5f184ac09bbcb8c32f0d1a61d15e10a89ebd03b1d29a3
def walk(cat, depth=1): 'Return a generator walking a THREDDS data catalog for datasets.\n\n Parameters\n ----------\n cat : TDSCatalog\n THREDDS catalog.\n depth : int\n Maximum recursive depth. Setting 0 will return only datasets within the top-level catalog. If None,\n depth is set to 1000.\n ' (yield from cat.datasets.items()) if (depth is None): depth = 1000 if (depth > 0): for (name, ref) in cat.catalog_refs.items(): child = ref.follow() (yield from walk(child, depth=(depth - 1)))
Return a generator walking a THREDDS data catalog for datasets. Parameters ---------- cat : TDSCatalog THREDDS catalog. depth : int Maximum recursive depth. Setting 0 will return only datasets within the top-level catalog. If None, depth is set to 1000.
stac_ingest/utils/tds.py
walk
crim-ca/stac-ingest
0
python
def walk(cat, depth=1): 'Return a generator walking a THREDDS data catalog for datasets.\n\n Parameters\n ----------\n cat : TDSCatalog\n THREDDS catalog.\n depth : int\n Maximum recursive depth. Setting 0 will return only datasets within the top-level catalog. If None,\n depth is set to 1000.\n ' (yield from cat.datasets.items()) if (depth is None): depth = 1000 if (depth > 0): for (name, ref) in cat.catalog_refs.items(): child = ref.follow() (yield from walk(child, depth=(depth - 1)))
def walk(cat, depth=1): 'Return a generator walking a THREDDS data catalog for datasets.\n\n Parameters\n ----------\n cat : TDSCatalog\n THREDDS catalog.\n depth : int\n Maximum recursive depth. Setting 0 will return only datasets within the top-level catalog. If None,\n depth is set to 1000.\n ' (yield from cat.datasets.items()) if (depth is None): depth = 1000 if (depth > 0): for (name, ref) in cat.catalog_refs.items(): child = ref.follow() (yield from walk(child, depth=(depth - 1)))<|docstring|>Return a generator walking a THREDDS data catalog for datasets. Parameters ---------- cat : TDSCatalog THREDDS catalog. depth : int Maximum recursive depth. Setting 0 will return only datasets within the top-level catalog. If None, depth is set to 1000.<|endoftext|>
02a3687cd33af72eeea8e4eeacd910236cba869d1a45ed126f2a53e09fd2830f
def attrs_from_ds(ds): 'Extract attributes from TDS Dataset.' url = ds.access_urls['NCML'] attrs = attrs_from_ncml(url) attrs['__services__'] = ds.access_urls return attrs
Extract attributes from TDS Dataset.
stac_ingest/utils/tds.py
attrs_from_ds
crim-ca/stac-ingest
0
python
def attrs_from_ds(ds): url = ds.access_urls['NCML'] attrs = attrs_from_ncml(url) attrs['__services__'] = ds.access_urls return attrs
def attrs_from_ds(ds): url = ds.access_urls['NCML'] attrs = attrs_from_ncml(url) attrs['__services__'] = ds.access_urls return attrs<|docstring|>Extract attributes from TDS Dataset.<|endoftext|>
4ba02057413ad28625645a4348ad8b47275daf8356d95218431a4ba53a0be8d9
def attrs_from_ncml(url): 'Extract attributes from NcML file.\n\n Parameters\n ----------\n url : str\n Link to NcML service of THREDDS server for a dataset.\n\n Returns\n -------\n dict\n Global attribute values keyed by facet names, with variable attributes in `__variable__` nested dict, and\n additional specialized attributes in `__group__` nested dict.\n ' import lxml.etree import requests parser = lxml.etree.XMLParser(encoding='UTF-8') ns = {'ncml': 'http://www.unidata.ucar.edu/namespaces/netcdf/ncml-2.2'} xml = requests.get(url).content doc = lxml.etree.fromstring(xml, parser=parser) nc = doc.xpath('/ncml:netcdf', namespaces=ns)[0] out = _attrib_to_dict(nc.xpath('ncml:attribute', namespaces=ns)) gr = {} for group in nc.xpath('ncml:group', namespaces=ns): gr[group.attrib['name']] = _attrib_to_dict(group.xpath('ncml:attribute', namespaces=ns)) va = {} for variable in nc.xpath('ncml:variable', namespaces=ns): if ('_CoordinateAxisType' in variable.xpath('ncml:attribute/@name', namespaces=ns)): continue va[variable.attrib['name']] = _attrib_to_dict(variable.xpath('ncml:attribute', namespaces=ns)) out['__group__'] = gr out['__variable__'] = va return out
Extract attributes from NcML file. Parameters ---------- url : str Link to NcML service of THREDDS server for a dataset. Returns ------- dict Global attribute values keyed by facet names, with variable attributes in `__variable__` nested dict, and additional specialized attributes in `__group__` nested dict.
stac_ingest/utils/tds.py
attrs_from_ncml
crim-ca/stac-ingest
0
python
def attrs_from_ncml(url): 'Extract attributes from NcML file.\n\n Parameters\n ----------\n url : str\n Link to NcML service of THREDDS server for a dataset.\n\n Returns\n -------\n dict\n Global attribute values keyed by facet names, with variable attributes in `__variable__` nested dict, and\n additional specialized attributes in `__group__` nested dict.\n ' import lxml.etree import requests parser = lxml.etree.XMLParser(encoding='UTF-8') ns = {'ncml': 'http://www.unidata.ucar.edu/namespaces/netcdf/ncml-2.2'} xml = requests.get(url).content doc = lxml.etree.fromstring(xml, parser=parser) nc = doc.xpath('/ncml:netcdf', namespaces=ns)[0] out = _attrib_to_dict(nc.xpath('ncml:attribute', namespaces=ns)) gr = {} for group in nc.xpath('ncml:group', namespaces=ns): gr[group.attrib['name']] = _attrib_to_dict(group.xpath('ncml:attribute', namespaces=ns)) va = {} for variable in nc.xpath('ncml:variable', namespaces=ns): if ('_CoordinateAxisType' in variable.xpath('ncml:attribute/@name', namespaces=ns)): continue va[variable.attrib['name']] = _attrib_to_dict(variable.xpath('ncml:attribute', namespaces=ns)) out['__group__'] = gr out['__variable__'] = va return out
def attrs_from_ncml(url): 'Extract attributes from NcML file.\n\n Parameters\n ----------\n url : str\n Link to NcML service of THREDDS server for a dataset.\n\n Returns\n -------\n dict\n Global attribute values keyed by facet names, with variable attributes in `__variable__` nested dict, and\n additional specialized attributes in `__group__` nested dict.\n ' import lxml.etree import requests parser = lxml.etree.XMLParser(encoding='UTF-8') ns = {'ncml': 'http://www.unidata.ucar.edu/namespaces/netcdf/ncml-2.2'} xml = requests.get(url).content doc = lxml.etree.fromstring(xml, parser=parser) nc = doc.xpath('/ncml:netcdf', namespaces=ns)[0] out = _attrib_to_dict(nc.xpath('ncml:attribute', namespaces=ns)) gr = {} for group in nc.xpath('ncml:group', namespaces=ns): gr[group.attrib['name']] = _attrib_to_dict(group.xpath('ncml:attribute', namespaces=ns)) va = {} for variable in nc.xpath('ncml:variable', namespaces=ns): if ('_CoordinateAxisType' in variable.xpath('ncml:attribute/@name', namespaces=ns)): continue va[variable.attrib['name']] = _attrib_to_dict(variable.xpath('ncml:attribute', namespaces=ns)) out['__group__'] = gr out['__variable__'] = va return out<|docstring|>Extract attributes from NcML file. Parameters ---------- url : str Link to NcML service of THREDDS server for a dataset. Returns ------- dict Global attribute values keyed by facet names, with variable attributes in `__variable__` nested dict, and additional specialized attributes in `__group__` nested dict.<|endoftext|>
4a8901df37f19f2c58840560f34dc07cb0d27198ccb8448957d3ee24372e8d81
def _attrib_to_dict(elems): 'Convert element attributes to dictionary.\n\n Ignore attributes with names starting with _\n ' hidden_prefix = '_' out = {} for e in elems: a = e.attrib if a['name'].startswith(hidden_prefix): continue out[a['name']] = a['value'] return out
Convert element attributes to dictionary. Ignore attributes with names starting with _
stac_ingest/utils/tds.py
_attrib_to_dict
crim-ca/stac-ingest
0
python
def _attrib_to_dict(elems): 'Convert element attributes to dictionary.\n\n Ignore attributes with names starting with _\n ' hidden_prefix = '_' out = {} for e in elems: a = e.attrib if a['name'].startswith(hidden_prefix): continue out[a['name']] = a['value'] return out
def _attrib_to_dict(elems): 'Convert element attributes to dictionary.\n\n Ignore attributes with names starting with _\n ' hidden_prefix = '_' out = {} for e in elems: a = e.attrib if a['name'].startswith(hidden_prefix): continue out[a['name']] = a['value'] return out<|docstring|>Convert element attributes to dictionary. Ignore attributes with names starting with _<|endoftext|>
e777230d105a58590e66b06ba239db2bb3c2f131d5a2d1bd0648b0ca28829989
def get_report_filters(self, name, **kwargs): 'Retrieves report filters # noqa: E501\n\n Retrieves report filters # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_report_filters(name, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: The report name (required)\n :return: RunReport\n If the method is called asynchronously,\n returns the request thread.\n ' kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_report_filters_with_http_info(name, **kwargs) else: data = self.get_report_filters_with_http_info(name, **kwargs) return data
Retrieves report filters # noqa: E501 Retrieves report filters # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_report_filters(name, async_req=True) >>> result = thread.get() :param async_req bool :param str name: The report name (required) :return: RunReport If the method is called asynchronously, returns the request thread.
src/clients/ctm_saas_client/api/reporting_api.py
get_report_filters
IceT-M/ctm-python-client
5
python
def get_report_filters(self, name, **kwargs): 'Retrieves report filters # noqa: E501\n\n Retrieves report filters # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_report_filters(name, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: The report name (required)\n :return: RunReport\n If the method is called asynchronously,\n returns the request thread.\n ' kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_report_filters_with_http_info(name, **kwargs) else: data = self.get_report_filters_with_http_info(name, **kwargs) return data
def get_report_filters(self, name, **kwargs): 'Retrieves report filters # noqa: E501\n\n Retrieves report filters # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_report_filters(name, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: The report name (required)\n :return: RunReport\n If the method is called asynchronously,\n returns the request thread.\n ' kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_report_filters_with_http_info(name, **kwargs) else: data = self.get_report_filters_with_http_info(name, **kwargs) return data<|docstring|>Retrieves report filters # noqa: E501 Retrieves report filters # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_report_filters(name, async_req=True) >>> result = thread.get() :param async_req bool :param str name: The report name (required) :return: RunReport If the method is called asynchronously, returns the request thread.<|endoftext|>
c6cd8f63db40e4de0e552872ff47095c115302c6f0961a638ffca6ad70cdf759
def get_report_filters_with_http_info(self, name, **kwargs): 'Retrieves report filters # noqa: E501\n\n Retrieves report filters # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_report_filters_with_http_info(name, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: The report name (required)\n :return: RunReport\n If the method is called asynchronously,\n returns the request thread.\n ' all_params = ['name'] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for (key, val) in six.iteritems(params['kwargs']): if (key not in all_params): raise TypeError(("Got an unexpected keyword argument '%s' to method get_report_filters" % key)) params[key] = val del params['kwargs'] if (self.api_client.client_side_validation and (('name' not in params) or (params['name'] is None))): raise ValueError('Missing the required parameter `name` when calling `get_report_filters`') collection_formats = {} path_params = {} if ('name' in params): path_params['name'] = params['name'] query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None header_params['Accept'] = self.api_client.select_header_accept(['application/json']) auth_settings = ['ApiKeyAuth'] return self.api_client.call_api('/reporting/reportFilters/{name}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='RunReport', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)
Retrieves report filters # noqa: E501 Retrieves report filters # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_report_filters_with_http_info(name, async_req=True) >>> result = thread.get() :param async_req bool :param str name: The report name (required) :return: RunReport If the method is called asynchronously, returns the request thread.
src/clients/ctm_saas_client/api/reporting_api.py
get_report_filters_with_http_info
IceT-M/ctm-python-client
5
python
def get_report_filters_with_http_info(self, name, **kwargs): 'Retrieves report filters # noqa: E501\n\n Retrieves report filters # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_report_filters_with_http_info(name, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: The report name (required)\n :return: RunReport\n If the method is called asynchronously,\n returns the request thread.\n ' all_params = ['name'] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for (key, val) in six.iteritems(params['kwargs']): if (key not in all_params): raise TypeError(("Got an unexpected keyword argument '%s' to method get_report_filters" % key)) params[key] = val del params['kwargs'] if (self.api_client.client_side_validation and (('name' not in params) or (params['name'] is None))): raise ValueError('Missing the required parameter `name` when calling `get_report_filters`') collection_formats = {} path_params = {} if ('name' in params): path_params['name'] = params['name'] query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None header_params['Accept'] = self.api_client.select_header_accept(['application/json']) auth_settings = ['ApiKeyAuth'] return self.api_client.call_api('/reporting/reportFilters/{name}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='RunReport', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)
def get_report_filters_with_http_info(self, name, **kwargs): 'Retrieves report filters # noqa: E501\n\n Retrieves report filters # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_report_filters_with_http_info(name, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: The report name (required)\n :return: RunReport\n If the method is called asynchronously,\n returns the request thread.\n ' all_params = ['name'] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for (key, val) in six.iteritems(params['kwargs']): if (key not in all_params): raise TypeError(("Got an unexpected keyword argument '%s' to method get_report_filters" % key)) params[key] = val del params['kwargs'] if (self.api_client.client_side_validation and (('name' not in params) or (params['name'] is None))): raise ValueError('Missing the required parameter `name` when calling `get_report_filters`') collection_formats = {} path_params = {} if ('name' in params): path_params['name'] = params['name'] query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None header_params['Accept'] = self.api_client.select_header_accept(['application/json']) auth_settings = ['ApiKeyAuth'] return self.api_client.call_api('/reporting/reportFilters/{name}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='RunReport', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)<|docstring|>Retrieves report filters # noqa: E501 Retrieves report 
filters # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_report_filters_with_http_info(name, async_req=True) >>> result = thread.get() :param async_req bool :param str name: The report name (required) :return: RunReport If the method is called asynchronously, returns the request thread.<|endoftext|>
e64a20729ca09f398a3573327e2ca04001ec94b1528927de37431afaea433126
def get_report_status(self, report_id, **kwargs): 'Retrieves status information for a report generation request based on the report ID # noqa: E501\n\n Retrieves status information for a report generation request based on the report ID # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_report_status(report_id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str report_id: The ID of the report (required)\n :return: RunReportInfo\n If the method is called asynchronously,\n returns the request thread.\n ' kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_report_status_with_http_info(report_id, **kwargs) else: data = self.get_report_status_with_http_info(report_id, **kwargs) return data
Retrieves status information for a report generation request based on the report ID # noqa: E501 Retrieves status information for a report generation request based on the report ID # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_report_status(report_id, async_req=True) >>> result = thread.get() :param async_req bool :param str report_id: The ID of the report (required) :return: RunReportInfo If the method is called asynchronously, returns the request thread.
src/clients/ctm_saas_client/api/reporting_api.py
get_report_status
IceT-M/ctm-python-client
5
python
def get_report_status(self, report_id, **kwargs): 'Retrieves status information for a report generation request based on the report ID # noqa: E501\n\n Retrieves status information for a report generation request based on the report ID # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_report_status(report_id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str report_id: The ID of the report (required)\n :return: RunReportInfo\n If the method is called asynchronously,\n returns the request thread.\n ' kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_report_status_with_http_info(report_id, **kwargs) else: data = self.get_report_status_with_http_info(report_id, **kwargs) return data
def get_report_status(self, report_id, **kwargs): 'Retrieves status information for a report generation request based on the report ID # noqa: E501\n\n Retrieves status information for a report generation request based on the report ID # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_report_status(report_id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str report_id: The ID of the report (required)\n :return: RunReportInfo\n If the method is called asynchronously,\n returns the request thread.\n ' kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_report_status_with_http_info(report_id, **kwargs) else: data = self.get_report_status_with_http_info(report_id, **kwargs) return data<|docstring|>Retrieves status information for a report generation request based on the report ID # noqa: E501 Retrieves status information for a report generation request based on the report ID # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_report_status(report_id, async_req=True) >>> result = thread.get() :param async_req bool :param str report_id: The ID of the report (required) :return: RunReportInfo If the method is called asynchronously, returns the request thread.<|endoftext|>
4edf69a39a50b00ff1fa73417099ccd36320a79637550b6ffdb73aef3639db5c
def get_report_status_with_http_info(self, report_id, **kwargs): 'Retrieves status information for a report generation request based on the report ID # noqa: E501\n\n Retrieves status information for a report generation request based on the report ID # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_report_status_with_http_info(report_id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str report_id: The ID of the report (required)\n :return: RunReportInfo\n If the method is called asynchronously,\n returns the request thread.\n ' all_params = ['report_id'] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for (key, val) in six.iteritems(params['kwargs']): if (key not in all_params): raise TypeError(("Got an unexpected keyword argument '%s' to method get_report_status" % key)) params[key] = val del params['kwargs'] if (self.api_client.client_side_validation and (('report_id' not in params) or (params['report_id'] is None))): raise ValueError('Missing the required parameter `report_id` when calling `get_report_status`') collection_formats = {} path_params = {} if ('report_id' in params): path_params['reportId'] = params['report_id'] query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None header_params['Accept'] = self.api_client.select_header_accept(['application/json']) auth_settings = ['ApiKeyAuth'] return self.api_client.call_api('/reporting/status/{reportId}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='RunReportInfo', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), 
_preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)
Retrieves status information for a report generation request based on the report ID # noqa: E501 Retrieves status information for a report generation request based on the report ID # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_report_status_with_http_info(report_id, async_req=True) >>> result = thread.get() :param async_req bool :param str report_id: The ID of the report (required) :return: RunReportInfo If the method is called asynchronously, returns the request thread.
src/clients/ctm_saas_client/api/reporting_api.py
get_report_status_with_http_info
IceT-M/ctm-python-client
5
python
def get_report_status_with_http_info(self, report_id, **kwargs): 'Retrieves status information for a report generation request based on the report ID # noqa: E501\n\n Retrieves status information for a report generation request based on the report ID # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_report_status_with_http_info(report_id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str report_id: The ID of the report (required)\n :return: RunReportInfo\n If the method is called asynchronously,\n returns the request thread.\n ' all_params = ['report_id'] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for (key, val) in six.iteritems(params['kwargs']): if (key not in all_params): raise TypeError(("Got an unexpected keyword argument '%s' to method get_report_status" % key)) params[key] = val del params['kwargs'] if (self.api_client.client_side_validation and (('report_id' not in params) or (params['report_id'] is None))): raise ValueError('Missing the required parameter `report_id` when calling `get_report_status`') collection_formats = {} path_params = {} if ('report_id' in params): path_params['reportId'] = params['report_id'] query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None header_params['Accept'] = self.api_client.select_header_accept(['application/json']) auth_settings = ['ApiKeyAuth'] return self.api_client.call_api('/reporting/status/{reportId}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='RunReportInfo', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), 
_preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)
def get_report_status_with_http_info(self, report_id, **kwargs): 'Retrieves status information for a report generation request based on the report ID # noqa: E501\n\n Retrieves status information for a report generation request based on the report ID # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_report_status_with_http_info(report_id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str report_id: The ID of the report (required)\n :return: RunReportInfo\n If the method is called asynchronously,\n returns the request thread.\n ' all_params = ['report_id'] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for (key, val) in six.iteritems(params['kwargs']): if (key not in all_params): raise TypeError(("Got an unexpected keyword argument '%s' to method get_report_status" % key)) params[key] = val del params['kwargs'] if (self.api_client.client_side_validation and (('report_id' not in params) or (params['report_id'] is None))): raise ValueError('Missing the required parameter `report_id` when calling `get_report_status`') collection_formats = {} path_params = {} if ('report_id' in params): path_params['reportId'] = params['report_id'] query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None header_params['Accept'] = self.api_client.select_header_accept(['application/json']) auth_settings = ['ApiKeyAuth'] return self.api_client.call_api('/reporting/status/{reportId}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='RunReportInfo', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), 
_preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)<|docstring|>Retrieves status information for a report generation request based on the report ID # noqa: E501 Retrieves status information for a report generation request based on the report ID # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_report_status_with_http_info(report_id, async_req=True) >>> result = thread.get() :param async_req bool :param str report_id: The ID of the report (required) :return: RunReportInfo If the method is called asynchronously, returns the request thread.<|endoftext|>
e6647d4e5fda3901c005c87fcabd13054400e878037e5f93d3554c20ff8685fb
def run_report(self, report_run, **kwargs): 'Run a report # noqa: E501\n\n Sends a request to generate a report asynchronously and returns the request status. If the report is shared, add [shared:] before the name. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.run_report(report_run, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param RunReport report_run: The report generation parameters (required)\n :return: RunReportInfo\n If the method is called asynchronously,\n returns the request thread.\n ' kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.run_report_with_http_info(report_run, **kwargs) else: data = self.run_report_with_http_info(report_run, **kwargs) return data
Run a report # noqa: E501 Sends a request to generate a report asynchronously and returns the request status. If the report is shared, add [shared:] before the name. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.run_report(report_run, async_req=True) >>> result = thread.get() :param async_req bool :param RunReport report_run: The report generation parameters (required) :return: RunReportInfo If the method is called asynchronously, returns the request thread.
src/clients/ctm_saas_client/api/reporting_api.py
run_report
IceT-M/ctm-python-client
5
python
def run_report(self, report_run, **kwargs): 'Run a report # noqa: E501\n\n Sends a request to generate a report asynchronously and returns the request status. If the report is shared, add [shared:] before the name. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.run_report(report_run, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param RunReport report_run: The report generation parameters (required)\n :return: RunReportInfo\n If the method is called asynchronously,\n returns the request thread.\n ' kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.run_report_with_http_info(report_run, **kwargs) else: data = self.run_report_with_http_info(report_run, **kwargs) return data
def run_report(self, report_run, **kwargs): 'Run a report # noqa: E501\n\n Sends a request to generate a report asynchronously and returns the request status. If the report is shared, add [shared:] before the name. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.run_report(report_run, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param RunReport report_run: The report generation parameters (required)\n :return: RunReportInfo\n If the method is called asynchronously,\n returns the request thread.\n ' kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.run_report_with_http_info(report_run, **kwargs) else: data = self.run_report_with_http_info(report_run, **kwargs) return data<|docstring|>Run a report # noqa: E501 Sends a request to generate a report asynchronously and returns the request status. If the report is shared, add [shared:] before the name. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.run_report(report_run, async_req=True) >>> result = thread.get() :param async_req bool :param RunReport report_run: The report generation parameters (required) :return: RunReportInfo If the method is called asynchronously, returns the request thread.<|endoftext|>
a66afbe471049699f516f671c9196e4dcad9305254075ddeee5f796ff5338e7e
def run_report_with_http_info(self, report_run, **kwargs): 'Run a report # noqa: E501\n\n Sends a request to generate a report asynchronously and returns the request status. If the report is shared, add [shared:] before the name. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.run_report_with_http_info(report_run, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param RunReport report_run: The report generation parameters (required)\n :return: RunReportInfo\n If the method is called asynchronously,\n returns the request thread.\n ' all_params = ['report_run'] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for (key, val) in six.iteritems(params['kwargs']): if (key not in all_params): raise TypeError(("Got an unexpected keyword argument '%s' to method run_report" % key)) params[key] = val del params['kwargs'] if (self.api_client.client_side_validation and (('report_run' not in params) or (params['report_run'] is None))): raise ValueError('Missing the required parameter `report_run` when calling `run_report`') collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if ('report_run' in params): body_params = params['report_run'] header_params['Accept'] = self.api_client.select_header_accept(['application/json']) auth_settings = ['ApiKeyAuth'] return self.api_client.call_api('/reporting/report', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='RunReportInfo', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), 
_request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)
Run a report # noqa: E501 Sends a request to generate a report asynchronously and returns the request status. If the report is shared, add [shared:] before the name. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.run_report_with_http_info(report_run, async_req=True) >>> result = thread.get() :param async_req bool :param RunReport report_run: The report generation parameters (required) :return: RunReportInfo If the method is called asynchronously, returns the request thread.
src/clients/ctm_saas_client/api/reporting_api.py
run_report_with_http_info
IceT-M/ctm-python-client
5
python
def run_report_with_http_info(self, report_run, **kwargs): 'Run a report # noqa: E501\n\n Sends a request to generate a report asynchronously and returns the request status. If the report is shared, add [shared:] before the name. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.run_report_with_http_info(report_run, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param RunReport report_run: The report generation parameters (required)\n :return: RunReportInfo\n If the method is called asynchronously,\n returns the request thread.\n ' all_params = ['report_run'] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for (key, val) in six.iteritems(params['kwargs']): if (key not in all_params): raise TypeError(("Got an unexpected keyword argument '%s' to method run_report" % key)) params[key] = val del params['kwargs'] if (self.api_client.client_side_validation and (('report_run' not in params) or (params['report_run'] is None))): raise ValueError('Missing the required parameter `report_run` when calling `run_report`') collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if ('report_run' in params): body_params = params['report_run'] header_params['Accept'] = self.api_client.select_header_accept(['application/json']) auth_settings = ['ApiKeyAuth'] return self.api_client.call_api('/reporting/report', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='RunReportInfo', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), 
_request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)
def run_report_with_http_info(self, report_run, **kwargs): 'Run a report # noqa: E501\n\n Sends a request to generate a report asynchronously and returns the request status. If the report is shared, add [shared:] before the name. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.run_report_with_http_info(report_run, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param RunReport report_run: The report generation parameters (required)\n :return: RunReportInfo\n If the method is called asynchronously,\n returns the request thread.\n ' all_params = ['report_run'] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for (key, val) in six.iteritems(params['kwargs']): if (key not in all_params): raise TypeError(("Got an unexpected keyword argument '%s' to method run_report" % key)) params[key] = val del params['kwargs'] if (self.api_client.client_side_validation and (('report_run' not in params) or (params['report_run'] is None))): raise ValueError('Missing the required parameter `report_run` when calling `run_report`') collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if ('report_run' in params): body_params = params['report_run'] header_params['Accept'] = self.api_client.select_header_accept(['application/json']) auth_settings = ['ApiKeyAuth'] return self.api_client.call_api('/reporting/report', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='RunReportInfo', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), 
_request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)<|docstring|>Run a report # noqa: E501 Sends a request to generate a report asynchronously and returns the request status. If the report is shared, add [shared:] before the name. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.run_report_with_http_info(report_run, async_req=True) >>> result = thread.get() :param async_req bool :param RunReport report_run: The report generation parameters (required) :return: RunReportInfo If the method is called asynchronously, returns the request thread.<|endoftext|>
85583d735d6b6ee5c65262a06075b15b7faba9635fd4ae96c4e7b7baa447ef10
@handler.cron() def get(self): 'Handle a GET request.' assert self.task for status in ['Processed', 'Duplicate']: testcases = data_types.Testcase.query(ndb_utils.is_true(data_types.Testcase.open), ndb_utils.is_false(data_types.Testcase.one_time_crasher_flag), (data_types.Testcase.status == status)) for testcase in testcases: try: tasks.add_task(self.task, testcase.key.id(), testcase.job_type, queue=tasks.queue_for_testcase(testcase)) except Exception: logs.log_error('Failed to add task.') continue
Handle a GET request.
src/appengine/handlers/cron/recurring_tasks.py
get
almuthanna-athamina/clusterfuzz
5,023
python
@handler.cron() def get(self): assert self.task for status in ['Processed', 'Duplicate']: testcases = data_types.Testcase.query(ndb_utils.is_true(data_types.Testcase.open), ndb_utils.is_false(data_types.Testcase.one_time_crasher_flag), (data_types.Testcase.status == status)) for testcase in testcases: try: tasks.add_task(self.task, testcase.key.id(), testcase.job_type, queue=tasks.queue_for_testcase(testcase)) except Exception: logs.log_error('Failed to add task.') continue
@handler.cron() def get(self): assert self.task for status in ['Processed', 'Duplicate']: testcases = data_types.Testcase.query(ndb_utils.is_true(data_types.Testcase.open), ndb_utils.is_false(data_types.Testcase.one_time_crasher_flag), (data_types.Testcase.status == status)) for testcase in testcases: try: tasks.add_task(self.task, testcase.key.id(), testcase.job_type, queue=tasks.queue_for_testcase(testcase)) except Exception: logs.log_error('Failed to add task.') continue<|docstring|>Handle a GET request.<|endoftext|>
77e7228ab2e2de7c316bafe1c4a95390b068acbf4d4cd40cc8076081bc54e199
def register(self): '\n returns a fake registration payload\n ' return {'name': NAME, 'mac': MAC, 'calib': CALIB, 'rev': PROTOCOL_REVISION}
returns a fake registration payload
tools/tess_simulator_tstamps.py
register
STARS4ALL/tessdb-reports
0
python
def register(self): '\n \n ' return {'name': NAME, 'mac': MAC, 'calib': CALIB, 'rev': PROTOCOL_REVISION}
def register(self): '\n \n ' return {'name': NAME, 'mac': MAC, 'calib': CALIB, 'rev': PROTOCOL_REVISION}<|docstring|>returns a fake registration payload<|endoftext|>
e74f56bdb22abd0c5d6c735b4932ba559034a22372749bad937a755d05ea78ef
def sample(self): '\n returns a fake TESS sample\n ' self.seq += 1 self.freq = round((self.freq + random.uniform((- 1), 1)), 3) self.mag = round((self.mag + random.uniform((- 0.5), 0.5)), 2) self.tamb = round((self.tamb + random.uniform((- 2), 2)), 1) self.tsky = (self.tamb - 30) self.tstamp = datetime.datetime.utcnow().strftime(TSTAMP_FORMAT) return {'seq': self.seq, 'name': NAME, 'freq': self.freq, 'mag': self.mag, 'tamb': self.tamb, 'tsky': self.tsky, 'rev': PROTOCOL_REVISION, 'tstamp': self.tstamp}
returns a fake TESS sample
tools/tess_simulator_tstamps.py
sample
STARS4ALL/tessdb-reports
0
python
def sample(self): '\n \n ' self.seq += 1 self.freq = round((self.freq + random.uniform((- 1), 1)), 3) self.mag = round((self.mag + random.uniform((- 0.5), 0.5)), 2) self.tamb = round((self.tamb + random.uniform((- 2), 2)), 1) self.tsky = (self.tamb - 30) self.tstamp = datetime.datetime.utcnow().strftime(TSTAMP_FORMAT) return {'seq': self.seq, 'name': NAME, 'freq': self.freq, 'mag': self.mag, 'tamb': self.tamb, 'tsky': self.tsky, 'rev': PROTOCOL_REVISION, 'tstamp': self.tstamp}
def sample(self): '\n \n ' self.seq += 1 self.freq = round((self.freq + random.uniform((- 1), 1)), 3) self.mag = round((self.mag + random.uniform((- 0.5), 0.5)), 2) self.tamb = round((self.tamb + random.uniform((- 2), 2)), 1) self.tsky = (self.tamb - 30) self.tstamp = datetime.datetime.utcnow().strftime(TSTAMP_FORMAT) return {'seq': self.seq, 'name': NAME, 'freq': self.freq, 'mag': self.mag, 'tamb': self.tamb, 'tsky': self.tsky, 'rev': PROTOCOL_REVISION, 'tstamp': self.tstamp}<|docstring|>returns a fake TESS sample<|endoftext|>
a79c75ec12f4847604e2cb4803f334b5be9ca56fe7c89a53d00f014d59df1498
def maybe_download(filename, expected_bytes): "Download a file if not present, and make sure it's the right size." if (not os.path.exists(filename)): (filename, _) = urlretrieve((url + filename), filename) statinfo = os.stat(filename) if (statinfo.st_size == expected_bytes): print(('Found and verified %s' % filename)) else: print(statinfo.st_size) raise Exception((('Failed to verify ' + filename) + '. Can you get to it with a browser?')) return filename
Download a file if not present, and make sure it's the right size.
dl_assignment_5_CBOW.py
maybe_download
mr-cloud/deep-learning-udacity
0
python
def maybe_download(filename, expected_bytes): if (not os.path.exists(filename)): (filename, _) = urlretrieve((url + filename), filename) statinfo = os.stat(filename) if (statinfo.st_size == expected_bytes): print(('Found and verified %s' % filename)) else: print(statinfo.st_size) raise Exception((('Failed to verify ' + filename) + '. Can you get to it with a browser?')) return filename
def maybe_download(filename, expected_bytes): if (not os.path.exists(filename)): (filename, _) = urlretrieve((url + filename), filename) statinfo = os.stat(filename) if (statinfo.st_size == expected_bytes): print(('Found and verified %s' % filename)) else: print(statinfo.st_size) raise Exception((('Failed to verify ' + filename) + '. Can you get to it with a browser?')) return filename<|docstring|>Download a file if not present, and make sure it's the right size.<|endoftext|>
6cc6a34a890c0f4e53966b6bf1d19e02872bd4a3086138cda182203f886a73de
def read_data(filename): 'Extract the first file enclosed in a zip file as a list of words' with zipfile.ZipFile(filename) as f: data = tf.compat.as_str(f.read(f.namelist()[0])).split() return data
Extract the first file enclosed in a zip file as a list of words
dl_assignment_5_CBOW.py
read_data
mr-cloud/deep-learning-udacity
0
python
def read_data(filename): with zipfile.ZipFile(filename) as f: data = tf.compat.as_str(f.read(f.namelist()[0])).split() return data
def read_data(filename): with zipfile.ZipFile(filename) as f: data = tf.compat.as_str(f.read(f.namelist()[0])).split() return data<|docstring|>Extract the first file enclosed in a zip file as a list of words<|endoftext|>
a3c13f690e746c2acf9456bdf702020b54bf32f06554044229413c8de526f313
def __init__(self, dbname=None, dbuser=None, dbpass=None, dbhost=None): ' Constructor for this class. ' pass
Constructor for this class.
mediation/packages/bts/network_baseline.py
__init__
gitter-badger/bts-ce
0
python
def __init__(self, dbname=None, dbuser=None, dbpass=None, dbhost=None): ' ' pass
def __init__(self, dbname=None, dbuser=None, dbpass=None, dbhost=None): ' ' pass<|docstring|>Constructor for this class.<|endoftext|>
077b8e75db58ac64e9120c9fe79cc205c95905810bb8b6bca43d1186180321c9
def run(self, vendor_id, tech_id): 'Run network baseline' conn = psycopg2.connect('dbname=bts user=bodastage password=password host=database') conn.autocommit = True cur = conn.cursor() cur.execute('SELECT pk, "name" FROM managedobjects_schemas WHERE tech_pk = %s and vendor_pk = %s', (tech_id, vendor_id)) schema = cur.fetchone() schema_name = schema[1] cur.execute('\n SELECT DISTINCT t1.pk, t1."name" \n FROM managedobjects t1\n INNER JOIN live_network.baseline_parameter_config t2 on t2.mo_pk = t1.pk\n WHERE tech_pk = %s and vendor_pk = %s', (tech_id, vendor_id)) mos = cur.fetchall() for idx in range(len(mos)): mo_name = mos[idx][1] mo_pk = str(mos[idx][0]) print('mo_name: {0} mo_pk: {1}'.format(mo_name, mo_pk)) cur.execute('\n SELECT t1.pk, t1."name" \n FROM vendor_parameters t1\n INNER JOIN live_network.baseline_parameter_config t2 on t2.parameter_pk = t1.pk\n WHERE \n parent_pk = %s ', (mo_pk,)) parameters = cur.fetchall() for i in range(len(parameters)): parameter_pk = parameters[i][0] parameter_name = parameters[i][1] sql = '\n SELECT "{2}" AS parameter, count(1) as cnt\n FROM {0}.{1}\n WHERE \n "{2}" IS NOT NULL AND TRIM("{2}") != \'####\'\n GROUP BY "{2}"\n ORDER BY cnt DESC\n LIMIT 1\n '.format(schema_name, mo_name, parameter_name) print(sql) parameter_value = '' try: cur.execute(sql) parameter_value = cur.fetchone() except: continue if (parameter_value == None): continue print(parameter_value) base_line_value = str(parameter_value[0]).strip() print('base_line_value:{0}'.format(base_line_value)) sql = "\n INSERT INTO live_network.base_line_values\n (pk, parameter_pk, value, date_added, date_modified, added_by, modified_by)\n VALUES \n (\n NEXTVAL('live_network.seq_base_line_values_pk'),\n %s,\n %s,\n now()::timestamp,\n now()::timestamp,\n 0,\n 0\n )\n " try: cur.execute(sql, (parameter_pk, base_line_value)) except Exception as ex: if (ex.pgcode == 23505): update_sql = '\n UPDATE live_network.base_line_values\n SET value = %s,\n date_modified = 
now()::timestamp,\n modified_by = 0\n WHERE \n paremeter_pk = %s\n ' try: cur.execute(update_sql, (parameter_pk, base_line_value)) except: continue continue
Run network baseline
mediation/packages/bts/network_baseline.py
run
gitter-badger/bts-ce
0
python
def run(self, vendor_id, tech_id): conn = psycopg2.connect('dbname=bts user=bodastage password=password host=database') conn.autocommit = True cur = conn.cursor() cur.execute('SELECT pk, "name" FROM managedobjects_schemas WHERE tech_pk = %s and vendor_pk = %s', (tech_id, vendor_id)) schema = cur.fetchone() schema_name = schema[1] cur.execute('\n SELECT DISTINCT t1.pk, t1."name" \n FROM managedobjects t1\n INNER JOIN live_network.baseline_parameter_config t2 on t2.mo_pk = t1.pk\n WHERE tech_pk = %s and vendor_pk = %s', (tech_id, vendor_id)) mos = cur.fetchall() for idx in range(len(mos)): mo_name = mos[idx][1] mo_pk = str(mos[idx][0]) print('mo_name: {0} mo_pk: {1}'.format(mo_name, mo_pk)) cur.execute('\n SELECT t1.pk, t1."name" \n FROM vendor_parameters t1\n INNER JOIN live_network.baseline_parameter_config t2 on t2.parameter_pk = t1.pk\n WHERE \n parent_pk = %s ', (mo_pk,)) parameters = cur.fetchall() for i in range(len(parameters)): parameter_pk = parameters[i][0] parameter_name = parameters[i][1] sql = '\n SELECT "{2}" AS parameter, count(1) as cnt\n FROM {0}.{1}\n WHERE \n "{2}" IS NOT NULL AND TRIM("{2}") != \'####\'\n GROUP BY "{2}"\n ORDER BY cnt DESC\n LIMIT 1\n '.format(schema_name, mo_name, parameter_name) print(sql) parameter_value = try: cur.execute(sql) parameter_value = cur.fetchone() except: continue if (parameter_value == None): continue print(parameter_value) base_line_value = str(parameter_value[0]).strip() print('base_line_value:{0}'.format(base_line_value)) sql = "\n INSERT INTO live_network.base_line_values\n (pk, parameter_pk, value, date_added, date_modified, added_by, modified_by)\n VALUES \n (\n NEXTVAL('live_network.seq_base_line_values_pk'),\n %s,\n %s,\n now()::timestamp,\n now()::timestamp,\n 0,\n 0\n )\n " try: cur.execute(sql, (parameter_pk, base_line_value)) except Exception as ex: if (ex.pgcode == 23505): update_sql = '\n UPDATE live_network.base_line_values\n SET value = %s,\n date_modified = now()::timestamp,\n modified_by = 0\n 
WHERE \n paremeter_pk = %s\n ' try: cur.execute(update_sql, (parameter_pk, base_line_value)) except: continue continue
def run(self, vendor_id, tech_id): conn = psycopg2.connect('dbname=bts user=bodastage password=password host=database') conn.autocommit = True cur = conn.cursor() cur.execute('SELECT pk, "name" FROM managedobjects_schemas WHERE tech_pk = %s and vendor_pk = %s', (tech_id, vendor_id)) schema = cur.fetchone() schema_name = schema[1] cur.execute('\n SELECT DISTINCT t1.pk, t1."name" \n FROM managedobjects t1\n INNER JOIN live_network.baseline_parameter_config t2 on t2.mo_pk = t1.pk\n WHERE tech_pk = %s and vendor_pk = %s', (tech_id, vendor_id)) mos = cur.fetchall() for idx in range(len(mos)): mo_name = mos[idx][1] mo_pk = str(mos[idx][0]) print('mo_name: {0} mo_pk: {1}'.format(mo_name, mo_pk)) cur.execute('\n SELECT t1.pk, t1."name" \n FROM vendor_parameters t1\n INNER JOIN live_network.baseline_parameter_config t2 on t2.parameter_pk = t1.pk\n WHERE \n parent_pk = %s ', (mo_pk,)) parameters = cur.fetchall() for i in range(len(parameters)): parameter_pk = parameters[i][0] parameter_name = parameters[i][1] sql = '\n SELECT "{2}" AS parameter, count(1) as cnt\n FROM {0}.{1}\n WHERE \n "{2}" IS NOT NULL AND TRIM("{2}") != \'####\'\n GROUP BY "{2}"\n ORDER BY cnt DESC\n LIMIT 1\n '.format(schema_name, mo_name, parameter_name) print(sql) parameter_value = try: cur.execute(sql) parameter_value = cur.fetchone() except: continue if (parameter_value == None): continue print(parameter_value) base_line_value = str(parameter_value[0]).strip() print('base_line_value:{0}'.format(base_line_value)) sql = "\n INSERT INTO live_network.base_line_values\n (pk, parameter_pk, value, date_added, date_modified, added_by, modified_by)\n VALUES \n (\n NEXTVAL('live_network.seq_base_line_values_pk'),\n %s,\n %s,\n now()::timestamp,\n now()::timestamp,\n 0,\n 0\n )\n " try: cur.execute(sql, (parameter_pk, base_line_value)) except Exception as ex: if (ex.pgcode == 23505): update_sql = '\n UPDATE live_network.base_line_values\n SET value = %s,\n date_modified = now()::timestamp,\n modified_by = 0\n 
WHERE \n paremeter_pk = %s\n ' try: cur.execute(update_sql, (parameter_pk, base_line_value)) except: continue continue<|docstring|>Run network baseline<|endoftext|>
f9bcf05bf936dbf78dce497dc1b6f493654e97546ea38e08187e96c72eb6c6c9
def generate_huawei_2g_discrencies(self): 'Generate Huawei 2G network baseline discrepancies' self.generate_huawei_2g_cell_level_discrepancies()
Generate Huawei 2G network baseline discrepancies
mediation/packages/bts/network_baseline.py
generate_huawei_2g_discrencies
gitter-badger/bts-ce
0
python
def generate_huawei_2g_discrencies(self): self.generate_huawei_2g_cell_level_discrepancies()
def generate_huawei_2g_discrencies(self): self.generate_huawei_2g_cell_level_discrepancies()<|docstring|>Generate Huawei 2G network baseline discrepancies<|endoftext|>
23a8268214294e68473bcf85e83e69a37eeffeb16e742b65e10c6593226bb958
def generate_huawei_2g_cell_level_discrepancies(self): 'Generate Huawei 2G baseline descripancies for cell level parameters' engine = create_engine('postgresql://bodastage:password@database/bts') vendor_pk = 2 tech_pk = 1 schema_name = 'hua_cm_2g' conn = psycopg2.connect('dbname=bts user=bodastage password=password host=database') conn.autocommit = True cur = conn.cursor() sql = '\n SELECT DISTINCT\n t3.name as mo,\n t3.pk as pk,\n t3.affect_level\n FROM \n live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk \n AND t3.vendor_pk = {} AND t3.tech_pk = {}\n AND t3.affect_level = 1\n '.format(vendor_pk, tech_pk) cur.execute(sql) mo_list = cur.fetchall() for mo in mo_list: (mo_name, mo_pk, mo_affect_level) = mo sql = "\n SELECT \n t2.name as pname,\n t2.pk as pk\n FROM \n live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk \n INNER JOIN network_entities t4 on t4.pk = t3.affect_level\n AND t3.vendor_pk = {} AND t3.tech_pk = {}\n WHERE\n t3.name = '{}'\n ".format(vendor_pk, tech_pk, mo_name) cur.execute(sql) parameters = cur.fetchall() attr_list = [p[0] for p in parameters] str_param_values = ','.join(['t_mo.{0}{1}{0}'.format('"', p) for p in attr_list]) str_param_names = ','.join(['{0}{1}{0}'.format("'", p) for p in attr_list]) cell_level_join = ' INNER JOIN {0}.GCELL gcell ON gcell."CELLID" = t_mo."CELLID" AND gcell.neid = t_mo.neid \n AND gcell.module_type = t_mo.module_type '.format(schema_name) sql = '\n INSERT INTO network_audit.network_baseline \n (node, site, cellname, mo, parameter, bvalue, nvalue, vendor, technology, age, modified_by, added_by, date_added, date_modified)\n SELECT TT1.* FROM (\n SELECT\n t8.name as node,\n t7.name as site,\n t4.cellname,\n t3.name as mo,\n t2.name as parameter,\n t1.value as bvalue,\n TRIM(t4.pvalue) as nvalue,\n t9.name as vendor,\n 
t10.name as technology,\n 1 as age,\n 0 as modified_by,\n 0 as added_by,\n date_time as date_added,\n date_time as date_modified\n from live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk\n INNER JOIN live_network.baseline_parameter_config t5 on t5.mo_pk = t3.pk AND t5.parameter_pk = t2.pk\n INNER JOIN (\n SELECT * FROM (\n SELECT\n \'{2}\' as "MO",\n gcell."CELLNAME" as cellname,\n gcell."varDateTime" as date_time,\n unnest(array[{0}]) AS pname,\n unnest(array[{1}]) AS pvalue\n FROM\n hua_cm_2g.{2} t_mo\n {3}\n WHERE\n t_mo.module_type = \'Radio\'\n ) TT\n ) t4 on t4.pname = t2.name AND trim(t4.pvalue) != t1.value\n INNER JOIN live_network.cells t6 on t6.name = t4.cellname\n INNER JOIN live_network.sites t7 on t7.pk = t6.site_pk\n INNER JOIN live_network.nodes t8 on t8.pk = t7.node_pk\n INNER JOIN vendors t9 on t9.pk = t6.vendor_pk\n INNER JOIN technologies t10 ON t10.pk = t6.tech_pk\n ) TT1\n LEFT JOIN network_audit.network_baseline TT2 on TT2.node = TT1.node\n AND TT2.site = TT1.site \n AND TT2.cellname = TT1.cellname\n AND TT2.mo = TT1.mo\n AND TT2.parameter = TT1.parameter\n AND TT2.bvalue = TT1.bvalue\n AND TT2.nvalue = TT1.nvalue\n WHERE\n TT2.cellname is NULL\n '.format(str_param_names, str_param_values, mo_name, cell_level_join) print(sql) cur.execute(sql) sql = '\n WITH rd AS (\n SELECT TT2.* FROM \n network_audit.network_baseline TT2\n LEFT JOIN \n (\n select\n t8.name as node,\n t7.name as site,\n t4.cellname,\n t3.name as mo,\n t2.name as parameter,\n t1.value as bvalue,\n TRIM(t4.pvalue) as nvalue,\n t9.name as vendor,\n t10.name as technology,\n 0 as modified_by,\n 0 as added_by,\n date_time as date_added,\n date_time as date_modified\n from live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk\n INNER JOIN live_network.baseline_parameter_config t5 on t5.mo_pk = 
t3.pk AND t5.parameter_pk = t2.pk\n INNER JOIN (\n SELECT * FROM (\n SELECT\n \'{2}\' as "MO",\n gcell."CELLNAME" as cellname,\n gcell."varDateTime" as date_time,\n unnest(array[{0}]) AS pname,\n unnest(array[{1}]) AS pvalue\n FROM\n hua_cm_2g.{2} t_mo\n {3}\n WHERE\n t_mo.module_type = \'Radio\'\n ) TT\n ) t4 on t4.pname = t2.name AND trim(t4.pvalue) != t1.value\n INNER JOIN live_network.cells t6 on t6.name = t4.cellname\n INNER JOIN live_network.sites t7 on t7.pk = t6.site_pk\n INNER JOIN live_network.nodes t8 on t8.pk = t7.node_pk\n INNER JOIN vendors t9 on t9.pk = t6.vendor_pk\n INNER JOIN technologies t10 ON t10.pk = t6.tech_pk\n ) TT1 ON TT2.node = TT1.node\n AND TT2.site = TT1.site \n AND TT2.cellname = TT1.cellname\n AND TT2.mo = TT1.mo\n AND TT2.parameter = TT1.parameter\n AND TT2.bvalue = TT1.bvalue\n AND TT2.nvalue = TT1.nvalue\n WHERE\n TT1.cellname IS NULL\n )\n DELETE FROM network_audit.network_baseline t1\n WHERE t1.pk IN (SELECT pk from rd)\n '.format(str_param_names, str_param_values, mo_name, cell_level_join) print(sql) cur.execute(sql) sql = '\n WITH rd AS (\n SELECT TT2.pk, TT1.* FROM \n network_audit.network_baseline TT2\n INNER JOIN \n (\n select\n t8.name as node,\n t7.name as site,\n t4.cellname,\n t3.name as mo,\n t2.name as parameter,\n t1.value as bvalue,\n trim(t4.pvalue) as nvalue,\n t9.name as vendor,\n t10.name as technology,\n 0 as modified_by,\n 0 as added_by,\n date_time as date_added,\n date_time as date_modified\n from live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk\n INNER JOIN live_network.baseline_parameter_config t5 on t5.mo_pk = t3.pk AND t5.parameter_pk = t2.pk\n INNER JOIN (\n SELECT * FROM (\n SELECT\n \'{2}\' as "MO",\n gcell."CELLNAME" as cellname,\n gcell."varDateTime" as date_time,\n unnest(array[{0}]) AS pname,\n unnest(array[{1}]) AS pvalue\n FROM\n hua_cm_2g.{2} t_mo\n {3}\n WHERE\n t_mo.module_type = \'Radio\'\n ) 
TT\n ) t4 on t4.pname = t2.name AND trim(t4.pvalue) != t1.value\n INNER JOIN live_network.cells t6 on t6.name = t4.cellname\n INNER JOIN live_network.sites t7 on t7.pk = t6.site_pk\n INNER JOIN live_network.nodes t8 on t8.pk = t7.node_pk\n INNER JOIN vendors t9 on t9.pk = t6.vendor_pk\n INNER JOIN technologies t10 ON t10.pk = t6.tech_pk\n ) TT1 ON TT2.node = TT1.node\n AND TT2.site = TT1.site \n AND TT2.cellname = TT1.cellname\n AND TT2.mo = TT1.mo\n AND TT2.parameter = TT1.parameter\n AND TT2.bvalue = TT1.bvalue\n AND TT2.nvalue = TT1.nvalue\n )\n UPDATE network_audit.network_baseline AS nb\n SET \n date_modified = rd.date_added, \n age=DATE_PART(\'day\',AGE(nb.date_added, rd.date_added))\n FROM \n rd \n where \n rd.pk = nb.pk\n '.format(str_param_names, str_param_values, mo_name, cell_level_join) print(sql) cur.execute(sql)
Generate Huawei 2G baseline descripancies for cell level parameters
mediation/packages/bts/network_baseline.py
generate_huawei_2g_cell_level_discrepancies
gitter-badger/bts-ce
0
python
def generate_huawei_2g_cell_level_discrepancies(self): engine = create_engine('postgresql://bodastage:password@database/bts') vendor_pk = 2 tech_pk = 1 schema_name = 'hua_cm_2g' conn = psycopg2.connect('dbname=bts user=bodastage password=password host=database') conn.autocommit = True cur = conn.cursor() sql = '\n SELECT DISTINCT\n t3.name as mo,\n t3.pk as pk,\n t3.affect_level\n FROM \n live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk \n AND t3.vendor_pk = {} AND t3.tech_pk = {}\n AND t3.affect_level = 1\n '.format(vendor_pk, tech_pk) cur.execute(sql) mo_list = cur.fetchall() for mo in mo_list: (mo_name, mo_pk, mo_affect_level) = mo sql = "\n SELECT \n t2.name as pname,\n t2.pk as pk\n FROM \n live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk \n INNER JOIN network_entities t4 on t4.pk = t3.affect_level\n AND t3.vendor_pk = {} AND t3.tech_pk = {}\n WHERE\n t3.name = '{}'\n ".format(vendor_pk, tech_pk, mo_name) cur.execute(sql) parameters = cur.fetchall() attr_list = [p[0] for p in parameters] str_param_values = ','.join(['t_mo.{0}{1}{0}'.format('"', p) for p in attr_list]) str_param_names = ','.join(['{0}{1}{0}'.format("'", p) for p in attr_list]) cell_level_join = ' INNER JOIN {0}.GCELL gcell ON gcell."CELLID" = t_mo."CELLID" AND gcell.neid = t_mo.neid \n AND gcell.module_type = t_mo.module_type '.format(schema_name) sql = '\n INSERT INTO network_audit.network_baseline \n (node, site, cellname, mo, parameter, bvalue, nvalue, vendor, technology, age, modified_by, added_by, date_added, date_modified)\n SELECT TT1.* FROM (\n SELECT\n t8.name as node,\n t7.name as site,\n t4.cellname,\n t3.name as mo,\n t2.name as parameter,\n t1.value as bvalue,\n TRIM(t4.pvalue) as nvalue,\n t9.name as vendor,\n t10.name as technology,\n 1 as age,\n 0 as modified_by,\n 0 as added_by,\n 
date_time as date_added,\n date_time as date_modified\n from live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk\n INNER JOIN live_network.baseline_parameter_config t5 on t5.mo_pk = t3.pk AND t5.parameter_pk = t2.pk\n INNER JOIN (\n SELECT * FROM (\n SELECT\n \'{2}\' as "MO",\n gcell."CELLNAME" as cellname,\n gcell."varDateTime" as date_time,\n unnest(array[{0}]) AS pname,\n unnest(array[{1}]) AS pvalue\n FROM\n hua_cm_2g.{2} t_mo\n {3}\n WHERE\n t_mo.module_type = \'Radio\'\n ) TT\n ) t4 on t4.pname = t2.name AND trim(t4.pvalue) != t1.value\n INNER JOIN live_network.cells t6 on t6.name = t4.cellname\n INNER JOIN live_network.sites t7 on t7.pk = t6.site_pk\n INNER JOIN live_network.nodes t8 on t8.pk = t7.node_pk\n INNER JOIN vendors t9 on t9.pk = t6.vendor_pk\n INNER JOIN technologies t10 ON t10.pk = t6.tech_pk\n ) TT1\n LEFT JOIN network_audit.network_baseline TT2 on TT2.node = TT1.node\n AND TT2.site = TT1.site \n AND TT2.cellname = TT1.cellname\n AND TT2.mo = TT1.mo\n AND TT2.parameter = TT1.parameter\n AND TT2.bvalue = TT1.bvalue\n AND TT2.nvalue = TT1.nvalue\n WHERE\n TT2.cellname is NULL\n '.format(str_param_names, str_param_values, mo_name, cell_level_join) print(sql) cur.execute(sql) sql = '\n WITH rd AS (\n SELECT TT2.* FROM \n network_audit.network_baseline TT2\n LEFT JOIN \n (\n select\n t8.name as node,\n t7.name as site,\n t4.cellname,\n t3.name as mo,\n t2.name as parameter,\n t1.value as bvalue,\n TRIM(t4.pvalue) as nvalue,\n t9.name as vendor,\n t10.name as technology,\n 0 as modified_by,\n 0 as added_by,\n date_time as date_added,\n date_time as date_modified\n from live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk\n INNER JOIN live_network.baseline_parameter_config t5 on t5.mo_pk = t3.pk AND t5.parameter_pk = t2.pk\n INNER JOIN (\n SELECT * FROM (\n 
SELECT\n \'{2}\' as "MO",\n gcell."CELLNAME" as cellname,\n gcell."varDateTime" as date_time,\n unnest(array[{0}]) AS pname,\n unnest(array[{1}]) AS pvalue\n FROM\n hua_cm_2g.{2} t_mo\n {3}\n WHERE\n t_mo.module_type = \'Radio\'\n ) TT\n ) t4 on t4.pname = t2.name AND trim(t4.pvalue) != t1.value\n INNER JOIN live_network.cells t6 on t6.name = t4.cellname\n INNER JOIN live_network.sites t7 on t7.pk = t6.site_pk\n INNER JOIN live_network.nodes t8 on t8.pk = t7.node_pk\n INNER JOIN vendors t9 on t9.pk = t6.vendor_pk\n INNER JOIN technologies t10 ON t10.pk = t6.tech_pk\n ) TT1 ON TT2.node = TT1.node\n AND TT2.site = TT1.site \n AND TT2.cellname = TT1.cellname\n AND TT2.mo = TT1.mo\n AND TT2.parameter = TT1.parameter\n AND TT2.bvalue = TT1.bvalue\n AND TT2.nvalue = TT1.nvalue\n WHERE\n TT1.cellname IS NULL\n )\n DELETE FROM network_audit.network_baseline t1\n WHERE t1.pk IN (SELECT pk from rd)\n '.format(str_param_names, str_param_values, mo_name, cell_level_join) print(sql) cur.execute(sql) sql = '\n WITH rd AS (\n SELECT TT2.pk, TT1.* FROM \n network_audit.network_baseline TT2\n INNER JOIN \n (\n select\n t8.name as node,\n t7.name as site,\n t4.cellname,\n t3.name as mo,\n t2.name as parameter,\n t1.value as bvalue,\n trim(t4.pvalue) as nvalue,\n t9.name as vendor,\n t10.name as technology,\n 0 as modified_by,\n 0 as added_by,\n date_time as date_added,\n date_time as date_modified\n from live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk\n INNER JOIN live_network.baseline_parameter_config t5 on t5.mo_pk = t3.pk AND t5.parameter_pk = t2.pk\n INNER JOIN (\n SELECT * FROM (\n SELECT\n \'{2}\' as "MO",\n gcell."CELLNAME" as cellname,\n gcell."varDateTime" as date_time,\n unnest(array[{0}]) AS pname,\n unnest(array[{1}]) AS pvalue\n FROM\n hua_cm_2g.{2} t_mo\n {3}\n WHERE\n t_mo.module_type = \'Radio\'\n ) TT\n ) t4 on t4.pname = t2.name AND trim(t4.pvalue) != t1.value\n 
INNER JOIN live_network.cells t6 on t6.name = t4.cellname\n INNER JOIN live_network.sites t7 on t7.pk = t6.site_pk\n INNER JOIN live_network.nodes t8 on t8.pk = t7.node_pk\n INNER JOIN vendors t9 on t9.pk = t6.vendor_pk\n INNER JOIN technologies t10 ON t10.pk = t6.tech_pk\n ) TT1 ON TT2.node = TT1.node\n AND TT2.site = TT1.site \n AND TT2.cellname = TT1.cellname\n AND TT2.mo = TT1.mo\n AND TT2.parameter = TT1.parameter\n AND TT2.bvalue = TT1.bvalue\n AND TT2.nvalue = TT1.nvalue\n )\n UPDATE network_audit.network_baseline AS nb\n SET \n date_modified = rd.date_added, \n age=DATE_PART(\'day\',AGE(nb.date_added, rd.date_added))\n FROM \n rd \n where \n rd.pk = nb.pk\n '.format(str_param_names, str_param_values, mo_name, cell_level_join) print(sql) cur.execute(sql)
def generate_huawei_2g_cell_level_discrepancies(self): engine = create_engine('postgresql://bodastage:password@database/bts') vendor_pk = 2 tech_pk = 1 schema_name = 'hua_cm_2g' conn = psycopg2.connect('dbname=bts user=bodastage password=password host=database') conn.autocommit = True cur = conn.cursor() sql = '\n SELECT DISTINCT\n t3.name as mo,\n t3.pk as pk,\n t3.affect_level\n FROM \n live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk \n AND t3.vendor_pk = {} AND t3.tech_pk = {}\n AND t3.affect_level = 1\n '.format(vendor_pk, tech_pk) cur.execute(sql) mo_list = cur.fetchall() for mo in mo_list: (mo_name, mo_pk, mo_affect_level) = mo sql = "\n SELECT \n t2.name as pname,\n t2.pk as pk\n FROM \n live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk \n INNER JOIN network_entities t4 on t4.pk = t3.affect_level\n AND t3.vendor_pk = {} AND t3.tech_pk = {}\n WHERE\n t3.name = '{}'\n ".format(vendor_pk, tech_pk, mo_name) cur.execute(sql) parameters = cur.fetchall() attr_list = [p[0] for p in parameters] str_param_values = ','.join(['t_mo.{0}{1}{0}'.format('"', p) for p in attr_list]) str_param_names = ','.join(['{0}{1}{0}'.format("'", p) for p in attr_list]) cell_level_join = ' INNER JOIN {0}.GCELL gcell ON gcell."CELLID" = t_mo."CELLID" AND gcell.neid = t_mo.neid \n AND gcell.module_type = t_mo.module_type '.format(schema_name) sql = '\n INSERT INTO network_audit.network_baseline \n (node, site, cellname, mo, parameter, bvalue, nvalue, vendor, technology, age, modified_by, added_by, date_added, date_modified)\n SELECT TT1.* FROM (\n SELECT\n t8.name as node,\n t7.name as site,\n t4.cellname,\n t3.name as mo,\n t2.name as parameter,\n t1.value as bvalue,\n TRIM(t4.pvalue) as nvalue,\n t9.name as vendor,\n t10.name as technology,\n 1 as age,\n 0 as modified_by,\n 0 as added_by,\n 
date_time as date_added,\n date_time as date_modified\n from live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk\n INNER JOIN live_network.baseline_parameter_config t5 on t5.mo_pk = t3.pk AND t5.parameter_pk = t2.pk\n INNER JOIN (\n SELECT * FROM (\n SELECT\n \'{2}\' as "MO",\n gcell."CELLNAME" as cellname,\n gcell."varDateTime" as date_time,\n unnest(array[{0}]) AS pname,\n unnest(array[{1}]) AS pvalue\n FROM\n hua_cm_2g.{2} t_mo\n {3}\n WHERE\n t_mo.module_type = \'Radio\'\n ) TT\n ) t4 on t4.pname = t2.name AND trim(t4.pvalue) != t1.value\n INNER JOIN live_network.cells t6 on t6.name = t4.cellname\n INNER JOIN live_network.sites t7 on t7.pk = t6.site_pk\n INNER JOIN live_network.nodes t8 on t8.pk = t7.node_pk\n INNER JOIN vendors t9 on t9.pk = t6.vendor_pk\n INNER JOIN technologies t10 ON t10.pk = t6.tech_pk\n ) TT1\n LEFT JOIN network_audit.network_baseline TT2 on TT2.node = TT1.node\n AND TT2.site = TT1.site \n AND TT2.cellname = TT1.cellname\n AND TT2.mo = TT1.mo\n AND TT2.parameter = TT1.parameter\n AND TT2.bvalue = TT1.bvalue\n AND TT2.nvalue = TT1.nvalue\n WHERE\n TT2.cellname is NULL\n '.format(str_param_names, str_param_values, mo_name, cell_level_join) print(sql) cur.execute(sql) sql = '\n WITH rd AS (\n SELECT TT2.* FROM \n network_audit.network_baseline TT2\n LEFT JOIN \n (\n select\n t8.name as node,\n t7.name as site,\n t4.cellname,\n t3.name as mo,\n t2.name as parameter,\n t1.value as bvalue,\n TRIM(t4.pvalue) as nvalue,\n t9.name as vendor,\n t10.name as technology,\n 0 as modified_by,\n 0 as added_by,\n date_time as date_added,\n date_time as date_modified\n from live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk\n INNER JOIN live_network.baseline_parameter_config t5 on t5.mo_pk = t3.pk AND t5.parameter_pk = t2.pk\n INNER JOIN (\n SELECT * FROM (\n 
SELECT\n \'{2}\' as "MO",\n gcell."CELLNAME" as cellname,\n gcell."varDateTime" as date_time,\n unnest(array[{0}]) AS pname,\n unnest(array[{1}]) AS pvalue\n FROM\n hua_cm_2g.{2} t_mo\n {3}\n WHERE\n t_mo.module_type = \'Radio\'\n ) TT\n ) t4 on t4.pname = t2.name AND trim(t4.pvalue) != t1.value\n INNER JOIN live_network.cells t6 on t6.name = t4.cellname\n INNER JOIN live_network.sites t7 on t7.pk = t6.site_pk\n INNER JOIN live_network.nodes t8 on t8.pk = t7.node_pk\n INNER JOIN vendors t9 on t9.pk = t6.vendor_pk\n INNER JOIN technologies t10 ON t10.pk = t6.tech_pk\n ) TT1 ON TT2.node = TT1.node\n AND TT2.site = TT1.site \n AND TT2.cellname = TT1.cellname\n AND TT2.mo = TT1.mo\n AND TT2.parameter = TT1.parameter\n AND TT2.bvalue = TT1.bvalue\n AND TT2.nvalue = TT1.nvalue\n WHERE\n TT1.cellname IS NULL\n )\n DELETE FROM network_audit.network_baseline t1\n WHERE t1.pk IN (SELECT pk from rd)\n '.format(str_param_names, str_param_values, mo_name, cell_level_join) print(sql) cur.execute(sql) sql = '\n WITH rd AS (\n SELECT TT2.pk, TT1.* FROM \n network_audit.network_baseline TT2\n INNER JOIN \n (\n select\n t8.name as node,\n t7.name as site,\n t4.cellname,\n t3.name as mo,\n t2.name as parameter,\n t1.value as bvalue,\n trim(t4.pvalue) as nvalue,\n t9.name as vendor,\n t10.name as technology,\n 0 as modified_by,\n 0 as added_by,\n date_time as date_added,\n date_time as date_modified\n from live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk\n INNER JOIN live_network.baseline_parameter_config t5 on t5.mo_pk = t3.pk AND t5.parameter_pk = t2.pk\n INNER JOIN (\n SELECT * FROM (\n SELECT\n \'{2}\' as "MO",\n gcell."CELLNAME" as cellname,\n gcell."varDateTime" as date_time,\n unnest(array[{0}]) AS pname,\n unnest(array[{1}]) AS pvalue\n FROM\n hua_cm_2g.{2} t_mo\n {3}\n WHERE\n t_mo.module_type = \'Radio\'\n ) TT\n ) t4 on t4.pname = t2.name AND trim(t4.pvalue) != t1.value\n 
INNER JOIN live_network.cells t6 on t6.name = t4.cellname\n INNER JOIN live_network.sites t7 on t7.pk = t6.site_pk\n INNER JOIN live_network.nodes t8 on t8.pk = t7.node_pk\n INNER JOIN vendors t9 on t9.pk = t6.vendor_pk\n INNER JOIN technologies t10 ON t10.pk = t6.tech_pk\n ) TT1 ON TT2.node = TT1.node\n AND TT2.site = TT1.site \n AND TT2.cellname = TT1.cellname\n AND TT2.mo = TT1.mo\n AND TT2.parameter = TT1.parameter\n AND TT2.bvalue = TT1.bvalue\n AND TT2.nvalue = TT1.nvalue\n )\n UPDATE network_audit.network_baseline AS nb\n SET \n date_modified = rd.date_added, \n age=DATE_PART(\'day\',AGE(nb.date_added, rd.date_added))\n FROM \n rd \n where \n rd.pk = nb.pk\n '.format(str_param_names, str_param_values, mo_name, cell_level_join) print(sql) cur.execute(sql)<|docstring|>Generate Huawei 2G baseline descripancies for cell level parameters<|endoftext|>
a6ea1e07ae4e127830166f1e105c947f70a9a1c29e06586a3c2e3c0ad55dcf7a
def generate_huawei_2g_site_level_discrepancies(self): 'Generate Huawei 2G baseline discrepancies for site level parameters' engine = create_engine('postgresql://bodastage:password@database/bts') vendor_pk = 2 tech_pk = 1 schema_name = 'hua_cm_2g' conn = psycopg2.connect('dbname=bts user=bodastage password=password host=database') conn.autocommit = True cur = conn.cursor() sql = '\n SELECT DISTINCT\n t3.name as mo,\n t3.pk as pk,\n t3.affect_level\n FROM \n live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk \n AND t3.vendor_pk = {} AND t3.tech_pk = {}\n AND t3.affect_level = 4\n '.format(vendor_pk, tech_pk) cur.execute(sql) mo_list = cur.fetchall() for mo in mo_list: (mo_name, mo_pk, mo_affect_level) = mo sql = "\n SELECT \n t2.name as pname,\n t2.pk as pk\n FROM \n live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk \n INNER JOIN network_entities t4 on t4.pk = t3.affect_level\n AND t3.vendor_pk = {} AND t3.tech_pk = {}\n AND t3.affect_level = 4\n WHERE\n t3.name = '{}'\n ".format(vendor_pk, tech_pk, mo_name) cur.execute(sql) parameters = cur.fetchall() attr_list = [p[0] for p in parameters] str_param_values = ','.join(['t_mo.{0}{1}{0}'.format('"', p) for p in attr_list]) str_param_names = ','.join(['{0}{1}{0}'.format("'", p) for p in attr_list]) cell_level_join = ' INNER JOIN {0}.BTS p_mo ON p_mo."BTSID" = t_mo."BTSID" AND p_mo.neid = t_mo.neid \n AND p_mo.module_type = t_mo.module_type '.format(schema_name) sql = '\n INSERT INTO network_audit.baseline_site_parameters \n (node, site, mo, parameter, bvalue, nvalue, vendor, technology, age, modified_by, added_by, date_added, date_modified)\n SELECT TT1.* FROM (\n SELECT\n t8.name as node,\n t7.name as site,\n t3.name as mo,\n t2.name as parameter,\n t1.value as bvalue,\n TRIM(t4.pvalue) as nvalue,\n t9.name as vendor,\n 
t10.name as technology,\n 1 as age,\n 0 as modified_by,\n 0 as added_by,\n date_time as date_added,\n date_time as date_modified\n from live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk\n INNER JOIN live_network.baseline_parameter_config t5 on t5.mo_pk = t3.pk AND t5.parameter_pk = t2.pk\n INNER JOIN (\n SELECT * FROM (\n SELECT\n \'{2}\' as "MO",\n p_mo."BTSNAME" as sitename,\n p_mo."varDateTime" as date_time,\n unnest(array[{0}]) AS pname,\n unnest(array[{1}]) AS pvalue\n FROM\n hua_cm_2g.{2} t_mo\n {3}\n ) TT\n ) t4 on t4.pname = t2.name AND trim(t4.pvalue) != t1.value\n INNER JOIN live_network.sites t7 on t7.name = t4.sitename \n INNER JOIN live_network.nodes t8 on t8.pk = t7.node_pk\n INNER JOIN vendors t9 on t9.pk = t7.vendor_pk\n INNER JOIN technologies t10 ON t10.pk = t7.tech_pk\n ) TT1\n LEFT JOIN network_audit.baseline_site_parameters TT2 on TT2.node = TT1.node\n AND TT2.site = TT1.site \n AND TT2.mo = TT1.mo\n AND TT2.parameter = TT1.parameter\n AND TT2.bvalue = TT1.bvalue\n AND TT2.nvalue = TT1.nvalue\n WHERE\n TT2.site is NULL\n '.format(str_param_names, str_param_values, mo_name, cell_level_join) print(sql) cur.execute(sql) sql = '\n WITH rd AS (\n SELECT TT2.* FROM \n network_audit.baseline_site_parameters TT2\n LEFT JOIN \n (\n select\n t8.name as node,\n t7.name as site,\n t3.name as mo,\n t2.name as parameter,\n t1.value as bvalue,\n TRIM(t4.pvalue) as nvalue,\n t9.name as vendor,\n t10.name as technology,\n 0 as modified_by,\n 0 as added_by,\n date_time as date_added,\n date_time as date_modified\n from live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk\n INNER JOIN live_network.baseline_parameter_config t5 on t5.mo_pk = t3.pk AND t5.parameter_pk = t2.pk\n INNER JOIN (\n SELECT * FROM (\n SELECT\n \'{2}\' as "MO",\n p_mo."BTSNAME" as sitename,\n 
p_mo."varDateTime" as date_time,\n unnest(array[{0}]) AS pname,\n unnest(array[{1}]) AS pvalue\n FROM\n hua_cm_2g.{2} t_mo\n {3}\n ) TT\n ) t4 on t4.pname = t2.name AND trim(t4.pvalue) != t1.value\n INNER JOIN live_network.sites t7 on t7.name = t4.sitename \n INNER JOIN live_network.nodes t8 on t8.pk = t7.node_pk\n INNER JOIN vendors t9 on t9.pk = t7.vendor_pk\n INNER JOIN technologies t10 ON t10.pk = t7.tech_pk\n ) TT1 ON TT2.node = TT1.node\n AND TT2.site = TT1.site \n AND TT2.mo = TT1.mo\n AND TT2.parameter = TT1.parameter\n AND TT2.bvalue = TT1.bvalue\n AND TT2.nvalue = TT1.nvalue\n WHERE\n TT1.site IS NULL\n )\n DELETE FROM network_audit.baseline_site_parameters t1\n WHERE t1.pk IN (SELECT pk from rd)\n '.format(str_param_names, str_param_values, mo_name, cell_level_join) print(sql) cur.execute(sql) sql = '\n WITH rd AS (\n SELECT TT2.pk, TT1.* FROM \n network_audit.baseline_site_parameters TT2\n INNER JOIN \n (\n select\n t8.name as node,\n t7.name as site,\n t3.name as mo,\n t2.name as parameter,\n t1.value as bvalue,\n trim(t4.pvalue) as nvalue,\n t9.name as vendor,\n t10.name as technology,\n 0 as modified_by,\n 0 as added_by,\n date_time as date_added,\n date_time as date_modified\n from live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk\n INNER JOIN live_network.baseline_parameter_config t5 on t5.mo_pk = t3.pk AND t5.parameter_pk = t2.pk\n INNER JOIN (\n SELECT * FROM (\n SELECT\n \'{2}\' as "MO",\n p_mo."BTSNAME" as sitename,\n p_mo."varDateTime" as date_time,\n unnest(array[{0}]) AS pname,\n unnest(array[{1}]) AS pvalue\n FROM\n hua_cm_2g.{2} t_mo\n {3}\n ) TT\n ) t4 on t4.pname = t2.name AND trim(t4.pvalue) != t1.value\n INNER JOIN live_network.sites t7 on t7.name = t4.sitename \n INNER JOIN live_network.nodes t8 on t8.pk = t7.node_pk\n INNER JOIN vendors t9 on t9.pk = t7.vendor_pk\n INNER JOIN technologies t10 ON t10.pk = t7.tech_pk\n ) TT1 ON TT2.node = 
TT1.node\n AND TT2.site = TT1.site \n AND TT2.mo = TT1.mo\n AND TT2.parameter = TT1.parameter\n AND TT2.bvalue = TT1.bvalue\n AND TT2.nvalue = TT1.nvalue\n )\n UPDATE network_audit.baseline_site_parameters AS nb\n SET \n date_modified = rd.date_added, \n age=DATE_PART(\'day\',AGE(nb.date_added, rd.date_added))\n FROM \n rd \n where \n rd.pk = nb.pk\n '.format(str_param_names, str_param_values, mo_name, cell_level_join) print(sql) cur.execute(sql)
Generate Huawei 2G baseline discrepancies for site level parameters
mediation/packages/bts/network_baseline.py
generate_huawei_2g_site_level_discrepancies
gitter-badger/bts-ce
0
python
def generate_huawei_2g_site_level_discrepancies(self): engine = create_engine('postgresql://bodastage:password@database/bts') vendor_pk = 2 tech_pk = 1 schema_name = 'hua_cm_2g' conn = psycopg2.connect('dbname=bts user=bodastage password=password host=database') conn.autocommit = True cur = conn.cursor() sql = '\n SELECT DISTINCT\n t3.name as mo,\n t3.pk as pk,\n t3.affect_level\n FROM \n live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk \n AND t3.vendor_pk = {} AND t3.tech_pk = {}\n AND t3.affect_level = 4\n '.format(vendor_pk, tech_pk) cur.execute(sql) mo_list = cur.fetchall() for mo in mo_list: (mo_name, mo_pk, mo_affect_level) = mo sql = "\n SELECT \n t2.name as pname,\n t2.pk as pk\n FROM \n live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk \n INNER JOIN network_entities t4 on t4.pk = t3.affect_level\n AND t3.vendor_pk = {} AND t3.tech_pk = {}\n AND t3.affect_level = 4\n WHERE\n t3.name = '{}'\n ".format(vendor_pk, tech_pk, mo_name) cur.execute(sql) parameters = cur.fetchall() attr_list = [p[0] for p in parameters] str_param_values = ','.join(['t_mo.{0}{1}{0}'.format('"', p) for p in attr_list]) str_param_names = ','.join(['{0}{1}{0}'.format("'", p) for p in attr_list]) cell_level_join = ' INNER JOIN {0}.BTS p_mo ON p_mo."BTSID" = t_mo."BTSID" AND p_mo.neid = t_mo.neid \n AND p_mo.module_type = t_mo.module_type '.format(schema_name) sql = '\n INSERT INTO network_audit.baseline_site_parameters \n (node, site, mo, parameter, bvalue, nvalue, vendor, technology, age, modified_by, added_by, date_added, date_modified)\n SELECT TT1.* FROM (\n SELECT\n t8.name as node,\n t7.name as site,\n t3.name as mo,\n t2.name as parameter,\n t1.value as bvalue,\n TRIM(t4.pvalue) as nvalue,\n t9.name as vendor,\n t10.name as technology,\n 1 as age,\n 0 as modified_by,\n 0 as added_by,\n 
date_time as date_added,\n date_time as date_modified\n from live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk\n INNER JOIN live_network.baseline_parameter_config t5 on t5.mo_pk = t3.pk AND t5.parameter_pk = t2.pk\n INNER JOIN (\n SELECT * FROM (\n SELECT\n \'{2}\' as "MO",\n p_mo."BTSNAME" as sitename,\n p_mo."varDateTime" as date_time,\n unnest(array[{0}]) AS pname,\n unnest(array[{1}]) AS pvalue\n FROM\n hua_cm_2g.{2} t_mo\n {3}\n ) TT\n ) t4 on t4.pname = t2.name AND trim(t4.pvalue) != t1.value\n INNER JOIN live_network.sites t7 on t7.name = t4.sitename \n INNER JOIN live_network.nodes t8 on t8.pk = t7.node_pk\n INNER JOIN vendors t9 on t9.pk = t7.vendor_pk\n INNER JOIN technologies t10 ON t10.pk = t7.tech_pk\n ) TT1\n LEFT JOIN network_audit.baseline_site_parameters TT2 on TT2.node = TT1.node\n AND TT2.site = TT1.site \n AND TT2.mo = TT1.mo\n AND TT2.parameter = TT1.parameter\n AND TT2.bvalue = TT1.bvalue\n AND TT2.nvalue = TT1.nvalue\n WHERE\n TT2.site is NULL\n '.format(str_param_names, str_param_values, mo_name, cell_level_join) print(sql) cur.execute(sql) sql = '\n WITH rd AS (\n SELECT TT2.* FROM \n network_audit.baseline_site_parameters TT2\n LEFT JOIN \n (\n select\n t8.name as node,\n t7.name as site,\n t3.name as mo,\n t2.name as parameter,\n t1.value as bvalue,\n TRIM(t4.pvalue) as nvalue,\n t9.name as vendor,\n t10.name as technology,\n 0 as modified_by,\n 0 as added_by,\n date_time as date_added,\n date_time as date_modified\n from live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk\n INNER JOIN live_network.baseline_parameter_config t5 on t5.mo_pk = t3.pk AND t5.parameter_pk = t2.pk\n INNER JOIN (\n SELECT * FROM (\n SELECT\n \'{2}\' as "MO",\n p_mo."BTSNAME" as sitename,\n p_mo."varDateTime" as date_time,\n unnest(array[{0}]) AS pname,\n 
unnest(array[{1}]) AS pvalue\n FROM\n hua_cm_2g.{2} t_mo\n {3}\n ) TT\n ) t4 on t4.pname = t2.name AND trim(t4.pvalue) != t1.value\n INNER JOIN live_network.sites t7 on t7.name = t4.sitename \n INNER JOIN live_network.nodes t8 on t8.pk = t7.node_pk\n INNER JOIN vendors t9 on t9.pk = t7.vendor_pk\n INNER JOIN technologies t10 ON t10.pk = t7.tech_pk\n ) TT1 ON TT2.node = TT1.node\n AND TT2.site = TT1.site \n AND TT2.mo = TT1.mo\n AND TT2.parameter = TT1.parameter\n AND TT2.bvalue = TT1.bvalue\n AND TT2.nvalue = TT1.nvalue\n WHERE\n TT1.site IS NULL\n )\n DELETE FROM network_audit.baseline_site_parameters t1\n WHERE t1.pk IN (SELECT pk from rd)\n '.format(str_param_names, str_param_values, mo_name, cell_level_join) print(sql) cur.execute(sql) sql = '\n WITH rd AS (\n SELECT TT2.pk, TT1.* FROM \n network_audit.baseline_site_parameters TT2\n INNER JOIN \n (\n select\n t8.name as node,\n t7.name as site,\n t3.name as mo,\n t2.name as parameter,\n t1.value as bvalue,\n trim(t4.pvalue) as nvalue,\n t9.name as vendor,\n t10.name as technology,\n 0 as modified_by,\n 0 as added_by,\n date_time as date_added,\n date_time as date_modified\n from live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk\n INNER JOIN live_network.baseline_parameter_config t5 on t5.mo_pk = t3.pk AND t5.parameter_pk = t2.pk\n INNER JOIN (\n SELECT * FROM (\n SELECT\n \'{2}\' as "MO",\n p_mo."BTSNAME" as sitename,\n p_mo."varDateTime" as date_time,\n unnest(array[{0}]) AS pname,\n unnest(array[{1}]) AS pvalue\n FROM\n hua_cm_2g.{2} t_mo\n {3}\n ) TT\n ) t4 on t4.pname = t2.name AND trim(t4.pvalue) != t1.value\n INNER JOIN live_network.sites t7 on t7.name = t4.sitename \n INNER JOIN live_network.nodes t8 on t8.pk = t7.node_pk\n INNER JOIN vendors t9 on t9.pk = t7.vendor_pk\n INNER JOIN technologies t10 ON t10.pk = t7.tech_pk\n ) TT1 ON TT2.node = TT1.node\n AND TT2.site = TT1.site \n AND TT2.mo = TT1.mo\n AND 
TT2.parameter = TT1.parameter\n AND TT2.bvalue = TT1.bvalue\n AND TT2.nvalue = TT1.nvalue\n )\n UPDATE network_audit.baseline_site_parameters AS nb\n SET \n date_modified = rd.date_added, \n age=DATE_PART(\'day\',AGE(nb.date_added, rd.date_added))\n FROM \n rd \n where \n rd.pk = nb.pk\n '.format(str_param_names, str_param_values, mo_name, cell_level_join) print(sql) cur.execute(sql)
def generate_huawei_2g_site_level_discrepancies(self): engine = create_engine('postgresql://bodastage:password@database/bts') vendor_pk = 2 tech_pk = 1 schema_name = 'hua_cm_2g' conn = psycopg2.connect('dbname=bts user=bodastage password=password host=database') conn.autocommit = True cur = conn.cursor() sql = '\n SELECT DISTINCT\n t3.name as mo,\n t3.pk as pk,\n t3.affect_level\n FROM \n live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk \n AND t3.vendor_pk = {} AND t3.tech_pk = {}\n AND t3.affect_level = 4\n '.format(vendor_pk, tech_pk) cur.execute(sql) mo_list = cur.fetchall() for mo in mo_list: (mo_name, mo_pk, mo_affect_level) = mo sql = "\n SELECT \n t2.name as pname,\n t2.pk as pk\n FROM \n live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk \n INNER JOIN network_entities t4 on t4.pk = t3.affect_level\n AND t3.vendor_pk = {} AND t3.tech_pk = {}\n AND t3.affect_level = 4\n WHERE\n t3.name = '{}'\n ".format(vendor_pk, tech_pk, mo_name) cur.execute(sql) parameters = cur.fetchall() attr_list = [p[0] for p in parameters] str_param_values = ','.join(['t_mo.{0}{1}{0}'.format('"', p) for p in attr_list]) str_param_names = ','.join(['{0}{1}{0}'.format("'", p) for p in attr_list]) cell_level_join = ' INNER JOIN {0}.BTS p_mo ON p_mo."BTSID" = t_mo."BTSID" AND p_mo.neid = t_mo.neid \n AND p_mo.module_type = t_mo.module_type '.format(schema_name) sql = '\n INSERT INTO network_audit.baseline_site_parameters \n (node, site, mo, parameter, bvalue, nvalue, vendor, technology, age, modified_by, added_by, date_added, date_modified)\n SELECT TT1.* FROM (\n SELECT\n t8.name as node,\n t7.name as site,\n t3.name as mo,\n t2.name as parameter,\n t1.value as bvalue,\n TRIM(t4.pvalue) as nvalue,\n t9.name as vendor,\n t10.name as technology,\n 1 as age,\n 0 as modified_by,\n 0 as added_by,\n 
date_time as date_added,\n date_time as date_modified\n from live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk\n INNER JOIN live_network.baseline_parameter_config t5 on t5.mo_pk = t3.pk AND t5.parameter_pk = t2.pk\n INNER JOIN (\n SELECT * FROM (\n SELECT\n \'{2}\' as "MO",\n p_mo."BTSNAME" as sitename,\n p_mo."varDateTime" as date_time,\n unnest(array[{0}]) AS pname,\n unnest(array[{1}]) AS pvalue\n FROM\n hua_cm_2g.{2} t_mo\n {3}\n ) TT\n ) t4 on t4.pname = t2.name AND trim(t4.pvalue) != t1.value\n INNER JOIN live_network.sites t7 on t7.name = t4.sitename \n INNER JOIN live_network.nodes t8 on t8.pk = t7.node_pk\n INNER JOIN vendors t9 on t9.pk = t7.vendor_pk\n INNER JOIN technologies t10 ON t10.pk = t7.tech_pk\n ) TT1\n LEFT JOIN network_audit.baseline_site_parameters TT2 on TT2.node = TT1.node\n AND TT2.site = TT1.site \n AND TT2.mo = TT1.mo\n AND TT2.parameter = TT1.parameter\n AND TT2.bvalue = TT1.bvalue\n AND TT2.nvalue = TT1.nvalue\n WHERE\n TT2.site is NULL\n '.format(str_param_names, str_param_values, mo_name, cell_level_join) print(sql) cur.execute(sql) sql = '\n WITH rd AS (\n SELECT TT2.* FROM \n network_audit.baseline_site_parameters TT2\n LEFT JOIN \n (\n select\n t8.name as node,\n t7.name as site,\n t3.name as mo,\n t2.name as parameter,\n t1.value as bvalue,\n TRIM(t4.pvalue) as nvalue,\n t9.name as vendor,\n t10.name as technology,\n 0 as modified_by,\n 0 as added_by,\n date_time as date_added,\n date_time as date_modified\n from live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk\n INNER JOIN live_network.baseline_parameter_config t5 on t5.mo_pk = t3.pk AND t5.parameter_pk = t2.pk\n INNER JOIN (\n SELECT * FROM (\n SELECT\n \'{2}\' as "MO",\n p_mo."BTSNAME" as sitename,\n p_mo."varDateTime" as date_time,\n unnest(array[{0}]) AS pname,\n 
unnest(array[{1}]) AS pvalue\n FROM\n hua_cm_2g.{2} t_mo\n {3}\n ) TT\n ) t4 on t4.pname = t2.name AND trim(t4.pvalue) != t1.value\n INNER JOIN live_network.sites t7 on t7.name = t4.sitename \n INNER JOIN live_network.nodes t8 on t8.pk = t7.node_pk\n INNER JOIN vendors t9 on t9.pk = t7.vendor_pk\n INNER JOIN technologies t10 ON t10.pk = t7.tech_pk\n ) TT1 ON TT2.node = TT1.node\n AND TT2.site = TT1.site \n AND TT2.mo = TT1.mo\n AND TT2.parameter = TT1.parameter\n AND TT2.bvalue = TT1.bvalue\n AND TT2.nvalue = TT1.nvalue\n WHERE\n TT1.site IS NULL\n )\n DELETE FROM network_audit.baseline_site_parameters t1\n WHERE t1.pk IN (SELECT pk from rd)\n '.format(str_param_names, str_param_values, mo_name, cell_level_join) print(sql) cur.execute(sql) sql = '\n WITH rd AS (\n SELECT TT2.pk, TT1.* FROM \n network_audit.baseline_site_parameters TT2\n INNER JOIN \n (\n select\n t8.name as node,\n t7.name as site,\n t3.name as mo,\n t2.name as parameter,\n t1.value as bvalue,\n trim(t4.pvalue) as nvalue,\n t9.name as vendor,\n t10.name as technology,\n 0 as modified_by,\n 0 as added_by,\n date_time as date_added,\n date_time as date_modified\n from live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk\n INNER JOIN live_network.baseline_parameter_config t5 on t5.mo_pk = t3.pk AND t5.parameter_pk = t2.pk\n INNER JOIN (\n SELECT * FROM (\n SELECT\n \'{2}\' as "MO",\n p_mo."BTSNAME" as sitename,\n p_mo."varDateTime" as date_time,\n unnest(array[{0}]) AS pname,\n unnest(array[{1}]) AS pvalue\n FROM\n hua_cm_2g.{2} t_mo\n {3}\n ) TT\n ) t4 on t4.pname = t2.name AND trim(t4.pvalue) != t1.value\n INNER JOIN live_network.sites t7 on t7.name = t4.sitename \n INNER JOIN live_network.nodes t8 on t8.pk = t7.node_pk\n INNER JOIN vendors t9 on t9.pk = t7.vendor_pk\n INNER JOIN technologies t10 ON t10.pk = t7.tech_pk\n ) TT1 ON TT2.node = TT1.node\n AND TT2.site = TT1.site \n AND TT2.mo = TT1.mo\n AND 
TT2.parameter = TT1.parameter\n AND TT2.bvalue = TT1.bvalue\n AND TT2.nvalue = TT1.nvalue\n )\n UPDATE network_audit.baseline_site_parameters AS nb\n SET \n date_modified = rd.date_added, \n age=DATE_PART(\'day\',AGE(nb.date_added, rd.date_added))\n FROM \n rd \n where \n rd.pk = nb.pk\n '.format(str_param_names, str_param_values, mo_name, cell_level_join) print(sql) cur.execute(sql)<|docstring|>Generate Huawei 2G baseline discrepancies for site level parameters<|endoftext|>
f3c6e4959ee7d07618d43c0dabf1a251df725347963301d8a6f3b193128cad05
def generate_huawei_2g_node_level_discrepancies(self): 'Generate Huawei 2G baseline discrepancies for node level parameters' engine = create_engine('postgresql://bodastage:password@database/bts') vendor_pk = 2 tech_pk = 1 schema_name = 'hua_cm_2g' conn = psycopg2.connect('dbname=bts user=bodastage password=password host=database') conn.autocommit = True cur = conn.cursor() sql = '\n SELECT DISTINCT\n t3.name as mo,\n t3.pk as pk,\n t3.affect_level\n FROM \n live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk \n AND t3.vendor_pk = {} AND t3.tech_pk = {}\n AND t3.affect_level = 7 -- BSC\n '.format(vendor_pk, tech_pk) cur.execute(sql) mo_list = cur.fetchall() for mo in mo_list: (mo_name, mo_pk, mo_affect_level) = mo sql = "\n SELECT \n t2.name as pname,\n t2.pk as pk\n FROM \n live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk \n INNER JOIN network_entities t4 on t4.pk = t3.affect_level\n AND t3.vendor_pk = {} AND t3.tech_pk = {}\n AND t3.affect_level = 7 -- BSC\n WHERE\n t3.name = '{}'\n ".format(vendor_pk, tech_pk, mo_name) cur.execute(sql) parameters = cur.fetchall() attr_list = [p[0] for p in parameters] str_param_values = ','.join(['t_mo.{0}{1}{0}'.format('"', p) for p in attr_list]) str_param_names = ','.join(['{0}{1}{0}'.format("'", p) for p in attr_list]) cell_level_join = ' INNER JOIN {0}.BSCBASIC p_mo ON p_mo.neid = t_mo.neid \n AND p_mo.module_type = t_mo.module_type '.format(schema_name) sql = '\n INSERT INTO network_audit.baseline_node_parameters \n (node, mo, parameter, bvalue, nvalue, vendor, technology, age, modified_by, added_by, date_added, date_modified)\n SELECT TT1.* FROM (\n SELECT\n t8.name as node,\n t3.name as mo,\n t2.name as parameter,\n t1.value as bvalue,\n TRIM(t4.pvalue) as nvalue,\n t9.name as vendor,\n t10.name as technology,\n 1 as age,\n 0 as 
modified_by,\n 0 as added_by,\n date_time as date_added,\n date_time as date_modified\n from live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk\n INNER JOIN live_network.baseline_parameter_config t5 on t5.mo_pk = t3.pk AND t5.parameter_pk = t2.pk\n INNER JOIN (\n SELECT * FROM (\n SELECT\n \'{2}\' as "MO",\n p_mo.neid as node,\n p_mo."varDateTime" as date_time,\n unnest(array[{0}]) AS pname,\n unnest(array[{1}]) AS pvalue\n FROM\n hua_cm_2g.{2} t_mo\n {3}\n ) TT\n ) t4 on t4.pname = t2.name AND trim(t4.pvalue) != t1.value \n INNER JOIN live_network.nodes t8 on t8.name = t4.node\n INNER JOIN vendors t9 on t9.pk = t8.vendor_pk\n INNER JOIN technologies t10 ON t10.pk = t8.tech_pk\n ) TT1\n LEFT JOIN network_audit.baseline_node_parameters TT2 on TT2.node = TT1.node\n AND TT2.mo = TT1.mo\n AND TT2.parameter = TT1.parameter\n AND TT2.bvalue = TT1.bvalue\n AND TT2.nvalue = TT1.nvalue\n WHERE\n TT2.node is NULL\n '.format(str_param_names, str_param_values, mo_name, cell_level_join) print(sql) cur.execute(sql) sql = '\n WITH rd AS (\n SELECT TT2.* FROM \n network_audit.baseline_node_parameters TT2\n LEFT JOIN \n (\n select\n t8.name as node,\n t3.name as mo,\n t2.name as parameter,\n t1.value as bvalue,\n TRIM(t4.pvalue) as nvalue,\n t9.name as vendor,\n t10.name as technology,\n 0 as modified_by,\n 0 as added_by,\n date_time as date_added,\n date_time as date_modified\n from live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk\n INNER JOIN live_network.baseline_parameter_config t5 on t5.mo_pk = t3.pk AND t5.parameter_pk = t2.pk\n INNER JOIN (\n SELECT * FROM (\n SELECT\n \'{2}\' as "MO",\n p_mo.neid as node,\n p_mo."varDateTime" as date_time,\n unnest(array[{0}]) AS pname,\n unnest(array[{1}]) AS pvalue\n FROM\n hua_cm_2g.{2} t_mo\n {3}\n ) TT\n ) t4 on t4.pname = t2.name AND 
trim(t4.pvalue) != t1.value\n INNER JOIN live_network.nodes t8 on t8.name = t4.node\n INNER JOIN vendors t9 on t9.pk = t8.vendor_pk\n INNER JOIN technologies t10 ON t10.pk = t8.tech_pk\n ) TT1 ON TT2.node = TT1.node\n AND TT2.mo = TT1.mo\n AND TT2.parameter = TT1.parameter\n AND TT2.bvalue = TT1.bvalue\n AND TT2.nvalue = TT1.nvalue\n WHERE\n TT1.node IS NULL\n )\n DELETE FROM network_audit.baseline_node_parameters t1\n WHERE t1.pk IN (SELECT pk from rd)\n '.format(str_param_names, str_param_values, mo_name, cell_level_join) print(sql) cur.execute(sql) sql = '\n WITH rd AS (\n SELECT TT2.pk, TT1.* FROM \n network_audit.baseline_node_parameters TT2\n INNER JOIN \n (\n select\n t8.name as node,\n t3.name as mo,\n t2.name as parameter,\n t1.value as bvalue,\n trim(t4.pvalue) as nvalue,\n t9.name as vendor,\n t10.name as technology,\n 0 as modified_by,\n 0 as added_by,\n date_time as date_added,\n date_time as date_modified\n from live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk\n INNER JOIN live_network.baseline_parameter_config t5 on t5.mo_pk = t3.pk AND t5.parameter_pk = t2.pk\n INNER JOIN (\n SELECT * FROM (\n SELECT\n \'{2}\' as "MO",\n p_mo.neid as node,\n p_mo."varDateTime" as date_time,\n unnest(array[{0}]) AS pname,\n unnest(array[{1}]) AS pvalue\n FROM\n hua_cm_2g.{2} t_mo\n {3}\n ) TT\n ) t4 on t4.pname = t2.name AND trim(t4.pvalue) != t1.value\n INNER JOIN live_network.nodes t8 on t8.name = t4.node\n INNER JOIN vendors t9 on t9.pk = t8.vendor_pk\n INNER JOIN technologies t10 ON t10.pk = t8.tech_pk\n ) TT1 ON TT2.node = TT1.node\n AND TT2.mo = TT1.mo\n AND TT2.parameter = TT1.parameter\n AND TT2.bvalue = TT1.bvalue\n AND TT2.nvalue = TT1.nvalue\n )\n UPDATE network_audit.baseline_node_parameters AS nb\n SET \n date_modified = rd.date_added, \n age=DATE_PART(\'day\',AGE(nb.date_added, rd.date_added))\n FROM \n rd \n where \n rd.pk = nb.pk\n 
'.format(str_param_names, str_param_values, mo_name, cell_level_join) print(sql) cur.execute(sql)
Generate Huawei 2G baseline discrepancies for node level parameters
mediation/packages/bts/network_baseline.py
generate_huawei_2g_node_level_discrepancies
gitter-badger/bts-ce
0
python
def generate_huawei_2g_node_level_discrepancies(self): engine = create_engine('postgresql://bodastage:password@database/bts') vendor_pk = 2 tech_pk = 1 schema_name = 'hua_cm_2g' conn = psycopg2.connect('dbname=bts user=bodastage password=password host=database') conn.autocommit = True cur = conn.cursor() sql = '\n SELECT DISTINCT\n t3.name as mo,\n t3.pk as pk,\n t3.affect_level\n FROM \n live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk \n AND t3.vendor_pk = {} AND t3.tech_pk = {}\n AND t3.affect_level = 7 -- BSC\n '.format(vendor_pk, tech_pk) cur.execute(sql) mo_list = cur.fetchall() for mo in mo_list: (mo_name, mo_pk, mo_affect_level) = mo sql = "\n SELECT \n t2.name as pname,\n t2.pk as pk\n FROM \n live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk \n INNER JOIN network_entities t4 on t4.pk = t3.affect_level\n AND t3.vendor_pk = {} AND t3.tech_pk = {}\n AND t3.affect_level = 7 -- BSC\n WHERE\n t3.name = '{}'\n ".format(vendor_pk, tech_pk, mo_name) cur.execute(sql) parameters = cur.fetchall() attr_list = [p[0] for p in parameters] str_param_values = ','.join(['t_mo.{0}{1}{0}'.format('"', p) for p in attr_list]) str_param_names = ','.join(['{0}{1}{0}'.format("'", p) for p in attr_list]) cell_level_join = ' INNER JOIN {0}.BSCBASIC p_mo ON p_mo.neid = t_mo.neid \n AND p_mo.module_type = t_mo.module_type '.format(schema_name) sql = '\n INSERT INTO network_audit.baseline_node_parameters \n (node, mo, parameter, bvalue, nvalue, vendor, technology, age, modified_by, added_by, date_added, date_modified)\n SELECT TT1.* FROM (\n SELECT\n t8.name as node,\n t3.name as mo,\n t2.name as parameter,\n t1.value as bvalue,\n TRIM(t4.pvalue) as nvalue,\n t9.name as vendor,\n t10.name as technology,\n 1 as age,\n 0 as modified_by,\n 0 as added_by,\n date_time as date_added,\n date_time 
as date_modified\n from live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk\n INNER JOIN live_network.baseline_parameter_config t5 on t5.mo_pk = t3.pk AND t5.parameter_pk = t2.pk\n INNER JOIN (\n SELECT * FROM (\n SELECT\n \'{2}\' as "MO",\n p_mo.neid as node,\n p_mo."varDateTime" as date_time,\n unnest(array[{0}]) AS pname,\n unnest(array[{1}]) AS pvalue\n FROM\n hua_cm_2g.{2} t_mo\n {3}\n ) TT\n ) t4 on t4.pname = t2.name AND trim(t4.pvalue) != t1.value \n INNER JOIN live_network.nodes t8 on t8.name = t4.node\n INNER JOIN vendors t9 on t9.pk = t8.vendor_pk\n INNER JOIN technologies t10 ON t10.pk = t8.tech_pk\n ) TT1\n LEFT JOIN network_audit.baseline_node_parameters TT2 on TT2.node = TT1.node\n AND TT2.mo = TT1.mo\n AND TT2.parameter = TT1.parameter\n AND TT2.bvalue = TT1.bvalue\n AND TT2.nvalue = TT1.nvalue\n WHERE\n TT2.node is NULL\n '.format(str_param_names, str_param_values, mo_name, cell_level_join) print(sql) cur.execute(sql) sql = '\n WITH rd AS (\n SELECT TT2.* FROM \n network_audit.baseline_node_parameters TT2\n LEFT JOIN \n (\n select\n t8.name as node,\n t3.name as mo,\n t2.name as parameter,\n t1.value as bvalue,\n TRIM(t4.pvalue) as nvalue,\n t9.name as vendor,\n t10.name as technology,\n 0 as modified_by,\n 0 as added_by,\n date_time as date_added,\n date_time as date_modified\n from live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk\n INNER JOIN live_network.baseline_parameter_config t5 on t5.mo_pk = t3.pk AND t5.parameter_pk = t2.pk\n INNER JOIN (\n SELECT * FROM (\n SELECT\n \'{2}\' as "MO",\n p_mo.neid as node,\n p_mo."varDateTime" as date_time,\n unnest(array[{0}]) AS pname,\n unnest(array[{1}]) AS pvalue\n FROM\n hua_cm_2g.{2} t_mo\n {3}\n ) TT\n ) t4 on t4.pname = t2.name AND trim(t4.pvalue) != t1.value\n INNER JOIN live_network.nodes t8 on t8.name = 
t4.node\n INNER JOIN vendors t9 on t9.pk = t8.vendor_pk\n INNER JOIN technologies t10 ON t10.pk = t8.tech_pk\n ) TT1 ON TT2.node = TT1.node\n AND TT2.mo = TT1.mo\n AND TT2.parameter = TT1.parameter\n AND TT2.bvalue = TT1.bvalue\n AND TT2.nvalue = TT1.nvalue\n WHERE\n TT1.node IS NULL\n )\n DELETE FROM network_audit.baseline_node_parameters t1\n WHERE t1.pk IN (SELECT pk from rd)\n '.format(str_param_names, str_param_values, mo_name, cell_level_join) print(sql) cur.execute(sql) sql = '\n WITH rd AS (\n SELECT TT2.pk, TT1.* FROM \n network_audit.baseline_node_parameters TT2\n INNER JOIN \n (\n select\n t8.name as node,\n t3.name as mo,\n t2.name as parameter,\n t1.value as bvalue,\n trim(t4.pvalue) as nvalue,\n t9.name as vendor,\n t10.name as technology,\n 0 as modified_by,\n 0 as added_by,\n date_time as date_added,\n date_time as date_modified\n from live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk\n INNER JOIN live_network.baseline_parameter_config t5 on t5.mo_pk = t3.pk AND t5.parameter_pk = t2.pk\n INNER JOIN (\n SELECT * FROM (\n SELECT\n \'{2}\' as "MO",\n p_mo.neid as node,\n p_mo."varDateTime" as date_time,\n unnest(array[{0}]) AS pname,\n unnest(array[{1}]) AS pvalue\n FROM\n hua_cm_2g.{2} t_mo\n {3}\n ) TT\n ) t4 on t4.pname = t2.name AND trim(t4.pvalue) != t1.value\n INNER JOIN live_network.nodes t8 on t8.name = t4.node\n INNER JOIN vendors t9 on t9.pk = t8.vendor_pk\n INNER JOIN technologies t10 ON t10.pk = t8.tech_pk\n ) TT1 ON TT2.node = TT1.node\n AND TT2.mo = TT1.mo\n AND TT2.parameter = TT1.parameter\n AND TT2.bvalue = TT1.bvalue\n AND TT2.nvalue = TT1.nvalue\n )\n UPDATE network_audit.baseline_node_parameters AS nb\n SET \n date_modified = rd.date_added, \n age=DATE_PART(\'day\',AGE(nb.date_added, rd.date_added))\n FROM \n rd \n where \n rd.pk = nb.pk\n '.format(str_param_names, str_param_values, mo_name, cell_level_join) print(sql) cur.execute(sql)
def generate_huawei_2g_node_level_discrepancies(self): engine = create_engine('postgresql://bodastage:password@database/bts') vendor_pk = 2 tech_pk = 1 schema_name = 'hua_cm_2g' conn = psycopg2.connect('dbname=bts user=bodastage password=password host=database') conn.autocommit = True cur = conn.cursor() sql = '\n SELECT DISTINCT\n t3.name as mo,\n t3.pk as pk,\n t3.affect_level\n FROM \n live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk \n AND t3.vendor_pk = {} AND t3.tech_pk = {}\n AND t3.affect_level = 7 -- BSC\n '.format(vendor_pk, tech_pk) cur.execute(sql) mo_list = cur.fetchall() for mo in mo_list: (mo_name, mo_pk, mo_affect_level) = mo sql = "\n SELECT \n t2.name as pname,\n t2.pk as pk\n FROM \n live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk \n INNER JOIN network_entities t4 on t4.pk = t3.affect_level\n AND t3.vendor_pk = {} AND t3.tech_pk = {}\n AND t3.affect_level = 7 -- BSC\n WHERE\n t3.name = '{}'\n ".format(vendor_pk, tech_pk, mo_name) cur.execute(sql) parameters = cur.fetchall() attr_list = [p[0] for p in parameters] str_param_values = ','.join(['t_mo.{0}{1}{0}'.format('"', p) for p in attr_list]) str_param_names = ','.join(['{0}{1}{0}'.format("'", p) for p in attr_list]) cell_level_join = ' INNER JOIN {0}.BSCBASIC p_mo ON p_mo.neid = t_mo.neid \n AND p_mo.module_type = t_mo.module_type '.format(schema_name) sql = '\n INSERT INTO network_audit.baseline_node_parameters \n (node, mo, parameter, bvalue, nvalue, vendor, technology, age, modified_by, added_by, date_added, date_modified)\n SELECT TT1.* FROM (\n SELECT\n t8.name as node,\n t3.name as mo,\n t2.name as parameter,\n t1.value as bvalue,\n TRIM(t4.pvalue) as nvalue,\n t9.name as vendor,\n t10.name as technology,\n 1 as age,\n 0 as modified_by,\n 0 as added_by,\n date_time as date_added,\n date_time 
as date_modified\n from live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk\n INNER JOIN live_network.baseline_parameter_config t5 on t5.mo_pk = t3.pk AND t5.parameter_pk = t2.pk\n INNER JOIN (\n SELECT * FROM (\n SELECT\n \'{2}\' as "MO",\n p_mo.neid as node,\n p_mo."varDateTime" as date_time,\n unnest(array[{0}]) AS pname,\n unnest(array[{1}]) AS pvalue\n FROM\n hua_cm_2g.{2} t_mo\n {3}\n ) TT\n ) t4 on t4.pname = t2.name AND trim(t4.pvalue) != t1.value \n INNER JOIN live_network.nodes t8 on t8.name = t4.node\n INNER JOIN vendors t9 on t9.pk = t8.vendor_pk\n INNER JOIN technologies t10 ON t10.pk = t8.tech_pk\n ) TT1\n LEFT JOIN network_audit.baseline_node_parameters TT2 on TT2.node = TT1.node\n AND TT2.mo = TT1.mo\n AND TT2.parameter = TT1.parameter\n AND TT2.bvalue = TT1.bvalue\n AND TT2.nvalue = TT1.nvalue\n WHERE\n TT2.node is NULL\n '.format(str_param_names, str_param_values, mo_name, cell_level_join) print(sql) cur.execute(sql) sql = '\n WITH rd AS (\n SELECT TT2.* FROM \n network_audit.baseline_node_parameters TT2\n LEFT JOIN \n (\n select\n t8.name as node,\n t3.name as mo,\n t2.name as parameter,\n t1.value as bvalue,\n TRIM(t4.pvalue) as nvalue,\n t9.name as vendor,\n t10.name as technology,\n 0 as modified_by,\n 0 as added_by,\n date_time as date_added,\n date_time as date_modified\n from live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk\n INNER JOIN live_network.baseline_parameter_config t5 on t5.mo_pk = t3.pk AND t5.parameter_pk = t2.pk\n INNER JOIN (\n SELECT * FROM (\n SELECT\n \'{2}\' as "MO",\n p_mo.neid as node,\n p_mo."varDateTime" as date_time,\n unnest(array[{0}]) AS pname,\n unnest(array[{1}]) AS pvalue\n FROM\n hua_cm_2g.{2} t_mo\n {3}\n ) TT\n ) t4 on t4.pname = t2.name AND trim(t4.pvalue) != t1.value\n INNER JOIN live_network.nodes t8 on t8.name = 
t4.node\n INNER JOIN vendors t9 on t9.pk = t8.vendor_pk\n INNER JOIN technologies t10 ON t10.pk = t8.tech_pk\n ) TT1 ON TT2.node = TT1.node\n AND TT2.mo = TT1.mo\n AND TT2.parameter = TT1.parameter\n AND TT2.bvalue = TT1.bvalue\n AND TT2.nvalue = TT1.nvalue\n WHERE\n TT1.node IS NULL\n )\n DELETE FROM network_audit.baseline_node_parameters t1\n WHERE t1.pk IN (SELECT pk from rd)\n '.format(str_param_names, str_param_values, mo_name, cell_level_join) print(sql) cur.execute(sql) sql = '\n WITH rd AS (\n SELECT TT2.pk, TT1.* FROM \n network_audit.baseline_node_parameters TT2\n INNER JOIN \n (\n select\n t8.name as node,\n t3.name as mo,\n t2.name as parameter,\n t1.value as bvalue,\n trim(t4.pvalue) as nvalue,\n t9.name as vendor,\n t10.name as technology,\n 0 as modified_by,\n 0 as added_by,\n date_time as date_added,\n date_time as date_modified\n from live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk\n INNER JOIN live_network.baseline_parameter_config t5 on t5.mo_pk = t3.pk AND t5.parameter_pk = t2.pk\n INNER JOIN (\n SELECT * FROM (\n SELECT\n \'{2}\' as "MO",\n p_mo.neid as node,\n p_mo."varDateTime" as date_time,\n unnest(array[{0}]) AS pname,\n unnest(array[{1}]) AS pvalue\n FROM\n hua_cm_2g.{2} t_mo\n {3}\n ) TT\n ) t4 on t4.pname = t2.name AND trim(t4.pvalue) != t1.value\n INNER JOIN live_network.nodes t8 on t8.name = t4.node\n INNER JOIN vendors t9 on t9.pk = t8.vendor_pk\n INNER JOIN technologies t10 ON t10.pk = t8.tech_pk\n ) TT1 ON TT2.node = TT1.node\n AND TT2.mo = TT1.mo\n AND TT2.parameter = TT1.parameter\n AND TT2.bvalue = TT1.bvalue\n AND TT2.nvalue = TT1.nvalue\n )\n UPDATE network_audit.baseline_node_parameters AS nb\n SET \n date_modified = rd.date_added, \n age=DATE_PART(\'day\',AGE(nb.date_added, rd.date_added))\n FROM \n rd \n where \n rd.pk = nb.pk\n '.format(str_param_names, str_param_values, mo_name, cell_level_join) print(sql) 
cur.execute(sql)<|docstring|>Generate Huawei 2G baseline discrepancies for node level parameters<|endoftext|>
f35e9f78a8797651f9e8d72a5e448fcc106422287362df027471912ea90a9ece
def create_genres_from_dictionary(): '\n Returns a sorted list of Genre objects from the dictionary of genres\n ' youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION, developerKey=API_KEY) dict_of_genre = create_genres_dictionary() list_of_genres = [] dict_keys = sorted(dict_of_genre.keys()) for i in dict_keys: tmp = dict_of_genre[i] for j in tmp: search_response = youtube.search().list(q=j, part='snippet', type='playlist', maxResults=1).execute() for result in search_response.get('items', []): playlist_id = str(result['id']['playlistId']) playlist_response = youtube.playlists().list(part='player', id=playlist_id, maxResults=1).execute() for playlist_res in playlist_response.get('items', []): genre = Genre(name=j, playlist_embed_tag=playlist_res['player'], playlist_url='None') list_of_genres.append(genre) list_of_genres.sort(key=(lambda x: x.name), reverse=True) return list_of_genres
Returns a sorted list of Genre objects from the dictionary of genres
genre_explorer/genres/genres_playlist.py
create_genres_from_dictionary
Mikerah/GenreExplorer
1
python
def create_genres_from_dictionary(): '\n \n ' youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION, developerKey=API_KEY) dict_of_genre = create_genres_dictionary() list_of_genres = [] dict_keys = sorted(dict_of_genre.keys()) for i in dict_keys: tmp = dict_of_genre[i] for j in tmp: search_response = youtube.search().list(q=j, part='snippet', type='playlist', maxResults=1).execute() for result in search_response.get('items', []): playlist_id = str(result['id']['playlistId']) playlist_response = youtube.playlists().list(part='player', id=playlist_id, maxResults=1).execute() for playlist_res in playlist_response.get('items', []): genre = Genre(name=j, playlist_embed_tag=playlist_res['player'], playlist_url='None') list_of_genres.append(genre) list_of_genres.sort(key=(lambda x: x.name), reverse=True) return list_of_genres
def create_genres_from_dictionary(): '\n \n ' youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION, developerKey=API_KEY) dict_of_genre = create_genres_dictionary() list_of_genres = [] dict_keys = sorted(dict_of_genre.keys()) for i in dict_keys: tmp = dict_of_genre[i] for j in tmp: search_response = youtube.search().list(q=j, part='snippet', type='playlist', maxResults=1).execute() for result in search_response.get('items', []): playlist_id = str(result['id']['playlistId']) playlist_response = youtube.playlists().list(part='player', id=playlist_id, maxResults=1).execute() for playlist_res in playlist_response.get('items', []): genre = Genre(name=j, playlist_embed_tag=playlist_res['player'], playlist_url='None') list_of_genres.append(genre) list_of_genres.sort(key=(lambda x: x.name), reverse=True) return list_of_genres<|docstring|>Returns a sorted list of Genre objects from the dictionary of genres<|endoftext|>
d6a0df8d15d94b8e5cdd0655d8c6840169b654298fc7b381a61ad10f2d970442
async def test_verify_successful_account_history_subscription(event_loop): 'Test account history subscription handling' api_token = os.environ['BP_PRO_API_TOKEN'] test_host = os.environ['TEST_HOST'] when_subscribed = event_loop.create_future() when_unsubscribed = event_loop.create_future() async def handle_message(json_message): if (json_message['type'] == 'SUBSCRIPTIONS'): when_subscribed.set_result('subscribed') elif (json_message['type'] == 'UNSUBSCRIBED'): when_unsubscribed.set_result('unsubscribed') else: LOG.info('Ignored Message %s', json_message) client = BitpandaProWebsocketClient(api_token, test_host, handle_message) subscription = AccountHistorySubscription() (await client.start(Subscriptions([subscription]))) LOG.info((await when_subscribed)) (await client.unsubscribe(Unsubscription([ChannelName.account_history.value]))) LOG.info((await when_unsubscribed)) (await client.close())
Test account history subscription handling
bpprosdk/tests/test_account_history_channel.py
test_verify_successful_account_history_subscription
Tibi-Bitpanda/bitpanda-pro-sdk-py
17
python
async def test_verify_successful_account_history_subscription(event_loop): api_token = os.environ['BP_PRO_API_TOKEN'] test_host = os.environ['TEST_HOST'] when_subscribed = event_loop.create_future() when_unsubscribed = event_loop.create_future() async def handle_message(json_message): if (json_message['type'] == 'SUBSCRIPTIONS'): when_subscribed.set_result('subscribed') elif (json_message['type'] == 'UNSUBSCRIBED'): when_unsubscribed.set_result('unsubscribed') else: LOG.info('Ignored Message %s', json_message) client = BitpandaProWebsocketClient(api_token, test_host, handle_message) subscription = AccountHistorySubscription() (await client.start(Subscriptions([subscription]))) LOG.info((await when_subscribed)) (await client.unsubscribe(Unsubscription([ChannelName.account_history.value]))) LOG.info((await when_unsubscribed)) (await client.close())
async def test_verify_successful_account_history_subscription(event_loop): api_token = os.environ['BP_PRO_API_TOKEN'] test_host = os.environ['TEST_HOST'] when_subscribed = event_loop.create_future() when_unsubscribed = event_loop.create_future() async def handle_message(json_message): if (json_message['type'] == 'SUBSCRIPTIONS'): when_subscribed.set_result('subscribed') elif (json_message['type'] == 'UNSUBSCRIBED'): when_unsubscribed.set_result('unsubscribed') else: LOG.info('Ignored Message %s', json_message) client = BitpandaProWebsocketClient(api_token, test_host, handle_message) subscription = AccountHistorySubscription() (await client.start(Subscriptions([subscription]))) LOG.info((await when_subscribed)) (await client.unsubscribe(Unsubscription([ChannelName.account_history.value]))) LOG.info((await when_unsubscribed)) (await client.close())<|docstring|>Test account history subscription handling<|endoftext|>
271e0c22cf4e51863850850aa5bcb3f63da9ac14715f1e5856e7233b1991b001
async def log_messages(json_message): 'Callback only logging messages' LOG.debug('message: %s', json_message)
Callback only logging messages
bpprosdk/tests/test_account_history_channel.py
log_messages
Tibi-Bitpanda/bitpanda-pro-sdk-py
17
python
async def log_messages(json_message): LOG.debug('message: %s', json_message)
async def log_messages(json_message): LOG.debug('message: %s', json_message)<|docstring|>Callback only logging messages<|endoftext|>
6fae4ba00268e4390313865d79f741831e35c3603fb1948a50c2dca48ee4180f
async def test_handle_account_balances(): 'Test account balance snapshot handling' client = AdvancedBitpandaProWebsocketClient('irrelevant', 'irrelevant', log_messages) (await client.handle_message(account_balances_json)) balance = client.state.balances['EUR'] assert (balance.available == Decimal('6606076.62363137'))
Test account balance snapshot handling
bpprosdk/tests/test_account_history_channel.py
test_handle_account_balances
Tibi-Bitpanda/bitpanda-pro-sdk-py
17
python
async def test_handle_account_balances(): client = AdvancedBitpandaProWebsocketClient('irrelevant', 'irrelevant', log_messages) (await client.handle_message(account_balances_json)) balance = client.state.balances['EUR'] assert (balance.available == Decimal('6606076.62363137'))
async def test_handle_account_balances(): client = AdvancedBitpandaProWebsocketClient('irrelevant', 'irrelevant', log_messages) (await client.handle_message(account_balances_json)) balance = client.state.balances['EUR'] assert (balance.available == Decimal('6606076.62363137'))<|docstring|>Test account balance snapshot handling<|endoftext|>
615f0d00b5a86269562935fa922771662c5d85c5825ad073c418a99627c6f574
async def test_handle_active_orders_snapshot(): 'Test active orders snapshot handling' client = AdvancedBitpandaProWebsocketClient('irrelevant', 'irrelevant', log_messages) (await client.handle_message(active_orders_snapshot_json)) open_orders = client.state.open_orders_by_order_id assert (len(open_orders) == 1), 'expected 1 order' order = open_orders.get('6894fe05-4071-49ca-813e-d88d3621e168') assert (order.instrument_code == 'BTC_EUR') assert (order.order_id == '6894fe05-4071-49ca-813e-d88d3621e168') assert (order.type == 'LIMIT') assert (order.time_in_force == 'GOOD_TILL_CANCELLED') assert (order.side == 'SELL') assert (order.price == Decimal('18500.0')) assert (order.remaining == Decimal('0.1')) assert (order.client_id == '082e0b7c-1888-4db2-b53e-208b64ae09b3')
Test active orders snapshot handling
bpprosdk/tests/test_account_history_channel.py
test_handle_active_orders_snapshot
Tibi-Bitpanda/bitpanda-pro-sdk-py
17
python
async def test_handle_active_orders_snapshot(): client = AdvancedBitpandaProWebsocketClient('irrelevant', 'irrelevant', log_messages) (await client.handle_message(active_orders_snapshot_json)) open_orders = client.state.open_orders_by_order_id assert (len(open_orders) == 1), 'expected 1 order' order = open_orders.get('6894fe05-4071-49ca-813e-d88d3621e168') assert (order.instrument_code == 'BTC_EUR') assert (order.order_id == '6894fe05-4071-49ca-813e-d88d3621e168') assert (order.type == 'LIMIT') assert (order.time_in_force == 'GOOD_TILL_CANCELLED') assert (order.side == 'SELL') assert (order.price == Decimal('18500.0')) assert (order.remaining == Decimal('0.1')) assert (order.client_id == '082e0b7c-1888-4db2-b53e-208b64ae09b3')
async def test_handle_active_orders_snapshot(): client = AdvancedBitpandaProWebsocketClient('irrelevant', 'irrelevant', log_messages) (await client.handle_message(active_orders_snapshot_json)) open_orders = client.state.open_orders_by_order_id assert (len(open_orders) == 1), 'expected 1 order' order = open_orders.get('6894fe05-4071-49ca-813e-d88d3621e168') assert (order.instrument_code == 'BTC_EUR') assert (order.order_id == '6894fe05-4071-49ca-813e-d88d3621e168') assert (order.type == 'LIMIT') assert (order.time_in_force == 'GOOD_TILL_CANCELLED') assert (order.side == 'SELL') assert (order.price == Decimal('18500.0')) assert (order.remaining == Decimal('0.1')) assert (order.client_id == '082e0b7c-1888-4db2-b53e-208b64ae09b3')<|docstring|>Test active orders snapshot handling<|endoftext|>
141ffdf4f9e8e6213ded601076ef1f37008b65c27a25b9348c4f922e59eb44ab
async def test_handle_active_orders_snapshot_multiple_instruments(): 'Test active orders snapshot handling' client = AdvancedBitpandaProWebsocketClient('irrelevant', 'irrelevant', log_messages) (await client.handle_message(active_orders_snapshot_multiple_instruments_json)) open_orders = client.state.open_orders_by_order_id assert (len(open_orders) == 3) btc_eur_order = open_orders.get('ce246752-18c9-41a1-872e-759a0016b9c3') assert (btc_eur_order.instrument_code == 'BTC_EUR') eth_eur_order = open_orders.get('94cd6c5a-5ab8-4678-b932-7f81083d1f08') assert (eth_eur_order.instrument_code == 'ETH_EUR')
Test active orders snapshot handling
bpprosdk/tests/test_account_history_channel.py
test_handle_active_orders_snapshot_multiple_instruments
Tibi-Bitpanda/bitpanda-pro-sdk-py
17
python
async def test_handle_active_orders_snapshot_multiple_instruments(): client = AdvancedBitpandaProWebsocketClient('irrelevant', 'irrelevant', log_messages) (await client.handle_message(active_orders_snapshot_multiple_instruments_json)) open_orders = client.state.open_orders_by_order_id assert (len(open_orders) == 3) btc_eur_order = open_orders.get('ce246752-18c9-41a1-872e-759a0016b9c3') assert (btc_eur_order.instrument_code == 'BTC_EUR') eth_eur_order = open_orders.get('94cd6c5a-5ab8-4678-b932-7f81083d1f08') assert (eth_eur_order.instrument_code == 'ETH_EUR')
async def test_handle_active_orders_snapshot_multiple_instruments(): client = AdvancedBitpandaProWebsocketClient('irrelevant', 'irrelevant', log_messages) (await client.handle_message(active_orders_snapshot_multiple_instruments_json)) open_orders = client.state.open_orders_by_order_id assert (len(open_orders) == 3) btc_eur_order = open_orders.get('ce246752-18c9-41a1-872e-759a0016b9c3') assert (btc_eur_order.instrument_code == 'BTC_EUR') eth_eur_order = open_orders.get('94cd6c5a-5ab8-4678-b932-7f81083d1f08') assert (eth_eur_order.instrument_code == 'ETH_EUR')<|docstring|>Test active orders snapshot handling<|endoftext|>
7f0684b82b0229339afa2f911a8af6b06d392f6254b3ea4762fdcd70b5541e4f
async def test_handle_inactive_orders_snapshot(): 'Test handling of inactive orders snapshot' client = AdvancedBitpandaProWebsocketClient('irrelevant', 'irrelevant', log_messages) (await client.handle_message(inactive_orders_snapshot_json)) inactive_orders = client.state.last_24h_inactive_orders assert (len(inactive_orders) == 4), 'expected 4 orders' order = inactive_orders.get('297bd6d8-ae68-4547-b414-0bfc87d13019') assert (order.instrument_code == 'BTC_EUR') assert (order.filled_amount == Decimal('0.2'))
Test handling of inactive orders snapshot
bpprosdk/tests/test_account_history_channel.py
test_handle_inactive_orders_snapshot
Tibi-Bitpanda/bitpanda-pro-sdk-py
17
python
async def test_handle_inactive_orders_snapshot(): client = AdvancedBitpandaProWebsocketClient('irrelevant', 'irrelevant', log_messages) (await client.handle_message(inactive_orders_snapshot_json)) inactive_orders = client.state.last_24h_inactive_orders assert (len(inactive_orders) == 4), 'expected 4 orders' order = inactive_orders.get('297bd6d8-ae68-4547-b414-0bfc87d13019') assert (order.instrument_code == 'BTC_EUR') assert (order.filled_amount == Decimal('0.2'))
async def test_handle_inactive_orders_snapshot(): client = AdvancedBitpandaProWebsocketClient('irrelevant', 'irrelevant', log_messages) (await client.handle_message(inactive_orders_snapshot_json)) inactive_orders = client.state.last_24h_inactive_orders assert (len(inactive_orders) == 4), 'expected 4 orders' order = inactive_orders.get('297bd6d8-ae68-4547-b414-0bfc87d13019') assert (order.instrument_code == 'BTC_EUR') assert (order.filled_amount == Decimal('0.2'))<|docstring|>Test handling of inactive orders snapshot<|endoftext|>
1a01b36eed667672ec02848e01cc40e00998a600d0e20b728ce05549a5ae5f26
async def test_handle_order_created_and_then_close(): ' Test handling of created order events' client = AdvancedBitpandaProWebsocketClient('irrelevant', 'irrelevant', log_messages) (await client.handle_message(account_balances_json)) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8975.828764802')) assert (balance.locked == Decimal('0.4')) (await client.handle_message(order_created_json)) expected_order = client.state.open_orders_by_order_id.get('65ecb524-4a7f-4b22-aa44-ec0b38d3db9c') assert (expected_order is None) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8974.828764802')) assert (balance.locked == Decimal('1.4')) (await client.handle_message(order_closed_json)) order = client.state.open_orders_by_order_id.get('65ecb524-4a7f-4b22-aa44-ec0b38d3db9c') assert (order is None) inactive_order = client.state.inactive_orders.get('65ecb524-4a7f-4b22-aa44-ec0b38d3db9c') assert (inactive_order is None) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8975.828764802')) assert (balance.locked == Decimal('0.4'))
Test handling of created order events
bpprosdk/tests/test_account_history_channel.py
test_handle_order_created_and_then_close
Tibi-Bitpanda/bitpanda-pro-sdk-py
17
python
async def test_handle_order_created_and_then_close(): ' ' client = AdvancedBitpandaProWebsocketClient('irrelevant', 'irrelevant', log_messages) (await client.handle_message(account_balances_json)) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8975.828764802')) assert (balance.locked == Decimal('0.4')) (await client.handle_message(order_created_json)) expected_order = client.state.open_orders_by_order_id.get('65ecb524-4a7f-4b22-aa44-ec0b38d3db9c') assert (expected_order is None) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8974.828764802')) assert (balance.locked == Decimal('1.4')) (await client.handle_message(order_closed_json)) order = client.state.open_orders_by_order_id.get('65ecb524-4a7f-4b22-aa44-ec0b38d3db9c') assert (order is None) inactive_order = client.state.inactive_orders.get('65ecb524-4a7f-4b22-aa44-ec0b38d3db9c') assert (inactive_order is None) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8975.828764802')) assert (balance.locked == Decimal('0.4'))
async def test_handle_order_created_and_then_close(): ' ' client = AdvancedBitpandaProWebsocketClient('irrelevant', 'irrelevant', log_messages) (await client.handle_message(account_balances_json)) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8975.828764802')) assert (balance.locked == Decimal('0.4')) (await client.handle_message(order_created_json)) expected_order = client.state.open_orders_by_order_id.get('65ecb524-4a7f-4b22-aa44-ec0b38d3db9c') assert (expected_order is None) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8974.828764802')) assert (balance.locked == Decimal('1.4')) (await client.handle_message(order_closed_json)) order = client.state.open_orders_by_order_id.get('65ecb524-4a7f-4b22-aa44-ec0b38d3db9c') assert (order is None) inactive_order = client.state.inactive_orders.get('65ecb524-4a7f-4b22-aa44-ec0b38d3db9c') assert (inactive_order is None) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8975.828764802')) assert (balance.locked == Decimal('0.4'))<|docstring|>Test handling of created order events<|endoftext|>
73879c54fdcab1b4025600186588f758024697b9e811a9efa23559f4a0e40839
async def test_handle_trade_settled_updates(): 'Test trade settlement events' client = AdvancedBitpandaProWebsocketClient('irrelevant', 'irrelevant', log_messages) (await client.handle_message(account_balances_json)) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8975.828764802')) assert (balance.locked == Decimal('0.4')) (await client.handle_message(order_created_json)) expected_order = client.state.open_orders_by_order_id.get('65ecb524-4a7f-4b22-aa44-ec0b38d3db9c') assert (expected_order is None) (await client.handle_message(order_created_orders_channel_json)) expected_order = client.state.open_orders_by_order_id.get('65ecb524-4a7f-4b22-aa44-ec0b38d3db9c') assert (expected_order is not None) assert (expected_order.remaining == Decimal('1.0')) assert (expected_order.order_id == '65ecb524-4a7f-4b22-aa44-ec0b38d3db9c') assert (expected_order.price == '8500.0') balance = client.state.balances['BTC'] assert (balance.available == Decimal('8974.828764802')) assert (balance.locked == Decimal('1.4')) (await client.handle_message(trade_settled_partially_json)) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8974.828764802')) assert (balance.locked == Decimal('0.9')) expected_order = client.state.open_orders_by_order_id.get('65ecb524-4a7f-4b22-aa44-ec0b38d3db9c') assert (expected_order is not None) assert (expected_order.remaining == Decimal('1.0')) assert (expected_order.order_id == '65ecb524-4a7f-4b22-aa44-ec0b38d3db9c') assert (expected_order.price == '8500.0') (await client.handle_message(trade_settled_json)) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8974.828764802')) assert (balance.locked == Decimal('0.4')) client.apply_trading_buffer() (await client.handle_message(trade_settled_order_done_json)) expected_order = client.state.open_orders_by_order_id.get('65ecb524-4a7f-4b22-aa44-ec0b38d3db9c') assert (expected_order is None) inactive_order = 
client.state.inactive_orders.get('65ecb524-4a7f-4b22-aa44-ec0b38d3db9c') assert (inactive_order is not None) assert (inactive_order.order_id == '65ecb524-4a7f-4b22-aa44-ec0b38d3db9c') assert (inactive_order.remaining == Decimal('0.0')) assert (inactive_order.order_id == '65ecb524-4a7f-4b22-aa44-ec0b38d3db9c') assert (inactive_order.price == '8500.0')
Test trade settlement events
bpprosdk/tests/test_account_history_channel.py
test_handle_trade_settled_updates
Tibi-Bitpanda/bitpanda-pro-sdk-py
17
python
async def test_handle_trade_settled_updates(): client = AdvancedBitpandaProWebsocketClient('irrelevant', 'irrelevant', log_messages) (await client.handle_message(account_balances_json)) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8975.828764802')) assert (balance.locked == Decimal('0.4')) (await client.handle_message(order_created_json)) expected_order = client.state.open_orders_by_order_id.get('65ecb524-4a7f-4b22-aa44-ec0b38d3db9c') assert (expected_order is None) (await client.handle_message(order_created_orders_channel_json)) expected_order = client.state.open_orders_by_order_id.get('65ecb524-4a7f-4b22-aa44-ec0b38d3db9c') assert (expected_order is not None) assert (expected_order.remaining == Decimal('1.0')) assert (expected_order.order_id == '65ecb524-4a7f-4b22-aa44-ec0b38d3db9c') assert (expected_order.price == '8500.0') balance = client.state.balances['BTC'] assert (balance.available == Decimal('8974.828764802')) assert (balance.locked == Decimal('1.4')) (await client.handle_message(trade_settled_partially_json)) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8974.828764802')) assert (balance.locked == Decimal('0.9')) expected_order = client.state.open_orders_by_order_id.get('65ecb524-4a7f-4b22-aa44-ec0b38d3db9c') assert (expected_order is not None) assert (expected_order.remaining == Decimal('1.0')) assert (expected_order.order_id == '65ecb524-4a7f-4b22-aa44-ec0b38d3db9c') assert (expected_order.price == '8500.0') (await client.handle_message(trade_settled_json)) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8974.828764802')) assert (balance.locked == Decimal('0.4')) client.apply_trading_buffer() (await client.handle_message(trade_settled_order_done_json)) expected_order = client.state.open_orders_by_order_id.get('65ecb524-4a7f-4b22-aa44-ec0b38d3db9c') assert (expected_order is None) inactive_order = 
client.state.inactive_orders.get('65ecb524-4a7f-4b22-aa44-ec0b38d3db9c') assert (inactive_order is not None) assert (inactive_order.order_id == '65ecb524-4a7f-4b22-aa44-ec0b38d3db9c') assert (inactive_order.remaining == Decimal('0.0')) assert (inactive_order.order_id == '65ecb524-4a7f-4b22-aa44-ec0b38d3db9c') assert (inactive_order.price == '8500.0')
async def test_handle_trade_settled_updates(): client = AdvancedBitpandaProWebsocketClient('irrelevant', 'irrelevant', log_messages) (await client.handle_message(account_balances_json)) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8975.828764802')) assert (balance.locked == Decimal('0.4')) (await client.handle_message(order_created_json)) expected_order = client.state.open_orders_by_order_id.get('65ecb524-4a7f-4b22-aa44-ec0b38d3db9c') assert (expected_order is None) (await client.handle_message(order_created_orders_channel_json)) expected_order = client.state.open_orders_by_order_id.get('65ecb524-4a7f-4b22-aa44-ec0b38d3db9c') assert (expected_order is not None) assert (expected_order.remaining == Decimal('1.0')) assert (expected_order.order_id == '65ecb524-4a7f-4b22-aa44-ec0b38d3db9c') assert (expected_order.price == '8500.0') balance = client.state.balances['BTC'] assert (balance.available == Decimal('8974.828764802')) assert (balance.locked == Decimal('1.4')) (await client.handle_message(trade_settled_partially_json)) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8974.828764802')) assert (balance.locked == Decimal('0.9')) expected_order = client.state.open_orders_by_order_id.get('65ecb524-4a7f-4b22-aa44-ec0b38d3db9c') assert (expected_order is not None) assert (expected_order.remaining == Decimal('1.0')) assert (expected_order.order_id == '65ecb524-4a7f-4b22-aa44-ec0b38d3db9c') assert (expected_order.price == '8500.0') (await client.handle_message(trade_settled_json)) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8974.828764802')) assert (balance.locked == Decimal('0.4')) client.apply_trading_buffer() (await client.handle_message(trade_settled_order_done_json)) expected_order = client.state.open_orders_by_order_id.get('65ecb524-4a7f-4b22-aa44-ec0b38d3db9c') assert (expected_order is None) inactive_order = 
client.state.inactive_orders.get('65ecb524-4a7f-4b22-aa44-ec0b38d3db9c') assert (inactive_order is not None) assert (inactive_order.order_id == '65ecb524-4a7f-4b22-aa44-ec0b38d3db9c') assert (inactive_order.remaining == Decimal('0.0')) assert (inactive_order.order_id == '65ecb524-4a7f-4b22-aa44-ec0b38d3db9c') assert (inactive_order.price == '8500.0')<|docstring|>Test trade settlement events<|endoftext|>
b974c8205d4477e9add9b8235a817988cecc9141ab935c310afbc637ed945bde
async def test_handle_out_of_order_sequenced_message(): 'Test situations when an event arrives with an older sequence' client = AdvancedBitpandaProWebsocketClient('irrelevant', 'irrelevant', log_messages) (await client.handle_message(account_balances_json)) (await client.handle_message(order_created_json)) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8974.828764802')) assert (balance.locked == Decimal('1.4')) (await client.handle_message(old_seq_order_created_json)) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8974.828764802')) assert (balance.locked == Decimal('1.4')) (await client.handle_message(newer_seq_order_created_json)) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8569.228764802')) assert (balance.locked == Decimal('2.2'))
Test situations when an event arrives with an older sequence
bpprosdk/tests/test_account_history_channel.py
test_handle_out_of_order_sequenced_message
Tibi-Bitpanda/bitpanda-pro-sdk-py
17
python
async def test_handle_out_of_order_sequenced_message(): client = AdvancedBitpandaProWebsocketClient('irrelevant', 'irrelevant', log_messages) (await client.handle_message(account_balances_json)) (await client.handle_message(order_created_json)) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8974.828764802')) assert (balance.locked == Decimal('1.4')) (await client.handle_message(old_seq_order_created_json)) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8974.828764802')) assert (balance.locked == Decimal('1.4')) (await client.handle_message(newer_seq_order_created_json)) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8569.228764802')) assert (balance.locked == Decimal('2.2'))
async def test_handle_out_of_order_sequenced_message(): client = AdvancedBitpandaProWebsocketClient('irrelevant', 'irrelevant', log_messages) (await client.handle_message(account_balances_json)) (await client.handle_message(order_created_json)) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8974.828764802')) assert (balance.locked == Decimal('1.4')) (await client.handle_message(old_seq_order_created_json)) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8974.828764802')) assert (balance.locked == Decimal('1.4')) (await client.handle_message(newer_seq_order_created_json)) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8569.228764802')) assert (balance.locked == Decimal('2.2'))<|docstring|>Test situations when an event arrives with an older sequence<|endoftext|>
5b088fb91a3afe40bb27f04fc4eea27221b2022b084fb3d081c626e9adbe8728
async def test_deposit_of_funds(): 'Verify correct balance after deposit' client = AdvancedBitpandaProWebsocketClient('irrelevant', 'irrelevant', log_messages) (await client.handle_message(account_balances_json)) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8975.828764802')) assert (balance.locked == Decimal('0.4')) balance = client.state.balances['EUR'] assert (balance.available == Decimal('6606076.62363137')) assert (balance.locked == Decimal('0.0')) (await client.handle_message(account_balance_deposit)) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8976.938764802')) assert (balance.locked == Decimal('0.5')) balance = client.state.balances['EUR'] assert (balance.available == Decimal('6606076.62363137')) assert (balance.locked == Decimal('0.0'))
Verify correct balance after deposit
bpprosdk/tests/test_account_history_channel.py
test_deposit_of_funds
Tibi-Bitpanda/bitpanda-pro-sdk-py
17
python
async def test_deposit_of_funds(): client = AdvancedBitpandaProWebsocketClient('irrelevant', 'irrelevant', log_messages) (await client.handle_message(account_balances_json)) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8975.828764802')) assert (balance.locked == Decimal('0.4')) balance = client.state.balances['EUR'] assert (balance.available == Decimal('6606076.62363137')) assert (balance.locked == Decimal('0.0')) (await client.handle_message(account_balance_deposit)) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8976.938764802')) assert (balance.locked == Decimal('0.5')) balance = client.state.balances['EUR'] assert (balance.available == Decimal('6606076.62363137')) assert (balance.locked == Decimal('0.0'))
async def test_deposit_of_funds(): client = AdvancedBitpandaProWebsocketClient('irrelevant', 'irrelevant', log_messages) (await client.handle_message(account_balances_json)) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8975.828764802')) assert (balance.locked == Decimal('0.4')) balance = client.state.balances['EUR'] assert (balance.available == Decimal('6606076.62363137')) assert (balance.locked == Decimal('0.0')) (await client.handle_message(account_balance_deposit)) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8976.938764802')) assert (balance.locked == Decimal('0.5')) balance = client.state.balances['EUR'] assert (balance.available == Decimal('6606076.62363137')) assert (balance.locked == Decimal('0.0'))<|docstring|>Verify correct balance after deposit<|endoftext|>
f9f6712a4d50d11d5153987b93d696e053ecbbe92481054059263b2608a3184f
async def test_withdrawal_of_funds(): 'Verify correct balance after withdrawal' client = AdvancedBitpandaProWebsocketClient('irrelevant', 'irrelevant', log_messages) (await client.handle_message(account_balances_json)) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8975.828764802')) assert (balance.locked == Decimal('0.4')) balance = client.state.balances['EUR'] assert (balance.available == Decimal('6606076.62363137')) assert (balance.locked == Decimal('0.0')) (await client.handle_message(account_balance_withdrawal)) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8975.608764802')) assert (balance.locked == Decimal('0.12')) balance = client.state.balances['EUR'] assert (balance.available == Decimal('6606076.62363137')) assert (balance.locked == Decimal('0.0'))
Verify correct balance after withdrawal
bpprosdk/tests/test_account_history_channel.py
test_withdrawal_of_funds
Tibi-Bitpanda/bitpanda-pro-sdk-py
17
python
async def test_withdrawal_of_funds(): client = AdvancedBitpandaProWebsocketClient('irrelevant', 'irrelevant', log_messages) (await client.handle_message(account_balances_json)) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8975.828764802')) assert (balance.locked == Decimal('0.4')) balance = client.state.balances['EUR'] assert (balance.available == Decimal('6606076.62363137')) assert (balance.locked == Decimal('0.0')) (await client.handle_message(account_balance_withdrawal)) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8975.608764802')) assert (balance.locked == Decimal('0.12')) balance = client.state.balances['EUR'] assert (balance.available == Decimal('6606076.62363137')) assert (balance.locked == Decimal('0.0'))
async def test_withdrawal_of_funds(): client = AdvancedBitpandaProWebsocketClient('irrelevant', 'irrelevant', log_messages) (await client.handle_message(account_balances_json)) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8975.828764802')) assert (balance.locked == Decimal('0.4')) balance = client.state.balances['EUR'] assert (balance.available == Decimal('6606076.62363137')) assert (balance.locked == Decimal('0.0')) (await client.handle_message(account_balance_withdrawal)) balance = client.state.balances['BTC'] assert (balance.available == Decimal('8975.608764802')) assert (balance.locked == Decimal('0.12')) balance = client.state.balances['EUR'] assert (balance.available == Decimal('6606076.62363137')) assert (balance.locked == Decimal('0.0'))<|docstring|>Verify correct balance after withdrawal<|endoftext|>
e14dada747348a348bbbff1364f7e3f2813a8d9151bcb6d652389ac71812bdd1
def _ax_to_bytes(ax): 'Helper function to convert figure to png file.\n\n Args:\n ax (:class:`matplotlib.axes.Axes`): Axes object to plot.\n\n Returns:\n bytes: Byte representation of the diagram in png format.\n ' f = io.BytesIO() fig = ax.figure FigureCanvasAgg(fig) fig.savefig(f, format='png') fig.clf() return f.getvalue()
Helper function to convert figure to png file. Args: ax (:class:`matplotlib.axes.Axes`): Axes object to plot. Returns: bytes: Byte representation of the diagram in png format.
freud/plot.py
_ax_to_bytes
yjw0510/freud
0
python
def _ax_to_bytes(ax): 'Helper function to convert figure to png file.\n\n Args:\n ax (:class:`matplotlib.axes.Axes`): Axes object to plot.\n\n Returns:\n bytes: Byte representation of the diagram in png format.\n ' f = io.BytesIO() fig = ax.figure FigureCanvasAgg(fig) fig.savefig(f, format='png') fig.clf() return f.getvalue()
def _ax_to_bytes(ax): 'Helper function to convert figure to png file.\n\n Args:\n ax (:class:`matplotlib.axes.Axes`): Axes object to plot.\n\n Returns:\n bytes: Byte representation of the diagram in png format.\n ' f = io.BytesIO() fig = ax.figure FigureCanvasAgg(fig) fig.savefig(f, format='png') fig.clf() return f.getvalue()<|docstring|>Helper function to convert figure to png file. Args: ax (:class:`matplotlib.axes.Axes`): Axes object to plot. Returns: bytes: Byte representation of the diagram in png format.<|endoftext|>
fd6b29e7e03620e1664f5532e41e8b21a2eb2f51ea5eca02c44388aa247f17c1
def _set_3d_axes_equal(ax, limits=None): "Make axes of 3D plot have equal scale so that spheres appear as spheres,\n cubes as cubes, etc. This is one possible solution to Matplotlib's\n ax.set_aspect('equal') and ax.axis('equal') not working for 3D.\n\n Args:\n ax (:class:`matplotlib.axes.Axes`): Axes object.\n limits (:math:`(3, 2)` :class:`np.ndarray`):\n Axis limits in the form\n :code:`[[xmin, xmax], [ymin, ymax], [zmin, zmax]]`. If\n :code:`None`, the limits are auto-detected (Default value =\n :code:`None`).\n " if (limits is None): limits = np.array([ax.get_xlim3d(), ax.get_ylim3d(), ax.get_zlim3d()]) else: limits = np.asarray(limits) origin = np.mean(limits, axis=1) radius = (0.5 * np.max((limits[(:, 1)] - limits[(:, 0)]))) ax.set_xlim3d([(origin[0] - radius), (origin[0] + radius)]) ax.set_ylim3d([(origin[1] - radius), (origin[1] + radius)]) ax.set_zlim3d([(origin[2] - radius), (origin[2] + radius)]) return ax
Make axes of 3D plot have equal scale so that spheres appear as spheres, cubes as cubes, etc. This is one possible solution to Matplotlib's ax.set_aspect('equal') and ax.axis('equal') not working for 3D. Args: ax (:class:`matplotlib.axes.Axes`): Axes object. limits (:math:`(3, 2)` :class:`np.ndarray`): Axis limits in the form :code:`[[xmin, xmax], [ymin, ymax], [zmin, zmax]]`. If :code:`None`, the limits are auto-detected (Default value = :code:`None`).
freud/plot.py
_set_3d_axes_equal
yjw0510/freud
0
python
def _set_3d_axes_equal(ax, limits=None): "Make axes of 3D plot have equal scale so that spheres appear as spheres,\n cubes as cubes, etc. This is one possible solution to Matplotlib's\n ax.set_aspect('equal') and ax.axis('equal') not working for 3D.\n\n Args:\n ax (:class:`matplotlib.axes.Axes`): Axes object.\n limits (:math:`(3, 2)` :class:`np.ndarray`):\n Axis limits in the form\n :code:`[[xmin, xmax], [ymin, ymax], [zmin, zmax]]`. If\n :code:`None`, the limits are auto-detected (Default value =\n :code:`None`).\n " if (limits is None): limits = np.array([ax.get_xlim3d(), ax.get_ylim3d(), ax.get_zlim3d()]) else: limits = np.asarray(limits) origin = np.mean(limits, axis=1) radius = (0.5 * np.max((limits[(:, 1)] - limits[(:, 0)]))) ax.set_xlim3d([(origin[0] - radius), (origin[0] + radius)]) ax.set_ylim3d([(origin[1] - radius), (origin[1] + radius)]) ax.set_zlim3d([(origin[2] - radius), (origin[2] + radius)]) return ax
def _set_3d_axes_equal(ax, limits=None): "Make axes of 3D plot have equal scale so that spheres appear as spheres,\n cubes as cubes, etc. This is one possible solution to Matplotlib's\n ax.set_aspect('equal') and ax.axis('equal') not working for 3D.\n\n Args:\n ax (:class:`matplotlib.axes.Axes`): Axes object.\n limits (:math:`(3, 2)` :class:`np.ndarray`):\n Axis limits in the form\n :code:`[[xmin, xmax], [ymin, ymax], [zmin, zmax]]`. If\n :code:`None`, the limits are auto-detected (Default value =\n :code:`None`).\n " if (limits is None): limits = np.array([ax.get_xlim3d(), ax.get_ylim3d(), ax.get_zlim3d()]) else: limits = np.asarray(limits) origin = np.mean(limits, axis=1) radius = (0.5 * np.max((limits[(:, 1)] - limits[(:, 0)]))) ax.set_xlim3d([(origin[0] - radius), (origin[0] + radius)]) ax.set_ylim3d([(origin[1] - radius), (origin[1] + radius)]) ax.set_zlim3d([(origin[2] - radius), (origin[2] + radius)]) return ax<|docstring|>Make axes of 3D plot have equal scale so that spheres appear as spheres, cubes as cubes, etc. This is one possible solution to Matplotlib's ax.set_aspect('equal') and ax.axis('equal') not working for 3D. Args: ax (:class:`matplotlib.axes.Axes`): Axes object. limits (:math:`(3, 2)` :class:`np.ndarray`): Axis limits in the form :code:`[[xmin, xmax], [ymin, ymax], [zmin, zmax]]`. If :code:`None`, the limits are auto-detected (Default value = :code:`None`).<|endoftext|>
7eb45e80ef971a26622dfa215f04f1fce881597eb3666c1ee7da8566e752797d
def box_plot(box, title=None, ax=None, image=[0, 0, 0], *args, **kwargs): 'Helper function to plot a :class:`~.box.Box` object.\n\n Args:\n box (:class:`~.box.Box`):\n Simulation box.\n title (str):\n Title of the graph. (Default value = :code:`None`).\n ax (:class:`matplotlib.axes.Axes`): Axes object to plot.\n If :code:`None`, make a new axes and figure object.\n If plotting a 3D box, the axes must be 3D.\n (Default value = :code:`None`).\n image (list):\n The periodic image location at which to draw the box (Default\n value = :code:`[0, 0, 0]`).\n ``*args``, ``**kwargs``:\n All other arguments are passed on to\n :meth:`mpl_toolkits.mplot3d.Axes3D.plot` or\n :meth:`matplotlib.axes.Axes.plot`.\n ' box = freud.box.Box.from_box(box) if (ax is None): fig = Figure() if box.is2D: ax = fig.subplots() else: from mpl_toolkits.mplot3d import Axes3D ax = fig.add_subplot(111, projection='3d') if box.is2D: corners = [[0, 0, 0], [0, 1, 0], [1, 1, 0], [1, 0, 0]] corners.append(corners[0]) corners = np.asarray(corners) corners += np.asarray(image) corners = box.make_absolute(corners)[(:, :2)] color = kwargs.pop('color', 'k') ax.plot(corners[(:, 0)], corners[(:, 1)], *args, color=color, **kwargs) ax.set_aspect('equal', 'datalim') ax.set_xlabel('$x$') ax.set_ylabel('$y$') else: corners = np.array([[0, 0, 0], [0, 0, 1], [0, 1, 0], [0, 1, 1], [1, 0, 0], [1, 0, 1], [1, 1, 0], [1, 1, 1]]) corners += np.asarray(image) corners = box.make_absolute(corners) paths = [corners[[0, 1, 3, 2, 0]], corners[[4, 5, 7, 6, 4]], corners[[0, 4]], corners[[1, 5]], corners[[2, 6]], corners[[3, 7]]] for path in paths: color = kwargs.pop('color', 'k') ax.plot(path[(:, 0)], path[(:, 1)], path[(:, 2)], color=color) ax.set_xlabel('$x$') ax.set_ylabel('$y$') ax.set_zlabel('$z$') limits = [[corners[(0, 0)], corners[((- 1), 0)]], [corners[(0, 1)], corners[((- 1), 1)]], [corners[(0, 2)], corners[((- 1), 2)]]] _set_3d_axes_equal(ax, limits) return ax
Helper function to plot a :class:`~.box.Box` object. Args: box (:class:`~.box.Box`): Simulation box. title (str): Title of the graph. (Default value = :code:`None`). ax (:class:`matplotlib.axes.Axes`): Axes object to plot. If :code:`None`, make a new axes and figure object. If plotting a 3D box, the axes must be 3D. (Default value = :code:`None`). image (list): The periodic image location at which to draw the box (Default value = :code:`[0, 0, 0]`). ``*args``, ``**kwargs``: All other arguments are passed on to :meth:`mpl_toolkits.mplot3d.Axes3D.plot` or :meth:`matplotlib.axes.Axes.plot`.
freud/plot.py
box_plot
yjw0510/freud
0
python
def box_plot(box, title=None, ax=None, image=[0, 0, 0], *args, **kwargs): 'Helper function to plot a :class:`~.box.Box` object.\n\n Args:\n box (:class:`~.box.Box`):\n Simulation box.\n title (str):\n Title of the graph. (Default value = :code:`None`).\n ax (:class:`matplotlib.axes.Axes`): Axes object to plot.\n If :code:`None`, make a new axes and figure object.\n If plotting a 3D box, the axes must be 3D.\n (Default value = :code:`None`).\n image (list):\n The periodic image location at which to draw the box (Default\n value = :code:`[0, 0, 0]`).\n ``*args``, ``**kwargs``:\n All other arguments are passed on to\n :meth:`mpl_toolkits.mplot3d.Axes3D.plot` or\n :meth:`matplotlib.axes.Axes.plot`.\n ' box = freud.box.Box.from_box(box) if (ax is None): fig = Figure() if box.is2D: ax = fig.subplots() else: from mpl_toolkits.mplot3d import Axes3D ax = fig.add_subplot(111, projection='3d') if box.is2D: corners = [[0, 0, 0], [0, 1, 0], [1, 1, 0], [1, 0, 0]] corners.append(corners[0]) corners = np.asarray(corners) corners += np.asarray(image) corners = box.make_absolute(corners)[(:, :2)] color = kwargs.pop('color', 'k') ax.plot(corners[(:, 0)], corners[(:, 1)], *args, color=color, **kwargs) ax.set_aspect('equal', 'datalim') ax.set_xlabel('$x$') ax.set_ylabel('$y$') else: corners = np.array([[0, 0, 0], [0, 0, 1], [0, 1, 0], [0, 1, 1], [1, 0, 0], [1, 0, 1], [1, 1, 0], [1, 1, 1]]) corners += np.asarray(image) corners = box.make_absolute(corners) paths = [corners[[0, 1, 3, 2, 0]], corners[[4, 5, 7, 6, 4]], corners[[0, 4]], corners[[1, 5]], corners[[2, 6]], corners[[3, 7]]] for path in paths: color = kwargs.pop('color', 'k') ax.plot(path[(:, 0)], path[(:, 1)], path[(:, 2)], color=color) ax.set_xlabel('$x$') ax.set_ylabel('$y$') ax.set_zlabel('$z$') limits = [[corners[(0, 0)], corners[((- 1), 0)]], [corners[(0, 1)], corners[((- 1), 1)]], [corners[(0, 2)], corners[((- 1), 2)]]] _set_3d_axes_equal(ax, limits) return ax
def box_plot(box, title=None, ax=None, image=[0, 0, 0], *args, **kwargs): 'Helper function to plot a :class:`~.box.Box` object.\n\n Args:\n box (:class:`~.box.Box`):\n Simulation box.\n title (str):\n Title of the graph. (Default value = :code:`None`).\n ax (:class:`matplotlib.axes.Axes`): Axes object to plot.\n If :code:`None`, make a new axes and figure object.\n If plotting a 3D box, the axes must be 3D.\n (Default value = :code:`None`).\n image (list):\n The periodic image location at which to draw the box (Default\n value = :code:`[0, 0, 0]`).\n ``*args``, ``**kwargs``:\n All other arguments are passed on to\n :meth:`mpl_toolkits.mplot3d.Axes3D.plot` or\n :meth:`matplotlib.axes.Axes.plot`.\n ' box = freud.box.Box.from_box(box) if (ax is None): fig = Figure() if box.is2D: ax = fig.subplots() else: from mpl_toolkits.mplot3d import Axes3D ax = fig.add_subplot(111, projection='3d') if box.is2D: corners = [[0, 0, 0], [0, 1, 0], [1, 1, 0], [1, 0, 0]] corners.append(corners[0]) corners = np.asarray(corners) corners += np.asarray(image) corners = box.make_absolute(corners)[(:, :2)] color = kwargs.pop('color', 'k') ax.plot(corners[(:, 0)], corners[(:, 1)], *args, color=color, **kwargs) ax.set_aspect('equal', 'datalim') ax.set_xlabel('$x$') ax.set_ylabel('$y$') else: corners = np.array([[0, 0, 0], [0, 0, 1], [0, 1, 0], [0, 1, 1], [1, 0, 0], [1, 0, 1], [1, 1, 0], [1, 1, 1]]) corners += np.asarray(image) corners = box.make_absolute(corners) paths = [corners[[0, 1, 3, 2, 0]], corners[[4, 5, 7, 6, 4]], corners[[0, 4]], corners[[1, 5]], corners[[2, 6]], corners[[3, 7]]] for path in paths: color = kwargs.pop('color', 'k') ax.plot(path[(:, 0)], path[(:, 1)], path[(:, 2)], color=color) ax.set_xlabel('$x$') ax.set_ylabel('$y$') ax.set_zlabel('$z$') limits = [[corners[(0, 0)], corners[((- 1), 0)]], [corners[(0, 1)], corners[((- 1), 1)]], [corners[(0, 2)], corners[((- 1), 2)]]] _set_3d_axes_equal(ax, limits) return ax<|docstring|>Helper function to plot a :class:`~.box.Box` 
object. Args: box (:class:`~.box.Box`): Simulation box. title (str): Title of the graph. (Default value = :code:`None`). ax (:class:`matplotlib.axes.Axes`): Axes object to plot. If :code:`None`, make a new axes and figure object. If plotting a 3D box, the axes must be 3D. (Default value = :code:`None`). image (list): The periodic image location at which to draw the box (Default value = :code:`[0, 0, 0]`). ``*args``, ``**kwargs``: All other arguments are passed on to :meth:`mpl_toolkits.mplot3d.Axes3D.plot` or :meth:`matplotlib.axes.Axes.plot`.<|endoftext|>
15e2f7c1334f1a27816404c91afd2a1e9cd72d4de91a7bc47c0363c6b447a1ff
def system_plot(system, title=None, ax=None, *args, **kwargs): 'Helper function to plot a system object.\n\n Args:\n system\n Any object that is a valid argument to\n :class:`freud.locality.NeighborQuery.from_system`.\n title (str):\n Title of the plot. (Default value = :code:`None`).\n ax (:class:`matplotlib.axes.Axes`): Axes object to plot.\n If :code:`None`, make a new axes and figure object.\n (Default value = :code:`None`).\n ' system = freud.locality.NeighborQuery.from_system(system) if (ax is None): fig = Figure() if system.box.is2D: ax = fig.subplots() else: from mpl_toolkits.mplot3d import Axes3D ax = fig.add_subplot(111, projection='3d') if system.box.is2D: box_plot(system.box, ax=ax) sc = ax.scatter(system.points[(:, 0)], system.points[(:, 1)], *args, **kwargs) ax.set_aspect('equal', 'datalim') else: box_plot(system.box, ax=ax) sc = ax.scatter(system.points[(:, 0)], system.points[(:, 1)], system.points[(:, 2)], *args, **kwargs) box_min = system.box.make_absolute([0, 0, 0]) box_max = system.box.make_absolute([1, 1, 1]) points_min = np.min(system.points, axis=0) points_max = np.max(system.points, axis=0) limits = [[np.min([box_min[i], points_min[i]]), np.max([box_max[i], points_max[i]])] for i in range(3)] _set_3d_axes_equal(ax, limits=limits) return (ax, sc)
Helper function to plot a system object. Args: system Any object that is a valid argument to :class:`freud.locality.NeighborQuery.from_system`. title (str): Title of the plot. (Default value = :code:`None`). ax (:class:`matplotlib.axes.Axes`): Axes object to plot. If :code:`None`, make a new axes and figure object. (Default value = :code:`None`).
freud/plot.py
system_plot
yjw0510/freud
0
python
def system_plot(system, title=None, ax=None, *args, **kwargs): 'Helper function to plot a system object.\n\n Args:\n system\n Any object that is a valid argument to\n :class:`freud.locality.NeighborQuery.from_system`.\n title (str):\n Title of the plot. (Default value = :code:`None`).\n ax (:class:`matplotlib.axes.Axes`): Axes object to plot.\n If :code:`None`, make a new axes and figure object.\n (Default value = :code:`None`).\n ' system = freud.locality.NeighborQuery.from_system(system) if (ax is None): fig = Figure() if system.box.is2D: ax = fig.subplots() else: from mpl_toolkits.mplot3d import Axes3D ax = fig.add_subplot(111, projection='3d') if system.box.is2D: box_plot(system.box, ax=ax) sc = ax.scatter(system.points[(:, 0)], system.points[(:, 1)], *args, **kwargs) ax.set_aspect('equal', 'datalim') else: box_plot(system.box, ax=ax) sc = ax.scatter(system.points[(:, 0)], system.points[(:, 1)], system.points[(:, 2)], *args, **kwargs) box_min = system.box.make_absolute([0, 0, 0]) box_max = system.box.make_absolute([1, 1, 1]) points_min = np.min(system.points, axis=0) points_max = np.max(system.points, axis=0) limits = [[np.min([box_min[i], points_min[i]]), np.max([box_max[i], points_max[i]])] for i in range(3)] _set_3d_axes_equal(ax, limits=limits) return (ax, sc)
def system_plot(system, title=None, ax=None, *args, **kwargs): 'Helper function to plot a system object.\n\n Args:\n system\n Any object that is a valid argument to\n :class:`freud.locality.NeighborQuery.from_system`.\n title (str):\n Title of the plot. (Default value = :code:`None`).\n ax (:class:`matplotlib.axes.Axes`): Axes object to plot.\n If :code:`None`, make a new axes and figure object.\n (Default value = :code:`None`).\n ' system = freud.locality.NeighborQuery.from_system(system) if (ax is None): fig = Figure() if system.box.is2D: ax = fig.subplots() else: from mpl_toolkits.mplot3d import Axes3D ax = fig.add_subplot(111, projection='3d') if system.box.is2D: box_plot(system.box, ax=ax) sc = ax.scatter(system.points[(:, 0)], system.points[(:, 1)], *args, **kwargs) ax.set_aspect('equal', 'datalim') else: box_plot(system.box, ax=ax) sc = ax.scatter(system.points[(:, 0)], system.points[(:, 1)], system.points[(:, 2)], *args, **kwargs) box_min = system.box.make_absolute([0, 0, 0]) box_max = system.box.make_absolute([1, 1, 1]) points_min = np.min(system.points, axis=0) points_max = np.max(system.points, axis=0) limits = [[np.min([box_min[i], points_min[i]]), np.max([box_max[i], points_max[i]])] for i in range(3)] _set_3d_axes_equal(ax, limits=limits) return (ax, sc)<|docstring|>Helper function to plot a system object. Args: system Any object that is a valid argument to :class:`freud.locality.NeighborQuery.from_system`. title (str): Title of the plot. (Default value = :code:`None`). ax (:class:`matplotlib.axes.Axes`): Axes object to plot. If :code:`None`, make a new axes and figure object. (Default value = :code:`None`).<|endoftext|>
f7a3b37f8f8efaed1e76c9cb0c6e84bc171d97c2f0b393f10542b1d6a2ad11a6
def bar_plot(x, height, title=None, xlabel=None, ylabel=None, ax=None): 'Helper function to draw a bar graph.\n\n Args:\n x (list): x values of the bar graph.\n height (list): Height values corresponding to :code:`x`.\n title (str): Title of the graph. (Default value = :code:`None`).\n xlabel (str): Label of x axis. (Default value = :code:`None`).\n ylabel (str): Label of y axis. (Default value = :code:`None`).\n ax (:class:`matplotlib.axes.Axes`): Axes object to plot.\n If :code:`None`, make a new axes and figure object.\n (Default value = :code:`None`).\n\n Returns:\n :class:`matplotlib.axes.Axes`: Axes object with the diagram.\n ' if (ax is None): fig = Figure() ax = fig.subplots() ax.bar(x=x, height=height) ax.set_title(title) ax.set_xlabel(xlabel) ax.set_ylabel(ylabel) ax.set_xticks(x) ax.set_xticklabels(x) return ax
Helper function to draw a bar graph. Args: x (list): x values of the bar graph. height (list): Height values corresponding to :code:`x`. title (str): Title of the graph. (Default value = :code:`None`). xlabel (str): Label of x axis. (Default value = :code:`None`). ylabel (str): Label of y axis. (Default value = :code:`None`). ax (:class:`matplotlib.axes.Axes`): Axes object to plot. If :code:`None`, make a new axes and figure object. (Default value = :code:`None`). Returns: :class:`matplotlib.axes.Axes`: Axes object with the diagram.
freud/plot.py
bar_plot
yjw0510/freud
0
python
def bar_plot(x, height, title=None, xlabel=None, ylabel=None, ax=None): 'Helper function to draw a bar graph.\n\n Args:\n x (list): x values of the bar graph.\n height (list): Height values corresponding to :code:`x`.\n title (str): Title of the graph. (Default value = :code:`None`).\n xlabel (str): Label of x axis. (Default value = :code:`None`).\n ylabel (str): Label of y axis. (Default value = :code:`None`).\n ax (:class:`matplotlib.axes.Axes`): Axes object to plot.\n If :code:`None`, make a new axes and figure object.\n (Default value = :code:`None`).\n\n Returns:\n :class:`matplotlib.axes.Axes`: Axes object with the diagram.\n ' if (ax is None): fig = Figure() ax = fig.subplots() ax.bar(x=x, height=height) ax.set_title(title) ax.set_xlabel(xlabel) ax.set_ylabel(ylabel) ax.set_xticks(x) ax.set_xticklabels(x) return ax
def bar_plot(x, height, title=None, xlabel=None, ylabel=None, ax=None): 'Helper function to draw a bar graph.\n\n Args:\n x (list): x values of the bar graph.\n height (list): Height values corresponding to :code:`x`.\n title (str): Title of the graph. (Default value = :code:`None`).\n xlabel (str): Label of x axis. (Default value = :code:`None`).\n ylabel (str): Label of y axis. (Default value = :code:`None`).\n ax (:class:`matplotlib.axes.Axes`): Axes object to plot.\n If :code:`None`, make a new axes and figure object.\n (Default value = :code:`None`).\n\n Returns:\n :class:`matplotlib.axes.Axes`: Axes object with the diagram.\n ' if (ax is None): fig = Figure() ax = fig.subplots() ax.bar(x=x, height=height) ax.set_title(title) ax.set_xlabel(xlabel) ax.set_ylabel(ylabel) ax.set_xticks(x) ax.set_xticklabels(x) return ax<|docstring|>Helper function to draw a bar graph. Args: x (list): x values of the bar graph. height (list): Height values corresponding to :code:`x`. title (str): Title of the graph. (Default value = :code:`None`). xlabel (str): Label of x axis. (Default value = :code:`None`). ylabel (str): Label of y axis. (Default value = :code:`None`). ax (:class:`matplotlib.axes.Axes`): Axes object to plot. If :code:`None`, make a new axes and figure object. (Default value = :code:`None`). Returns: :class:`matplotlib.axes.Axes`: Axes object with the diagram.<|endoftext|>
6cdb6c19e987e218916ed474a79fa40982a7e233bec0a29036b1322a0ea5ee2f
def clusters_plot(keys, freqs, num_clusters_to_plot=10, ax=None): 'Helper function to plot most frequent clusters in a bar graph.\n\n Args:\n keys (list): Cluster keys.\n freqs (list): Number of particles in each clusters.\n num_clusters_to_plot (unsigned int): Number of largest clusters to\n plot.\n ax (:class:`matplotlib.axes.Axes`): Axes object to plot.\n If :code:`None`, make a new axes and figure object.\n (Default value = :code:`None`).\n\n Returns:\n :class:`matplotlib.axes.Axes`: Axes object with the diagram.\n ' count_sorted = sorted([(freq, key) for (key, freq) in zip(keys, freqs)], key=(lambda x: (- x[0]))) sorted_freqs = [i[0] for i in count_sorted[:num_clusters_to_plot]] sorted_keys = [str(i[1]) for i in count_sorted[:num_clusters_to_plot]] return bar_plot(sorted_keys, sorted_freqs, title='Cluster Frequency', xlabel='Keys of {} largest clusters (total clusters: {})'.format(len(sorted_freqs), len(freqs)), ylabel='Number of particles', ax=ax)
Helper function to plot most frequent clusters in a bar graph. Args: keys (list): Cluster keys. freqs (list): Number of particles in each clusters. num_clusters_to_plot (unsigned int): Number of largest clusters to plot. ax (:class:`matplotlib.axes.Axes`): Axes object to plot. If :code:`None`, make a new axes and figure object. (Default value = :code:`None`). Returns: :class:`matplotlib.axes.Axes`: Axes object with the diagram.
freud/plot.py
clusters_plot
yjw0510/freud
0
python
def clusters_plot(keys, freqs, num_clusters_to_plot=10, ax=None): 'Helper function to plot most frequent clusters in a bar graph.\n\n Args:\n keys (list): Cluster keys.\n freqs (list): Number of particles in each clusters.\n num_clusters_to_plot (unsigned int): Number of largest clusters to\n plot.\n ax (:class:`matplotlib.axes.Axes`): Axes object to plot.\n If :code:`None`, make a new axes and figure object.\n (Default value = :code:`None`).\n\n Returns:\n :class:`matplotlib.axes.Axes`: Axes object with the diagram.\n ' count_sorted = sorted([(freq, key) for (key, freq) in zip(keys, freqs)], key=(lambda x: (- x[0]))) sorted_freqs = [i[0] for i in count_sorted[:num_clusters_to_plot]] sorted_keys = [str(i[1]) for i in count_sorted[:num_clusters_to_plot]] return bar_plot(sorted_keys, sorted_freqs, title='Cluster Frequency', xlabel='Keys of {} largest clusters (total clusters: {})'.format(len(sorted_freqs), len(freqs)), ylabel='Number of particles', ax=ax)
def clusters_plot(keys, freqs, num_clusters_to_plot=10, ax=None): 'Helper function to plot most frequent clusters in a bar graph.\n\n Args:\n keys (list): Cluster keys.\n freqs (list): Number of particles in each clusters.\n num_clusters_to_plot (unsigned int): Number of largest clusters to\n plot.\n ax (:class:`matplotlib.axes.Axes`): Axes object to plot.\n If :code:`None`, make a new axes and figure object.\n (Default value = :code:`None`).\n\n Returns:\n :class:`matplotlib.axes.Axes`: Axes object with the diagram.\n ' count_sorted = sorted([(freq, key) for (key, freq) in zip(keys, freqs)], key=(lambda x: (- x[0]))) sorted_freqs = [i[0] for i in count_sorted[:num_clusters_to_plot]] sorted_keys = [str(i[1]) for i in count_sorted[:num_clusters_to_plot]] return bar_plot(sorted_keys, sorted_freqs, title='Cluster Frequency', xlabel='Keys of {} largest clusters (total clusters: {})'.format(len(sorted_freqs), len(freqs)), ylabel='Number of particles', ax=ax)<|docstring|>Helper function to plot most frequent clusters in a bar graph. Args: keys (list): Cluster keys. freqs (list): Number of particles in each clusters. num_clusters_to_plot (unsigned int): Number of largest clusters to plot. ax (:class:`matplotlib.axes.Axes`): Axes object to plot. If :code:`None`, make a new axes and figure object. (Default value = :code:`None`). Returns: :class:`matplotlib.axes.Axes`: Axes object with the diagram.<|endoftext|>
29bdc274e61c753fd73d58b4a35705e871e78b6778878249b7080a9ee476958e
def line_plot(x, y, title=None, xlabel=None, ylabel=None, ax=None): 'Helper function to draw a line graph.\n\n Args:\n x (list): x values of the line graph.\n y (list): y values corresponding to :code:`x`.\n title (str): Title of the graph. (Default value = :code:`None`).\n xlabel (str): Label of x axis. (Default value = :code:`None`).\n ylabel (str): Label of y axis. (Default value = :code:`None`).\n ax (:class:`matplotlib.axes.Axes`): Axes object to plot.\n If :code:`None`, make a new axes and figure object.\n (Default value = :code:`None`).\n\n Returns:\n :class:`matplotlib.axes.Axes`: Axes object with the diagram.\n ' if (ax is None): fig = Figure() ax = fig.subplots() ax.plot(x, y) ax.set_title(title) ax.set_xlabel(xlabel) ax.set_ylabel(ylabel) return ax
Helper function to draw a line graph. Args: x (list): x values of the line graph. y (list): y values corresponding to :code:`x`. title (str): Title of the graph. (Default value = :code:`None`). xlabel (str): Label of x axis. (Default value = :code:`None`). ylabel (str): Label of y axis. (Default value = :code:`None`). ax (:class:`matplotlib.axes.Axes`): Axes object to plot. If :code:`None`, make a new axes and figure object. (Default value = :code:`None`). Returns: :class:`matplotlib.axes.Axes`: Axes object with the diagram.
freud/plot.py
line_plot
yjw0510/freud
0
python
def line_plot(x, y, title=None, xlabel=None, ylabel=None, ax=None): 'Helper function to draw a line graph.\n\n Args:\n x (list): x values of the line graph.\n y (list): y values corresponding to :code:`x`.\n title (str): Title of the graph. (Default value = :code:`None`).\n xlabel (str): Label of x axis. (Default value = :code:`None`).\n ylabel (str): Label of y axis. (Default value = :code:`None`).\n ax (:class:`matplotlib.axes.Axes`): Axes object to plot.\n If :code:`None`, make a new axes and figure object.\n (Default value = :code:`None`).\n\n Returns:\n :class:`matplotlib.axes.Axes`: Axes object with the diagram.\n ' if (ax is None): fig = Figure() ax = fig.subplots() ax.plot(x, y) ax.set_title(title) ax.set_xlabel(xlabel) ax.set_ylabel(ylabel) return ax
def line_plot(x, y, title=None, xlabel=None, ylabel=None, ax=None): 'Helper function to draw a line graph.\n\n Args:\n x (list): x values of the line graph.\n y (list): y values corresponding to :code:`x`.\n title (str): Title of the graph. (Default value = :code:`None`).\n xlabel (str): Label of x axis. (Default value = :code:`None`).\n ylabel (str): Label of y axis. (Default value = :code:`None`).\n ax (:class:`matplotlib.axes.Axes`): Axes object to plot.\n If :code:`None`, make a new axes and figure object.\n (Default value = :code:`None`).\n\n Returns:\n :class:`matplotlib.axes.Axes`: Axes object with the diagram.\n ' if (ax is None): fig = Figure() ax = fig.subplots() ax.plot(x, y) ax.set_title(title) ax.set_xlabel(xlabel) ax.set_ylabel(ylabel) return ax<|docstring|>Helper function to draw a line graph. Args: x (list): x values of the line graph. y (list): y values corresponding to :code:`x`. title (str): Title of the graph. (Default value = :code:`None`). xlabel (str): Label of x axis. (Default value = :code:`None`). ylabel (str): Label of y axis. (Default value = :code:`None`). ax (:class:`matplotlib.axes.Axes`): Axes object to plot. If :code:`None`, make a new axes and figure object. (Default value = :code:`None`). Returns: :class:`matplotlib.axes.Axes`: Axes object with the diagram.<|endoftext|>
80c0171596055c18b7af8feb8971f6f1d5a442d283f234d4ee65775c630aa6c2
def histogram_plot(values, title=None, xlabel=None, ylabel=None, ax=None): 'Helper function to draw a histogram graph.\n\n Args:\n values (list): values of the histogram.\n title (str): Title of the graph. (Default value = :code:`None`).\n xlabel (str): Label of x axis. (Default value = :code:`None`).\n ylabel (str): Label of y axis. (Default value = :code:`None`).\n ax (:class:`matplotlib.axes.Axes`): Axes object to plot.\n If :code:`None`, make a new axes and figure object.\n (Default value = :code:`None`).\n\n Returns:\n :class:`matplotlib.axes.Axes`: Axes object with the diagram.\n ' if (ax is None): fig = Figure() ax = fig.subplots() ax.hist(values) ax.set_title(title) ax.set_xlabel(xlabel) ax.set_ylabel(ylabel) return ax
Helper function to draw a histogram graph. Args: values (list): values of the histogram. title (str): Title of the graph. (Default value = :code:`None`). xlabel (str): Label of x axis. (Default value = :code:`None`). ylabel (str): Label of y axis. (Default value = :code:`None`). ax (:class:`matplotlib.axes.Axes`): Axes object to plot. If :code:`None`, make a new axes and figure object. (Default value = :code:`None`). Returns: :class:`matplotlib.axes.Axes`: Axes object with the diagram.
freud/plot.py
histogram_plot
yjw0510/freud
0
python
def histogram_plot(values, title=None, xlabel=None, ylabel=None, ax=None): 'Helper function to draw a histogram graph.\n\n Args:\n values (list): values of the histogram.\n title (str): Title of the graph. (Default value = :code:`None`).\n xlabel (str): Label of x axis. (Default value = :code:`None`).\n ylabel (str): Label of y axis. (Default value = :code:`None`).\n ax (:class:`matplotlib.axes.Axes`): Axes object to plot.\n If :code:`None`, make a new axes and figure object.\n (Default value = :code:`None`).\n\n Returns:\n :class:`matplotlib.axes.Axes`: Axes object with the diagram.\n ' if (ax is None): fig = Figure() ax = fig.subplots() ax.hist(values) ax.set_title(title) ax.set_xlabel(xlabel) ax.set_ylabel(ylabel) return ax
def histogram_plot(values, title=None, xlabel=None, ylabel=None, ax=None): 'Helper function to draw a histogram graph.\n\n Args:\n values (list): values of the histogram.\n title (str): Title of the graph. (Default value = :code:`None`).\n xlabel (str): Label of x axis. (Default value = :code:`None`).\n ylabel (str): Label of y axis. (Default value = :code:`None`).\n ax (:class:`matplotlib.axes.Axes`): Axes object to plot.\n If :code:`None`, make a new axes and figure object.\n (Default value = :code:`None`).\n\n Returns:\n :class:`matplotlib.axes.Axes`: Axes object with the diagram.\n ' if (ax is None): fig = Figure() ax = fig.subplots() ax.hist(values) ax.set_title(title) ax.set_xlabel(xlabel) ax.set_ylabel(ylabel) return ax<|docstring|>Helper function to draw a histogram graph. Args: values (list): values of the histogram. title (str): Title of the graph. (Default value = :code:`None`). xlabel (str): Label of x axis. (Default value = :code:`None`). ylabel (str): Label of y axis. (Default value = :code:`None`). ax (:class:`matplotlib.axes.Axes`): Axes object to plot. If :code:`None`, make a new axes and figure object. (Default value = :code:`None`). Returns: :class:`matplotlib.axes.Axes`: Axes object with the diagram.<|endoftext|>
1b9c1f8908555e8867fd405e8bb6cfac9c1c26cc4834de6d0219745193297344
def pmft_plot(pmft, ax=None): 'Helper function to draw 2D PMFT diagram.\n\n Args:\n pmft (:class:`freud.pmft.PMFTXY2D`):\n PMFTXY2D instance.\n ax (:class:`matplotlib.axes.Axes`): Axes object to plot.\n If :code:`None`, make a new axes and figure object.\n (Default value = :code:`None`).\n\n Returns:\n :class:`matplotlib.axes.Axes`: Axes object with the diagram.\n ' from mpl_toolkits.axes_grid1.axes_divider import make_axes_locatable from matplotlib.colorbar import Colorbar if (ax is None): fig = Figure() ax = fig.subplots() pmft_arr = np.copy(pmft.PMFT) pmft_arr[np.isinf(pmft_arr)] = np.nan xlims = (pmft.X[0], pmft.X[(- 1)]) ylims = (pmft.Y[0], pmft.Y[(- 1)]) ax.set_xlim(xlims) ax.set_ylim(ylims) ax.xaxis.set_ticks([i for i in range(int(xlims[0]), int((xlims[1] + 1)))]) ax.yaxis.set_ticks([i for i in range(int(ylims[0]), int((ylims[1] + 1)))]) ax.set_xlabel('$x$') ax.set_ylabel('$y$') ax.set_title('PMFT') ax_divider = make_axes_locatable(ax) cax = ax_divider.append_axes('right', size='7%', pad='10%') im = ax.imshow(np.flipud(pmft_arr), extent=[xlims[0], xlims[1], ylims[0], ylims[1]], interpolation='nearest', cmap='viridis', vmin=(- 2.5), vmax=3.0) cb = Colorbar(cax, im) cb.set_label('$k_B T$') return ax
Helper function to draw 2D PMFT diagram. Args: pmft (:class:`freud.pmft.PMFTXY2D`): PMFTXY2D instance. ax (:class:`matplotlib.axes.Axes`): Axes object to plot. If :code:`None`, make a new axes and figure object. (Default value = :code:`None`). Returns: :class:`matplotlib.axes.Axes`: Axes object with the diagram.
freud/plot.py
pmft_plot
yjw0510/freud
0
python
def pmft_plot(pmft, ax=None): 'Helper function to draw 2D PMFT diagram.\n\n Args:\n pmft (:class:`freud.pmft.PMFTXY2D`):\n PMFTXY2D instance.\n ax (:class:`matplotlib.axes.Axes`): Axes object to plot.\n If :code:`None`, make a new axes and figure object.\n (Default value = :code:`None`).\n\n Returns:\n :class:`matplotlib.axes.Axes`: Axes object with the diagram.\n ' from mpl_toolkits.axes_grid1.axes_divider import make_axes_locatable from matplotlib.colorbar import Colorbar if (ax is None): fig = Figure() ax = fig.subplots() pmft_arr = np.copy(pmft.PMFT) pmft_arr[np.isinf(pmft_arr)] = np.nan xlims = (pmft.X[0], pmft.X[(- 1)]) ylims = (pmft.Y[0], pmft.Y[(- 1)]) ax.set_xlim(xlims) ax.set_ylim(ylims) ax.xaxis.set_ticks([i for i in range(int(xlims[0]), int((xlims[1] + 1)))]) ax.yaxis.set_ticks([i for i in range(int(ylims[0]), int((ylims[1] + 1)))]) ax.set_xlabel('$x$') ax.set_ylabel('$y$') ax.set_title('PMFT') ax_divider = make_axes_locatable(ax) cax = ax_divider.append_axes('right', size='7%', pad='10%') im = ax.imshow(np.flipud(pmft_arr), extent=[xlims[0], xlims[1], ylims[0], ylims[1]], interpolation='nearest', cmap='viridis', vmin=(- 2.5), vmax=3.0) cb = Colorbar(cax, im) cb.set_label('$k_B T$') return ax
def pmft_plot(pmft, ax=None): 'Helper function to draw 2D PMFT diagram.\n\n Args:\n pmft (:class:`freud.pmft.PMFTXY2D`):\n PMFTXY2D instance.\n ax (:class:`matplotlib.axes.Axes`): Axes object to plot.\n If :code:`None`, make a new axes and figure object.\n (Default value = :code:`None`).\n\n Returns:\n :class:`matplotlib.axes.Axes`: Axes object with the diagram.\n ' from mpl_toolkits.axes_grid1.axes_divider import make_axes_locatable from matplotlib.colorbar import Colorbar if (ax is None): fig = Figure() ax = fig.subplots() pmft_arr = np.copy(pmft.PMFT) pmft_arr[np.isinf(pmft_arr)] = np.nan xlims = (pmft.X[0], pmft.X[(- 1)]) ylims = (pmft.Y[0], pmft.Y[(- 1)]) ax.set_xlim(xlims) ax.set_ylim(ylims) ax.xaxis.set_ticks([i for i in range(int(xlims[0]), int((xlims[1] + 1)))]) ax.yaxis.set_ticks([i for i in range(int(ylims[0]), int((ylims[1] + 1)))]) ax.set_xlabel('$x$') ax.set_ylabel('$y$') ax.set_title('PMFT') ax_divider = make_axes_locatable(ax) cax = ax_divider.append_axes('right', size='7%', pad='10%') im = ax.imshow(np.flipud(pmft_arr), extent=[xlims[0], xlims[1], ylims[0], ylims[1]], interpolation='nearest', cmap='viridis', vmin=(- 2.5), vmax=3.0) cb = Colorbar(cax, im) cb.set_label('$k_B T$') return ax<|docstring|>Helper function to draw 2D PMFT diagram. Args: pmft (:class:`freud.pmft.PMFTXY2D`): PMFTXY2D instance. ax (:class:`matplotlib.axes.Axes`): Axes object to plot. If :code:`None`, make a new axes and figure object. (Default value = :code:`None`). Returns: :class:`matplotlib.axes.Axes`: Axes object with the diagram.<|endoftext|>
feb2f5f0bfd3f7cb3837738cc8311c3c441ef4e47328a07acd083fbcac6116cf
def density_plot(density, box, ax=None): 'Helper function to plot density diagram.\n\n Args:\n density (:math:`\\left(N_x, N_y\\right)` :class:`numpy.ndarray`):\n Array containing density.\n box (:class:`freud.box.Box`):\n Simulation box.\n ax (:class:`matplotlib.axes.Axes`): Axes object to plot.\n If :code:`None`, make a new axes and figure object.\n (Default value = :code:`None`).\n\n Returns:\n :class:`matplotlib.axes.Axes`: Axes object with the diagram.\n ' from mpl_toolkits.axes_grid1.axes_divider import make_axes_locatable from matplotlib.colorbar import Colorbar if (ax is None): fig = Figure() ax = fig.subplots() xlims = (((- box.Lx) / 2), (box.Lx / 2)) ylims = (((- box.Ly) / 2), (box.Ly / 2)) ax.set_title('Gaussian Density') ax.set_xlabel('$x$') ax.set_ylabel('$y$') ax_divider = make_axes_locatable(ax) cax = ax_divider.append_axes('right', size='7%', pad='10%') im = ax.imshow(np.flipud(density.T), extent=[xlims[0], xlims[1], ylims[0], ylims[1]]) cb = Colorbar(cax, im) cb.set_label('Density') return ax
Helper function to plot density diagram. Args: density (:math:`\left(N_x, N_y\right)` :class:`numpy.ndarray`): Array containing density. box (:class:`freud.box.Box`): Simulation box. ax (:class:`matplotlib.axes.Axes`): Axes object to plot. If :code:`None`, make a new axes and figure object. (Default value = :code:`None`). Returns: :class:`matplotlib.axes.Axes`: Axes object with the diagram.
freud/plot.py
density_plot
yjw0510/freud
0
python
def density_plot(density, box, ax=None): 'Helper function to plot density diagram.\n\n Args:\n density (:math:`\\left(N_x, N_y\\right)` :class:`numpy.ndarray`):\n Array containing density.\n box (:class:`freud.box.Box`):\n Simulation box.\n ax (:class:`matplotlib.axes.Axes`): Axes object to plot.\n If :code:`None`, make a new axes and figure object.\n (Default value = :code:`None`).\n\n Returns:\n :class:`matplotlib.axes.Axes`: Axes object with the diagram.\n ' from mpl_toolkits.axes_grid1.axes_divider import make_axes_locatable from matplotlib.colorbar import Colorbar if (ax is None): fig = Figure() ax = fig.subplots() xlims = (((- box.Lx) / 2), (box.Lx / 2)) ylims = (((- box.Ly) / 2), (box.Ly / 2)) ax.set_title('Gaussian Density') ax.set_xlabel('$x$') ax.set_ylabel('$y$') ax_divider = make_axes_locatable(ax) cax = ax_divider.append_axes('right', size='7%', pad='10%') im = ax.imshow(np.flipud(density.T), extent=[xlims[0], xlims[1], ylims[0], ylims[1]]) cb = Colorbar(cax, im) cb.set_label('Density') return ax
def density_plot(density, box, ax=None): 'Helper function to plot density diagram.\n\n Args:\n density (:math:`\\left(N_x, N_y\\right)` :class:`numpy.ndarray`):\n Array containing density.\n box (:class:`freud.box.Box`):\n Simulation box.\n ax (:class:`matplotlib.axes.Axes`): Axes object to plot.\n If :code:`None`, make a new axes and figure object.\n (Default value = :code:`None`).\n\n Returns:\n :class:`matplotlib.axes.Axes`: Axes object with the diagram.\n ' from mpl_toolkits.axes_grid1.axes_divider import make_axes_locatable from matplotlib.colorbar import Colorbar if (ax is None): fig = Figure() ax = fig.subplots() xlims = (((- box.Lx) / 2), (box.Lx / 2)) ylims = (((- box.Ly) / 2), (box.Ly / 2)) ax.set_title('Gaussian Density') ax.set_xlabel('$x$') ax.set_ylabel('$y$') ax_divider = make_axes_locatable(ax) cax = ax_divider.append_axes('right', size='7%', pad='10%') im = ax.imshow(np.flipud(density.T), extent=[xlims[0], xlims[1], ylims[0], ylims[1]]) cb = Colorbar(cax, im) cb.set_label('Density') return ax<|docstring|>Helper function to plot density diagram. Args: density (:math:`\left(N_x, N_y\right)` :class:`numpy.ndarray`): Array containing density. box (:class:`freud.box.Box`): Simulation box. ax (:class:`matplotlib.axes.Axes`): Axes object to plot. If :code:`None`, make a new axes and figure object. (Default value = :code:`None`). Returns: :class:`matplotlib.axes.Axes`: Axes object with the diagram.<|endoftext|>
0bea5235e5106e530d21e5ea11eac21ac6b7402e4eff52d9bb2d8b49bc3f2a13
def voronoi_plot(box, polytopes, ax=None, color_by_sides=True, cmap=None):
    """Helper function to draw 2D Voronoi diagram.

    Args:
        box (:class:`freud.box.Box`):
            Simulation box.
        polytopes (:class:`numpy.ndarray`):
            Array containing Voronoi polytope vertices.
        ax (:class:`matplotlib.axes.Axes`): Axes object to plot.
            If :code:`None`, make a new axes and figure object.
            (Default value = :code:`None`).
        color_by_sides (bool):
            If :code:`True`, color cells by the number of sides.
            If :code:`False`, random colors are used for each cell.
            (Default value = :code:`True`).
        cmap (str):
            Colormap name to use (Default value = :code:`None`).

    Returns:
        :class:`matplotlib.axes.Axes`: Axes object with the diagram.
    """
    from matplotlib import cm
    from matplotlib.collections import PatchCollection
    from matplotlib.patches import Polygon
    from mpl_toolkits.axes_grid1.axes_divider import make_axes_locatable
    from matplotlib.colorbar import Colorbar

    if ax is None:
        fig = Figure()
        ax = fig.subplots()

    # One matplotlib Polygon per Voronoi cell; the z coordinate is dropped.
    cell_patches = [Polygon(vertices[:, :2]) for vertices in polytopes]
    cells = PatchCollection(cell_patches, edgecolors='black', alpha=0.4)

    # Color either by side count or by a random permutation of cell indices.
    if color_by_sides:
        cell_colors = np.array([len(vertices) for vertices in polytopes])
    else:
        cell_colors = np.random.RandomState().permutation(
            np.arange(len(cell_patches)))

    cmap = cm.get_cmap('Set1' if cmap is None else cmap,
                       np.unique(cell_colors).size)
    bounds = np.arange(np.min(cell_colors), np.max(cell_colors) + 1)

    # Shift by 0.5 so each integer color value sits in the middle of a bin.
    cells.set_array(np.array(cell_colors) - 0.5)
    cells.set_cmap(cmap)
    cells.set_clim(bounds[0] - 0.5, bounds[-1] + 0.5)
    ax.add_collection(cells)

    # Draw the box boundary as a closed loop of the four fractional corners.
    corners = [[0, 0, 0], [0, 1, 0], [1, 1, 0], [1, 0, 0]]
    corners.append(corners[0])  # close the loop
    corners = box.make_absolute(corners)[:, :2]
    ax.plot(corners[:, 0], corners[:, 1], color='k')

    ax.set_title('Voronoi Diagram')
    ax.set_xlim((np.min(corners[:, 0]), np.max(corners[:, 0])))
    ax.set_ylim((np.min(corners[:, 1]), np.max(corners[:, 1])))
    ax.set_aspect('equal', 'datalim')

    # Colorbar only makes sense when colors encode the number of sides.
    if color_by_sides:
        ax_divider = make_axes_locatable(ax)
        cax = ax_divider.append_axes('right', size='7%', pad='10%')
        cb = Colorbar(cax, cells)
        cb.set_label('Number of sides')
        cb.set_ticks(bounds)
    return ax
def train(model, optimizer, loader, epoch):
    """
    Train the models on the dataset.
    """
    # Running meters for timing, loss and top-1/top-5 accuracy.
    batch_time = AverageMeter('time', ':.2f')
    data_time = AverageMeter('data time', ':.2f')
    top1 = AverageMeter('top1', ':.3f')
    top5 = AverageMeter('top5', ':.3f')
    losses = AverageMeter('loss', ':.3e')

    tick = time.perf_counter()
    model.train()
    criterion = nn.CrossEntropyLoss().cuda()

    for iter_epoch, (inp, target) in enumerate(loader):
        # Time spent waiting on the data loader.
        data_time.update(time.perf_counter() - tick)

        inp = inp.cuda(non_blocking=True)
        target = target.cuda(non_blocking=True)

        # Forward pass, loss, backward pass, parameter update.
        output = model(inp)
        loss = criterion(output, target)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()

        # Bookkeeping: per-batch accuracy and loss, weighted by batch size.
        acc1, acc5 = accuracy(output, target, topk=(1, 5))
        losses.update(loss.item(), inp.size(0))
        top1.update(acc1[0], inp.size(0))
        top5.update(acc5[0], inp.size(0))

        batch_time.update(time.perf_counter() - tick)
        tick = time.perf_counter()

        # Only rank 0 logs, every 50 iterations.
        if args.rank == 0 and iter_epoch % 50 == 0:
            logger.info(
                'Epoch[{0}] - Iter: [{1}/{2}]\tTime {batch_time.val:.3f} ({batch_time.avg:.3f})\tData {data_time.val:.3f} ({data_time.avg:.3f})\tLoss {loss.val:.4f} ({loss.avg:.4f})\tPrec {top1.val:.3f} ({top1.avg:.3f})\tLR trunk {lr}\tLR head {lr_W}'.format(
                    epoch, iter_epoch, len(loader),
                    batch_time=batch_time, data_time=data_time,
                    loss=losses, top1=top1,
                    lr=optimizer.param_groups[0]['lr'],
                    lr_W=optimizer.param_groups[1]['lr']))
    return (epoch, losses.avg, top1.avg.item(), top5.avg.item())
def enable_original_exc_handler(enable):
    """Enable/disable the original exception handler.

    This mainly controls how exceptions are printed when an exception is
    thrown. Asynq overrides the exception handler to better display asynq
    stacktraces, but in some circumstances you may want to show original
    traces.

    For example, in Jupyter notebooks, the default exception handler displays
    context on exception lines. Enable this function if you'd like that
    behavior.
    """
    # Module-level flag read by async_exception_hook and friends.
    global _use_original_exc_handler
    _use_original_exc_handler = enable
def enable_filter_traceback(enable):
    """Enable/disable replacing asynq boilerplate lines in stacktraces.

    These lines are repeated many times in stacktraces of codebases using
    asynq. By default we replace them so it's easier to read the stacktrace,
    but you can enable if you're debugging an issue where it's useful to know
    the exact lines.
    """
    # Module-level flag consulted by format_error().
    global _should_filter_traceback
    _should_filter_traceback = enable
def enable_traceback_syntax_highlight(enable):
    """Enable/disable syntax highlighted stacktraces when using asynq's exception handler."""
    # Module-level flag consulted by format_error().
    global _use_syntax_highlighting
    _use_syntax_highlighting = enable
def dump_error(error, tb=None):
    """Dumps errors w/async stack traces."""
    try:
        report = format_error(error, tb=tb) or 'No error'
        stderr.write('\n' + report)
    finally:
        # Flush both streams even if writing the report fails, so output
        # ordering stays sane.
        stdout.flush()
        stderr.flush()
def format_error(error, tb=None):
    """Formats errors w/async stack traces."""
    if error is None:
        return None

    # Prefer a full traceback: either the one passed in explicitly or the
    # one asynq captured on the exception (error._traceback). Otherwise fall
    # back to the exception line alone, or to nothing for non-exceptions.
    if hasattr(error, '_traceback') or tb is not None:
        tb_list = traceback.format_exception(
            error.__class__, error, tb or error._traceback)
    elif isinstance(error, BaseException):
        tb_list = traceback.format_exception_only(error.__class__, error)
    else:
        tb_list = []

    tb_text = ''.join(tb_list)
    if isinstance(tb_text, bytes):
        tb_text = tb_text.decode('utf-8', 'replace')
    # Optional post-processing, controlled by the module-level flags.
    if _use_syntax_highlighting:
        tb_text = syntax_highlight_tb(tb_text)
    if _should_filter_traceback:
        tb_text = ''.join(filter_traceback(tb_text.splitlines(True)))
    return tb_text
def extract_tb(tb, limit=None):
    """This implementation is stolen from traceback module but respects __traceback_hide__."""
    if limit is None and hasattr(sys, 'tracebacklimit'):
        limit = sys.tracebacklimit

    entries = []
    depth = 0
    while tb is not None and (limit is None or depth < limit):
        frame = tb.tb_frame
        # Frames flagged via __traceback_hide__ are skipped but still count
        # against the limit (depth advances regardless).
        if not _should_skip_frame(frame):
            lineno = tb.tb_lineno
            code = frame.f_code
            filename = code.co_filename
            name = code.co_name
            linecache.checkcache(filename)
            line = linecache.getline(filename, lineno, frame.f_globals)
            line = line.strip() if line else None
            entries.append((filename, lineno, name, line))
        tb = tb.tb_next
        depth += 1
    return entries
def format_tb(tb):
    """Formats a traceback into a list of lines.

    Uses this module's extract_tb() (which respects __traceback_hide__)
    rather than traceback.extract_tb(), so hidden frames are omitted.
    """
    return traceback.format_list(extract_tb(tb))
def dump_stack(skip=0, limit=None):
    """Dumps current stack trace."""
    skip += 2  # hide this helper and traceback.extract_stack itself
    if limit is None:
        limit = options.STACK_DUMP_LIMIT
    print('--- Stack trace: -----------------------------------------------------')
    try:
        depth = None if limit is None else limit + skip
        frames = traceback.extract_stack(limit=depth)
        print(''.join(traceback.format_list(frames[:-skip])), end='')
    finally:
        print('----------------------------------------------------------------------')
        stdout.flush()
def dump_asynq_stack():
    """Dumps the current asynq stack to stdout."""
    stack_lines = format_asynq_stack()
    if stack_lines is None:
        # No scheduler task is running right now.
        print('dump_asynq_stack: no asynq task currently active')
    else:
        print('\n'.join(stack_lines))
def format_asynq_stack():
    """Returns a list of strings.

    Each string corresponds to one item in the current asynq stack.

    Returns None if there is no active asynq task.

    """
    # Imported lazily to avoid a circular import with the scheduler module.
    from .scheduler import get_scheduler

    task = get_scheduler().active_task
    return None if task is None else task.traceback()
def async_exception_hook(type, error, tb):
    """Exception hook capable of printing async stack traces."""
    # Make sure pending output appears before the traceback.
    stdout.flush()
    stderr.flush()
    # Optionally run the interpreter's previous hook first.
    if _use_original_exc_handler and original_hook is not None:
        original_hook(type, error, tb)
    dump_error(error, tb=tb)
def ipython_custom_exception_handler(self, etype, value, tb, tb_offset=None):
    """Override ipython's exception handler to print async traceback."""
    async_exception_hook(etype, value, tb)
    # Also show IPython's own traceback when the original handler is enabled.
    if _use_original_exc_handler:
        self.showtraceback()
def attach_exception_hook():
    """Injects async exception hook into the sys.excepthook."""
    # Detect IPython: __IPYTHON__ is only defined inside an IPython session.
    try:
        __IPYTHON__
    except NameError:
        shell = None
    else:
        from IPython.core.getipython import get_ipython
        shell = get_ipython()

    if shell is not None:
        # IPython ignores sys.excepthook; use its custom-exception API.
        shell.set_custom_exc((BaseException,), ipython_custom_exception_handler)
        return

    global is_attached, original_hook
    if is_attached:
        sys.stderr.write('Warning: async exception hook was already attached.\n')
        return
    # Remember the previous hook so detach_exception_hook can restore it.
    original_hook = sys.excepthook
    sys.excepthook = async_exception_hook
    is_attached = True
def detach_exception_hook():
    """Removes async exception hook into the sys.excepthook."""
    global is_attached, original_hook
    # NOTE(review): this assert is stripped under `python -O`, in which case
    # detaching twice would silently reinstall a stale hook — confirm intended.
    assert is_attached, "Async exception hook wasn't attached."
    sys.excepthook = original_hook
    is_attached = False
def get_frame(generator):
    """Given a generator, returns its current frame.

    Returns None for objects without a (non-None) ``gi_frame`` attribute,
    e.g. finished generators or non-generator values.
    """
    return getattr(generator, 'gi_frame', None)
4a1181dcdbf180f831b6ea7f1de4a1e5748c958ed02fb9d61a19a4b845dd194d
def filter_traceback(tb_list): 'Given a traceback as a list of strings, looks for common boilerplate and removes it.' '\n File "asynq/async_task.py", line 169, in asynq.async_task.AsyncTask._continue\n File "asynq/async_task.py", line 237, in asynq.async_task.AsyncTask._continue_on_generator\n File "asynq/async_task.py", line 209, in asynq.async_task.AsyncTask._continue_on_generator\n ' TASK_CONTINUE = (['asynq.async_task.AsyncTask._continue', 'asynq.async_task.AsyncTask._continue_on_generator', 'asynq.async_task.AsyncTask._continue_on_generator'], '___asynq_continue___') '\n File "asynq/decorators.py", line 161, in asynq.decorators.AsyncDecorator.__call__\n File "asynq/futures.py", line 54, in asynq.futures.FutureBase.value\n File "asynq/futures.py", line 63, in asynq.futures.FutureBase.value\n File "asynq/futures.py", line 153, in asynq.futures.FutureBase.raise_if_error\n File "<...>/python3.6/site-packages/qcore/errors.py", line 93, in reraise\n six.reraise(type(error), error, error._traceback)\n File "<...>/python3.6/site-packages/six.py", line 693, in reraise\n raise value\n ' FUTURE_BASE = (['asynq.decorators.AsyncDecorator.__call__', 'asynq.futures.FutureBase.value', 'asynq.futures.FutureBase.value', 'asynq.futures.FutureBase.raise_if_error', 'reraise', 'six.reraise', 'reraise', 'value'], '___asynq_future_raise_if_error___') '\n File "asynq/decorators.py", line 153, in asynq.decorators.AsyncDecorator.asynq\n File "asynq/decorators.py", line 203, in asynq.decorators.AsyncProxyDecorator._call_pure\n File "asynq/decorators.py", line 203, in asynq.decorators.AsyncProxyDecorator._call_pure\n File "asynq/decorators.py", line 204, in asynq.decorators.AsyncProxyDecorator._call_pure\n File "asynq/decorators.py", line 275, in asynq.decorators.async_call\n ' CALL_PURE = (['asynq.decorators.AsyncDecorator.asynq', 'asynq.decorators.AsyncProxyDecorator._call_pure', 'asynq.decorators.AsyncProxyDecorator._call_pure', 'asynq.decorators.AsyncProxyDecorator._call_pure', 
'asynq.decorators.async_call'], '___asynq_call_pure___') REPLACEMENTS = [TASK_CONTINUE, FUTURE_BASE, CALL_PURE] output = [] i = 0 while (i < len(tb_list)): did_replacement = False for (text_to_match, replacement) in REPLACEMENTS: matches = True j = 0 while ((j < len(text_to_match)) and ((i + j) < len(tb_list))): if (text_to_match[j] not in tb_list[(i + j)]): matches = False break j += 1 if (matches and (j == len(text_to_match))): output.append(((' ' + replacement) + '\n')) i = (i + j) did_replacement = True break if (not did_replacement): output.append(tb_list[i]) i += 1 return output
Given a traceback as a list of strings, looks for common boilerplate and removes it.
asynq/debug.py
filter_traceback
quora/asynq
662
python
def filter_traceback(tb_list):
    """Given a traceback as a list of strings, looks for common boilerplate and removes it.

    Known runs of consecutive asynq-internal frames are each replaced by a
    single placeholder line; all other lines pass through untouched.
    """
    # (needles, placeholder): each needle must appear, in order, on
    # consecutive traceback lines for the run to be collapsed.
    replacements = [
        # Scheduler resuming a task (AsyncTask._continue*).
        (['asynq.async_task.AsyncTask._continue',
          'asynq.async_task.AsyncTask._continue_on_generator',
          'asynq.async_task.AsyncTask._continue_on_generator'],
         '___asynq_continue___'),
        # Future re-raising a stored error via qcore/six reraise helpers.
        (['asynq.decorators.AsyncDecorator.__call__',
          'asynq.futures.FutureBase.value',
          'asynq.futures.FutureBase.value',
          'asynq.futures.FutureBase.raise_if_error',
          'reraise',
          'six.reraise',
          'reraise',
          'value'],
         '___asynq_future_raise_if_error___'),
        # Proxy decorator dispatch (_call_pure chain).
        (['asynq.decorators.AsyncDecorator.asynq',
          'asynq.decorators.AsyncProxyDecorator._call_pure',
          'asynq.decorators.AsyncProxyDecorator._call_pure',
          'asynq.decorators.AsyncProxyDecorator._call_pure',
          'asynq.decorators.async_call'],
         '___asynq_call_pure___'),
    ]
    total = len(tb_list)

    def _run_matches(pos, needles):
        # True when the whole needle run fits and each needle is a substring
        # of its corresponding line.
        if pos + len(needles) > total:
            return False
        return all(needle in tb_list[pos + offset]
                   for offset, needle in enumerate(needles))

    filtered = []
    pos = 0
    while pos < total:
        for needles, placeholder in replacements:
            if _run_matches(pos, needles):
                filtered.append(' ' + placeholder + '\n')
                pos += len(needles)
                break
        else:
            # No boilerplate run starts here; keep the line as-is.
            filtered.append(tb_list[pos])
            pos += 1
    return filtered
def filter_traceback(tb_list): '\n File "asynq/async_task.py", line 169, in asynq.async_task.AsyncTask._continue\n File "asynq/async_task.py", line 237, in asynq.async_task.AsyncTask._continue_on_generator\n File "asynq/async_task.py", line 209, in asynq.async_task.AsyncTask._continue_on_generator\n ' TASK_CONTINUE = (['asynq.async_task.AsyncTask._continue', 'asynq.async_task.AsyncTask._continue_on_generator', 'asynq.async_task.AsyncTask._continue_on_generator'], '___asynq_continue___') '\n File "asynq/decorators.py", line 161, in asynq.decorators.AsyncDecorator.__call__\n File "asynq/futures.py", line 54, in asynq.futures.FutureBase.value\n File "asynq/futures.py", line 63, in asynq.futures.FutureBase.value\n File "asynq/futures.py", line 153, in asynq.futures.FutureBase.raise_if_error\n File "<...>/python3.6/site-packages/qcore/errors.py", line 93, in reraise\n six.reraise(type(error), error, error._traceback)\n File "<...>/python3.6/site-packages/six.py", line 693, in reraise\n raise value\n ' FUTURE_BASE = (['asynq.decorators.AsyncDecorator.__call__', 'asynq.futures.FutureBase.value', 'asynq.futures.FutureBase.value', 'asynq.futures.FutureBase.raise_if_error', 'reraise', 'six.reraise', 'reraise', 'value'], '___asynq_future_raise_if_error___') '\n File "asynq/decorators.py", line 153, in asynq.decorators.AsyncDecorator.asynq\n File "asynq/decorators.py", line 203, in asynq.decorators.AsyncProxyDecorator._call_pure\n File "asynq/decorators.py", line 203, in asynq.decorators.AsyncProxyDecorator._call_pure\n File "asynq/decorators.py", line 204, in asynq.decorators.AsyncProxyDecorator._call_pure\n File "asynq/decorators.py", line 275, in asynq.decorators.async_call\n ' CALL_PURE = (['asynq.decorators.AsyncDecorator.asynq', 'asynq.decorators.AsyncProxyDecorator._call_pure', 'asynq.decorators.AsyncProxyDecorator._call_pure', 'asynq.decorators.AsyncProxyDecorator._call_pure', 'asynq.decorators.async_call'], '___asynq_call_pure___') REPLACEMENTS = [TASK_CONTINUE, 
FUTURE_BASE, CALL_PURE] output = [] i = 0 while (i < len(tb_list)): did_replacement = False for (text_to_match, replacement) in REPLACEMENTS: matches = True j = 0 while ((j < len(text_to_match)) and ((i + j) < len(tb_list))): if (text_to_match[j] not in tb_list[(i + j)]): matches = False break j += 1 if (matches and (j == len(text_to_match))): output.append(((' ' + replacement) + '\n')) i = (i + j) did_replacement = True break if (not did_replacement): output.append(tb_list[i]) i += 1 return output<|docstring|>Given a traceback as a list of strings, looks for common boilerplate and removes it.<|endoftext|>
19f5edcdaa0d90166625c9659339173f8343c64b4623c1183803d0959029ca8f
def syntax_highlight_tb(tb_text):
    """Syntax highlights the traceback so that's a little easier to parse."""
    # 'pytb' selects the Python-traceback lexer; stripall trims surrounding
    # whitespace before highlighting.
    tb_lexer = lexers.get_lexer_by_name('pytb', stripall=True)
    terminal_formatter = formatters.TerminalFormatter()
    return highlight(tb_text, tb_lexer, terminal_formatter)
Syntax highlights the traceback so that's a little easier to parse.
asynq/debug.py
syntax_highlight_tb
quora/asynq
662
python
def syntax_highlight_tb(tb_text):
    """Colorize a traceback string for terminal display.

    :param tb_text: the traceback rendered as a single string.
    :return: the highlighted text with terminal escape sequences.
    """
    # 'pytb' is presumably the Pygments Python-traceback lexer (lexers /
    # formatters / highlight match the Pygments API) -- TODO confirm against
    # this module's imports.
    lexer = lexers.get_lexer_by_name('pytb', stripall=True)
    return highlight(tb_text, lexer, formatters.TerminalFormatter())
def syntax_highlight_tb(tb_text): lexer = lexers.get_lexer_by_name('pytb', stripall=True) return highlight(tb_text, lexer, formatters.TerminalFormatter())<|docstring|>Syntax highlights the traceback so that's a little easier to parse.<|endoftext|>
8717c877cd9ad0e694d4fbd068e4eaab7cf394130f140669f9a58252cb6007fd
def setup_module():
    """Called before any tests are performed."""
    global c
    # Announce the module under test, then build the shared fixture instance.
    print('\n*** ' + __name__)
    c = Concrete()
Called before any tests are performed.
dakotathon/tests/test_method_base_uq.py
setup_module
csdms/dakotathon
8
python
def setup_module():
    """Called before any tests are performed."""
    # Print a banner naming this test module.
    print(('\n*** ' + __name__))
    # Module-level fixture shared by the tests below.
    global c
    c = Concrete()
def setup_module(): print(('\n*** ' + __name__)) global c c = Concrete()<|docstring|>Called before any tests are performed.<|endoftext|>
92bdb430f56b4b8fad1b02aa2f8beedd8e5072fe28c0f131cfbfce121ab5f7e4
def teardown_module():
    """Called after all tests have completed."""
    # Nothing to clean up for this module.
Called after all tests have completed.
dakotathon/tests/test_method_base_uq.py
teardown_module
csdms/dakotathon
8
python
def teardown_module():
    """Called after all tests have completed."""
    # Intentionally a no-op: this module needs no teardown.
    pass
def teardown_module(): pass<|docstring|>Called after all tests have completed.<|endoftext|>
5bbb297b4d6e2c651da8b479ee7a75642410b689658271e062d5cee86cd21b7d
@raises(TypeError)
def test_instantiate():
    """Test whether UncertaintyQuantificationBase fails to instantiate.

    The abstract base class must raise TypeError when constructed directly.
    """
    # Bug fix: the original checked `sys.version[0] == 2`, but sys.version is
    # a *string*, so its first character ('2'/'3') never equals the int 2 and
    # the branch was always False. sys.version_info[0] is the intended
    # major-version check.
    if sys.version_info[0] == 2:
        # On Python 2 the abstract class itself raises on instantiation.
        UncertaintyQuantificationBase()
    else:
        # Preserve the original Python 3 path, which raises directly.
        raise TypeError
Test whether UncertaintyQuantificationBase fails to instantiate.
dakotathon/tests/test_method_base_uq.py
test_instantiate
csdms/dakotathon
8
python
@raises(TypeError)
def test_instantiate():
    """Test that UncertaintyQuantificationBase cannot be instantiated directly."""
    # NOTE(review): sys.version is a string, so sys.version[0] is the
    # character '2' or '3' and never equals the int 2 -- this branch is
    # always False and the else always runs. Likely meant
    # sys.version_info[0]; confirm intent before changing.
    if (sys.version[0] == 2):
        d = UncertaintyQuantificationBase()
    else:
        raise TypeError
@raises(TypeError) def test_instantiate(): if (sys.version[0] == 2): d = UncertaintyQuantificationBase() else: raise TypeError<|docstring|>Test whether UncertaintyQuantificationBase fails to instantiate.<|endoftext|>
25da4e540dddc4f84d04bc59e1d075b23f69ad4ecdd1169d1b0cf5cef658e192
def test_get_basis_polynomial_family():
    """Test getting the basis_polynomial_family property."""
    # The property is expected to be exactly a plain str (not a subclass).
    family = c.basis_polynomial_family
    assert_true(type(family) is str)
Test getting the basis_polynomial_family property.
dakotathon/tests/test_method_base_uq.py
test_get_basis_polynomial_family
csdms/dakotathon
8
python
def test_get_basis_polynomial_family():
    """Test getting the basis_polynomial_family property."""
    # Exact type check: the property must be a plain str.
    assert_true((type(c.basis_polynomial_family) is str))
def test_get_basis_polynomial_family(): assert_true((type(c.basis_polynomial_family) is str))<|docstring|>Test getting the basis_polynomial_family property.<|endoftext|>